Merge branch 'master' of https://github.com/highfidelity/hifi into 21300

Johnathan Franck 2017-05-18 20:53:40 -04:00
commit d365931611
283 changed files with 11756 additions and 2558 deletions


@ -57,6 +57,7 @@ Agent::Agent(ReceivedMessage& message) :
ThreadedAssignment(message),
_receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES)
{
_entityEditSender.setPacketsPerSecond(DEFAULT_ENTITY_PPS_PER_SCRIPT);
DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
ResourceManager::init();


@ -53,31 +53,3 @@ QVariantMap AssignmentDynamic::getArguments() {
qDebug() << "UNEXPECTED -- AssignmentDynamic::getArguments called in assignment-client.";
return QVariantMap();
}
glm::vec3 AssignmentDynamic::getPosition() {
qDebug() << "UNEXPECTED -- AssignmentDynamic::getPosition called in assignment-client.";
return glm::vec3(0.0f);
}
glm::quat AssignmentDynamic::getRotation() {
qDebug() << "UNEXPECTED -- AssignmentDynamic::getRotation called in assignment-client.";
return glm::quat();
}
glm::vec3 AssignmentDynamic::getLinearVelocity() {
qDebug() << "UNEXPECTED -- AssignmentDynamic::getLinearVelocity called in assignment-client.";
return glm::vec3(0.0f);
}
void AssignmentDynamic::setLinearVelocity(glm::vec3 linearVelocity) {
qDebug() << "UNEXPECTED -- AssignmentDynamic::setLinearVelocity called in assignment-client.";
}
glm::vec3 AssignmentDynamic::getAngularVelocity() {
qDebug() << "UNEXPECTED -- AssignmentDynamic::getAngularVelocity called in assignment-client.";
return glm::vec3(0.0f);
}
void AssignmentDynamic::setAngularVelocity(glm::vec3 angularVelocity) {
qDebug() << "UNEXPECTED -- AssignmentDynamic::setAngularVelocity called in assignment-client.";
}


@ -24,6 +24,8 @@ public:
AssignmentDynamic(EntityDynamicType type, const QUuid& id, EntityItemPointer ownerEntity);
virtual ~AssignmentDynamic();
virtual void remapIDs(QHash<EntityItemID, EntityItemID>& map) override {};
virtual void removeFromSimulation(EntitySimulationPointer simulation) const override;
virtual EntityItemWeakPointer getOwnerEntity() const override { return _ownerEntity; }
virtual void setOwnerEntity(const EntityItemPointer ownerEntity) override { _ownerEntity = ownerEntity; }
@ -37,13 +39,6 @@ private:
QByteArray _data;
protected:
virtual glm::vec3 getPosition() override;
virtual glm::quat getRotation() override;
virtual glm::vec3 getLinearVelocity() override;
virtual void setLinearVelocity(glm::vec3 linearVelocity) override;
virtual glm::vec3 getAngularVelocity() override;
virtual void setAngularVelocity(glm::vec3 angularVelocity) override;
bool _active;
EntityItemWeakPointer _ownerEntity;
};


@ -38,13 +38,14 @@
#include "AudioMixer.h"
static const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.5f; // attenuation = -6dB * log2(distance)
static const int DISABLE_STATIC_JITTER_FRAMES = -1;
static const float DEFAULT_NOISE_MUTING_THRESHOLD = 1.0f;
static const QString AUDIO_MIXER_LOGGING_TARGET_NAME = "audio-mixer";
static const QString AUDIO_ENV_GROUP_KEY = "audio_env";
static const QString AUDIO_BUFFER_GROUP_KEY = "audio_buffer";
static const QString AUDIO_THREADING_GROUP_KEY = "audio_threading";
int AudioMixer::_numStaticJitterFrames{ -1 };
int AudioMixer::_numStaticJitterFrames{ DISABLE_STATIC_JITTER_FRAMES };
float AudioMixer::_noiseMutingThreshold{ DEFAULT_NOISE_MUTING_THRESHOLD };
float AudioMixer::_attenuationPerDoublingInDistance{ DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE };
std::map<QString, std::shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
@ -56,7 +57,12 @@ QVector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;
AudioMixer::AudioMixer(ReceivedMessage& message) :
ThreadedAssignment(message) {
// Always clear settings first
// This prevents previous assignment settings from sticking around
clearDomainSettings();
// hash the available codecs (on the mixer)
_availableCodecs.clear(); // Make sure struct is clean
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
std::for_each(codecPlugins.cbegin(), codecPlugins.cend(),
[&](const CodecPluginPointer& codec) {
@ -232,7 +238,7 @@ void AudioMixer::sendStatsPacket() {
}
// general stats
statsObject["useDynamicJitterBuffers"] = _numStaticJitterFrames == -1;
statsObject["useDynamicJitterBuffers"] = _numStaticJitterFrames == DISABLE_STATIC_JITTER_FRAMES;
statsObject["threads"] = _slavePool.numThreads();
@ -490,6 +496,16 @@ int AudioMixer::prepareFrame(const SharedNodePointer& node, unsigned int frame)
return data->checkBuffersBeforeFrameSend();
}
void AudioMixer::clearDomainSettings() {
_numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
_attenuationPerDoublingInDistance = DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE;
_noiseMutingThreshold = DEFAULT_NOISE_MUTING_THRESHOLD;
_codecPreferenceOrder.clear();
_audioZones.clear();
_zoneSettings.clear();
_zoneReverbSettings.clear();
}
void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
qDebug() << "AVX2 Support:" << (cpuSupportsAVX2() ? "enabled" : "disabled");
@ -525,7 +541,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
qDebug() << "Static desired jitter buffer frames:" << _numStaticJitterFrames;
} else {
qDebug() << "Disabling dynamic jitter buffers.";
_numStaticJitterFrames = -1;
_numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
}
// check for deprecated audio settings
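Note on the attenuation constant above: its comment, "attenuation = -6dB * log2(distance)", describes the distance falloff the 0.5 coefficient controls. A minimal sketch of that relationship, assuming a 1 m reference distance and a gain model of (1 - coefficient) per doubling of distance; this is an illustration only, not the mixer's actual gain code:

#include <algorithm>
#include <cmath>

// Illustration only: gain implied by "-6 dB per doubling" when the
// per-doubling attenuation coefficient is 0.5. The reference distance
// and the clamping are assumptions.
float distanceGain(float distanceMeters, float attenuationPerDoubling = 0.5f) {
    const float REFERENCE_DISTANCE = 1.0f; // assumed
    float d = std::max(distanceMeters, REFERENCE_DISTANCE);
    // Each doubling of distance multiplies the gain by (1 - coefficient), so
    // gain = (1 - coefficient) ^ log2(d / reference).
    return std::pow(1.0f - attenuationPerDoubling, std::log2(d / REFERENCE_DISTANCE));
}
// With the default coefficient of 0.5 this reduces to 1/d, i.e. -6 dB per doubling.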


@ -79,6 +79,7 @@ private:
QString percentageForMixStats(int counter);
void parseSettingsObject(const QJsonObject& settingsObject);
void clearDomainSettings();
float _trailingMixRatio { 0.0f };
float _throttlingRatio { 0.0f };


@ -665,12 +665,12 @@ void AvatarMixer::sendStatsPacket() {
void AvatarMixer::run() {
qCDebug(avatars) << "Waiting for connection to domain to request settings from domain-server.";
// wait until we have the domain-server settings, otherwise we bail
DomainHandler& domainHandler = DependencyManager::get<NodeList>()->getDomainHandler();
connect(&domainHandler, &DomainHandler::settingsReceived, this, &AvatarMixer::domainSettingsRequestComplete);
connect(&domainHandler, &DomainHandler::settingsReceiveFail, this, &AvatarMixer::domainSettingsRequestFailed);
ThreadedAssignment::commonInit(AVATAR_MIXER_LOGGING_NAME, NodeType::AvatarMixer);
}
@ -695,7 +695,7 @@ void AvatarMixer::domainSettingsRequestComplete() {
// parse the settings to pull out the values we need
parseDomainServerSettings(nodeList->getDomainHandler().getSettingsObject());
// start our tight loop...
start();
}
@ -745,7 +745,7 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
} else {
qCDebug(avatars) << "Avatar mixer will automatically determine number of threads to use. Using:" << _slavePool.numThreads() << "threads.";
}
const QString AVATARS_SETTINGS_KEY = "avatars";
static const QString MIN_SCALE_OPTION = "min_avatar_scale";


@ -145,7 +145,7 @@ private:
std::unordered_map<QUuid, QVector<JointData>> _lastOtherAvatarSentJoints;
uint64_t _identityChangeTimestamp;
bool _avatarSessionDisplayNameMustChange{ false };
bool _avatarSessionDisplayNameMustChange{ true };
int _numAvatarsSentLastFrame = 0;
int _numFramesSinceAdjustment = 0;


@ -24,9 +24,6 @@
#include <ScriptEngine.h>
#include <ThreadedAssignment.h>
static const int DEFAULT_MAX_ENTITY_PPS = 9000;
static const int DEFAULT_ENTITY_PPS_PER_SCRIPT = 900;
class EntityScriptServer : public ThreadedAssignment {
Q_OBJECT


@ -6,8 +6,8 @@ if (WIN32)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi7.zip
URL_MD5 bc2861e50852dd590cdc773a14a041a7
URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi8.zip
URL_MD5 b01510437ea15527156bc25cdf733bd9
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

cmake/modules/FindFBX.cmake (new file, 114 lines)

@ -0,0 +1,114 @@
# Locate the FBX SDK
#
# Defines the following variables:
#
# FBX_FOUND - Found the FBX SDK
# FBX_VERSION - Version number
# FBX_INCLUDE_DIRS - Include directories
# FBX_LIBRARIES - The libraries to link to
#
# Accepts the following variables as input:
#
# FBX_VERSION - as a CMake variable, e.g. 2017.0.1
# FBX_ROOT - (as a CMake or environment variable)
# The root directory of the FBX SDK install
# adapted from https://github.com/ufz-vislab/VtkFbxConverter/blob/master/FindFBX.cmake
# which uses the MIT license (https://github.com/ufz-vislab/VtkFbxConverter/blob/master/LICENSE.txt)
if (NOT FBX_VERSION)
if (WIN32)
set(FBX_VERSION 2017.1)
else()
set(FBX_VERSION 2017.0.1)
endif()
endif()
string(REGEX REPLACE "^([0-9]+).*$" "\\1" FBX_VERSION_MAJOR "${FBX_VERSION}")
string(REGEX REPLACE "^[0-9]+\\.([0-9]+).*$" "\\1" FBX_VERSION_MINOR "${FBX_VERSION}")
string(REGEX REPLACE "^[0-9]+\\.[0-9]+\\.([0-9]+).*$" "\\1" FBX_VERSION_PATCH "${FBX_VERSION}")
set(FBX_MAC_LOCATIONS "/Applications/Autodesk/FBX\ SDK/${FBX_VERSION}")
if (WIN32)
string(REGEX REPLACE "\\\\" "/" WIN_PROGRAM_FILES_X64_DIRECTORY $ENV{ProgramW6432})
endif()
set(FBX_WIN_LOCATIONS "${WIN_PROGRAM_FILES_X64_DIRECTORY}/Autodesk/FBX/FBX SDK/${FBX_VERSION}")
set(FBX_SEARCH_LOCATIONS $ENV{FBX_ROOT} ${FBX_ROOT} ${FBX_MAC_LOCATIONS} ${FBX_WIN_LOCATIONS})
function(_fbx_append_debugs _endvar _library)
if (${_library} AND ${_library}_DEBUG)
set(_output optimized ${${_library}} debug ${${_library}_DEBUG})
else()
set(_output ${${_library}})
endif()
set(${_endvar} ${_output} PARENT_SCOPE)
endfunction()
if (${CMAKE_CXX_COMPILER_ID} MATCHES "Clang")
set(fbx_compiler clang)
elseif (${CMAKE_CXX_COMPILER_ID} MATCHES "GNU")
set(fbx_compiler gcc4)
endif()
function(_fbx_find_library _name _lib _suffix)
if (MSVC12)
set(VS_PREFIX vs2013)
endif()
if (MSVC11)
set(VS_PREFIX vs2012)
endif()
if (MSVC10)
set(VS_PREFIX vs2010)
endif()
if (MSVC90)
set(VS_PREFIX vs2008)
endif()
find_library(${_name}
NAMES ${_lib}
HINTS ${FBX_SEARCH_LOCATIONS}
PATH_SUFFIXES lib/${fbx_compiler}/${_suffix} lib/${fbx_compiler}/ub/${_suffix} lib/${VS_PREFIX}/x64/${_suffix}
)
mark_as_advanced(${_name})
endfunction()
find_path(FBX_INCLUDE_DIR fbxsdk.h
PATHS ${FBX_SEARCH_LOCATIONS}
PATH_SUFFIXES include
)
mark_as_advanced(FBX_INCLUDE_DIR)
if (WIN32)
_fbx_find_library(FBX_LIBRARY libfbxsdk-md release)
_fbx_find_library(FBX_LIBRARY_DEBUG libfbxsdk-md debug)
elseif (APPLE)
find_library(CARBON NAMES Carbon)
find_library(SYSTEM_CONFIGURATION NAMES SystemConfiguration)
_fbx_find_library(FBX_LIBRARY libfbxsdk.a release)
_fbx_find_library(FBX_LIBRARY_DEBUG libfbxsdk.a debug)
endif()
include(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(FBX DEFAULT_MSG FBX_LIBRARY FBX_INCLUDE_DIR)
if (FBX_FOUND)
set(FBX_INCLUDE_DIRS ${FBX_INCLUDE_DIR})
_fbx_append_debugs(FBX_LIBRARIES FBX_LIBRARY)
add_definitions(-DFBXSDK_NEW_API)
if (WIN32)
add_definitions(-DK_PLUGIN -DK_FBXSDK -DK_NODLL)
set(CMAKE_EXE_LINKER_FLAGS /NODEFAULTLIB:\"LIBCMT\")
set(FBX_LIBRARIES ${FBX_LIBRARIES} Wininet.lib)
elseif (APPLE)
set(FBX_LIBRARIES ${FBX_LIBRARIES} ${CARBON} ${SYSTEM_CONFIGURATION})
endif()
endif()


@ -49,48 +49,71 @@
"id": "ik",
"type": "inverseKinematics",
"data": {
"solutionSource": "relaxToUnderPoses",
"solutionSourceVar": "solutionSource",
"targets": [
{
"jointName": "Hips",
"positionVar": "hipsPosition",
"rotationVar": "hipsRotation",
"typeVar": "hipsType"
"typeVar": "hipsType",
"weightVar": "hipsWeight",
"weight": 1.0,
"flexCoefficients": [1]
},
{
"jointName": "RightHand",
"positionVar": "rightHandPosition",
"rotationVar": "rightHandRotation",
"typeVar": "rightHandType"
"typeVar": "rightHandType",
"weightVar": "rightHandWeight",
"weight": 1.0,
"flexCoefficients": [1, 0.5, 0.5, 0.25, 0.1, 0.05, 0.01, 0.0, 0.0]
},
{
"jointName": "LeftHand",
"positionVar": "leftHandPosition",
"rotationVar": "leftHandRotation",
"typeVar": "leftHandType"
"typeVar": "leftHandType",
"weightVar": "leftHandWeight",
"weight": 1.0,
"flexCoefficients": [1, 0.5, 0.5, 0.25, 0.1, 0.05, 0.01, 0.0, 0.0]
},
{
"jointName": "RightFoot",
"positionVar": "rightFootPosition",
"rotationVar": "rightFootRotation",
"typeVar": "rightFootType"
"typeVar": "rightFootType",
"weightVar": "rightFootWeight",
"weight": 1.0,
"flexCoefficients": [1, 0.45, 0.45]
},
{
"jointName": "LeftFoot",
"positionVar": "leftFootPosition",
"rotationVar": "leftFootRotation",
"typeVar": "leftFootType"
"typeVar": "leftFootType",
"weightVar": "leftFootWeight",
"weight": 1.0,
"flexCoefficients": [1, 0.45, 0.45]
},
{
"jointName": "Spine2",
"positionVar": "spine2Position",
"rotationVar": "spine2Rotation",
"typeVar": "spine2Type"
"typeVar": "spine2Type",
"weightVar": "spine2Weight",
"weight": 1.0,
"flexCoefficients": [0.45, 0.45]
},
{
"jointName": "Head",
"positionVar": "headPosition",
"rotationVar": "headRotation",
"typeVar": "headType"
"typeVar": "headType",
"weightVar": "headWeight",
"weight": 4.0,
"flexCoefficients": [1, 0.5, 0.5, 0.5, 0.5]
}
]
},
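The new weight and flexCoefficients fields above tune how strongly each IK target pulls on its joint chain. As a toy reading of what the coefficient list means (not the AnimInverseKinematics solver itself; the tip-to-base ordering and the reuse of the last coefficient for longer chains are assumptions), each joint's share of a correction is scaled by its coefficient:

#include <algorithm>
#include <cstdio>
#include <vector>

// Toy illustration: damp a per-joint correction (here just an angle in degrees)
// by its flex coefficient, assuming coefficients run from the target joint back
// toward the base of the chain and the last value repeats for any extra joints.
std::vector<float> applyFlex(const std::vector<float>& corrections,
                             const std::vector<float>& flexCoefficients) {
    std::vector<float> damped(corrections.size());
    for (size_t i = 0; i < corrections.size(); ++i) {
        float flex = flexCoefficients.empty()
            ? 1.0f
            : flexCoefficients[std::min(i, flexCoefficients.size() - 1)];
        damped[i] = corrections[i] * flex;
    }
    return damped;
}

int main() {
    // Coefficients from the RightHand target above: most of a 10-degree
    // correction lands on the hand and wrist, progressively less up the arm.
    auto damped = applyFlex(std::vector<float>(9, 10.0f),
                            { 1.0f, 0.5f, 0.5f, 0.25f, 0.1f, 0.05f, 0.01f, 0.0f, 0.0f });
    for (float angle : damped) { std::printf("%.2f ", angle); }
    std::printf("\n");
    return 0;
}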


@ -35,6 +35,11 @@
{ "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },
{ "from": "Vive.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },
{ "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] }
{ "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] },
{ "from": "Vive.LeftFoot", "to" : "Standard.LeftFoot", "when": [ "Application.InHMD"] },
{ "from": "Vive.RightFoot", "to" : "Standard.RightFoot", "when": [ "Application.InHMD"] },
{ "from": "Vive.Hips", "to" : "Standard.Hips", "when": [ "Application.InHMD"] },
{ "from": "Vive.Spine2", "to" : "Standard.Spine2", "when": [ "Application.InHMD"] },
{ "from": "Vive.Head", "to" : "Standard.Head", "when" : [ "Application.InHMD"] }
]
}


@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generated by IcoMoon.io -->
<svg
xmlns:osb="http://www.openswatchbook.org/uri/2009/osb"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
width="50"
height="50"
viewBox="0 0 50 50"
id="svg3713"
sodipodi:docname="raise-hand-a.svg"
inkscape:version="0.92.1 r15371">
<metadata
id="metadata3719">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title />
</cc:Work>
</rdf:RDF>
</metadata>
<defs
id="defs3717">
<linearGradient
id="linearGradient6484"
osb:paint="solid">
<stop
style="stop-color:#000000;stop-opacity:1;"
offset="0"
id="stop6482" />
</linearGradient>
</defs>
<sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1920"
inkscape:window-height="1001"
id="namedview3715"
showgrid="false"
inkscape:zoom="11.8"
inkscape:cx="-20"
inkscape:cy="23.559322"
inkscape:window-x="-9"
inkscape:window-y="-9"
inkscape:window-maximized="1"
inkscape:current-layer="svg3713" />
<path
d="m 38.064897,12.746263 c -0.728,0 -1.412,0.196 -2,0.538 v -2.538 c 0,-2.2060002 -1.794,-4.0000002 -4,-4.0000002 -0.824,0 -1.588,0.25 -2.226,0.678 -0.548,-1.558 -2.032,-2.678 -3.774,-2.678 -1.742,0 -3.228,1.12 -3.774,2.678 -0.636,-0.428 -1.402,-0.678 -2.226,-0.678 -2.206,0 -4,1.794 -4,4.0000002 v 14.746 l -2.692,-4.666 c -0.522,-0.95 -1.374,-1.626 -2.398,-1.906 -0.9980001,-0.272 -2.0360001,-0.128 -2.9240001,0.404 -1.814,1.088 -2.506,3.548 -1.54,5.484 0.06,0.122 1.336,2.736 5.3200001,10.7 1.876,3.75 3.934,6.432 6.118,7.968 1.714,1.206 2.898,1.268 3.118,1.268 h 10 c 1.702,0 3.284,-0.554 4.704,-1.644 1.334,-1.026 2.492,-2.51 3.44,-4.408 1.868,-3.736 2.856,-8.904 2.856,-14.948 v -7 c 0,-2.206 -1.794,-4 -4,-4 z m 2,11 c 0,5.734 -0.914,10.592 -2.644,14.052 -1.128,2.256 -3.148,4.948 -6.356,4.948 h -9.98 c -0.078,-0.006 -0.92,-0.1 -2.19,-1.05 -1.266,-0.948 -3.21,-2.944 -5.276,-7.08 -4.0540001,-8.108 -5.3000001,-10.662 -5.3120001,-10.686 -0.002,-0.004 -0.002,-0.006 -0.004,-0.008 -0.502,-1.006 -0.146,-2.324 0.778,-2.878 0.416,-0.25 0.902,-0.316 1.3700001,-0.19 0.498,0.136 0.916,0.472 1.174,0.944 0.004,0.006 0.008,0.014 0.012,0.02 l 3.122,5.41 c 0.638,1.166 1.356,1.656 2.134,1.458 0.78,-0.198 1.174,-0.978 1.174,-2.314 v -15.626 c 0,-1.1020002 0.898,-2.0000002 2,-2.0000002 1.102,0 2,0.898 2,2.0000002 v 13 c 0,0.552 0.448,1 1,1 0.552,0 1,-0.448 1,-1 V 8.7462628 c 0,-1.102 0.898,-2 2,-2 1.102,0 2,0.898 2,2 V 23.746263 c 0,0.552 0.448,1 1,1 0.552,0 1,-0.448 1,-1 v -13 c 0,-1.1020002 0.898,-2.0000002 2,-2.0000002 1.102,0 2,0.898 2,2.0000002 v 15 c 0,0.552 0.448,1 1,1 0.552,0 1,-0.448 1,-1 v -9 c 0,-1.102 0.898,-2 2,-2 1.102,0 2,0.898 2,2 v 7 z"
id="path3711"
inkscape:connector-curvature="0"
style="fill:#000000;stroke-width:1;stroke:#000000;stroke-opacity:1;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:transform-center-x="0.33898322"
inkscape:transform-center-y="-0.6779661" />
</svg>

(New SVG file added, 3.6 KiB; preview not shown.)

@ -0,0 +1,60 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generated by IcoMoon.io -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
width="50"
height="50"
viewBox="0 0 50 50"
id="svg3713"
sodipodi:docname="raise-hand-i.svg"
inkscape:version="0.92.1 r15371">
<metadata
id="metadata3719">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title />
</cc:Work>
</rdf:RDF>
</metadata>
<defs
id="defs3717" />
<sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1920"
inkscape:window-height="1001"
id="namedview3715"
showgrid="false"
inkscape:zoom="11.8"
inkscape:cx="-20"
inkscape:cy="23.559322"
inkscape:window-x="-9"
inkscape:window-y="-9"
inkscape:window-maximized="1"
inkscape:current-layer="svg3713" />
<path
d="m 38.064897,12.746263 c -0.728,0 -1.412,0.196 -2,0.538 v -2.538 c 0,-2.2060002 -1.794,-4.0000002 -4,-4.0000002 -0.824,0 -1.588,0.25 -2.226,0.678 -0.548,-1.558 -2.032,-2.678 -3.774,-2.678 -1.742,0 -3.228,1.12 -3.774,2.678 -0.636,-0.428 -1.402,-0.678 -2.226,-0.678 -2.206,0 -4,1.794 -4,4.0000002 v 14.746 l -2.692,-4.666 c -0.522,-0.95 -1.374,-1.626 -2.398,-1.906 -0.9980001,-0.272 -2.0360001,-0.128 -2.9240001,0.404 -1.814,1.088 -2.506,3.548 -1.54,5.484 0.06,0.122 1.336,2.736 5.3200001,10.7 1.876,3.75 3.934,6.432 6.118,7.968 1.714,1.206 2.898,1.268 3.118,1.268 h 10 c 1.702,0 3.284,-0.554 4.704,-1.644 1.334,-1.026 2.492,-2.51 3.44,-4.408 1.868,-3.736 2.856,-8.904 2.856,-14.948 v -7 c 0,-2.206 -1.794,-4 -4,-4 z m 2,11 c 0,5.734 -0.914,10.592 -2.644,14.052 -1.128,2.256 -3.148,4.948 -6.356,4.948 h -9.98 c -0.078,-0.006 -0.92,-0.1 -2.19,-1.05 -1.266,-0.948 -3.21,-2.944 -5.276,-7.08 -4.0540001,-8.108 -5.3000001,-10.662 -5.3120001,-10.686 -0.002,-0.004 -0.002,-0.006 -0.004,-0.008 -0.502,-1.006 -0.146,-2.324 0.778,-2.878 0.416,-0.25 0.902,-0.316 1.3700001,-0.19 0.498,0.136 0.916,0.472 1.174,0.944 0.004,0.006 0.008,0.014 0.012,0.02 l 3.122,5.41 c 0.638,1.166 1.356,1.656 2.134,1.458 0.78,-0.198 1.174,-0.978 1.174,-2.314 v -15.626 c 0,-1.1020002 0.898,-2.0000002 2,-2.0000002 1.102,0 2,0.898 2,2.0000002 v 13 c 0,0.552 0.448,1 1,1 0.552,0 1,-0.448 1,-1 V 8.7462628 c 0,-1.102 0.898,-2 2,-2 1.102,0 2,0.898 2,2 V 23.746263 c 0,0.552 0.448,1 1,1 0.552,0 1,-0.448 1,-1 v -13 c 0,-1.1020002 0.898,-2.0000002 2,-2.0000002 1.102,0 2,0.898 2,2.0000002 v 15 c 0,0.552 0.448,1 1,1 0.552,0 1,-0.448 1,-1 v -9 c 0,-1.102 0.898,-2 2,-2 1.102,0 2,0.898 2,2 v 7 z"
id="path3711"
inkscape:connector-curvature="0"
style="fill:#ffffff;stroke-width:1;fill-opacity:1;stroke:#fdfdfc;stroke-opacity:1;stroke-miterlimit:4;stroke-dasharray:none"
inkscape:transform-center-x="0.33898322"
inkscape:transform-center-y="-0.6779661" />
</svg>

(New SVG file added, 3.4 KiB. Several binary files changed or removed, including images of 6.1 KiB and 394 KiB; previews not shown.)

@ -17,26 +17,26 @@ Rectangle {
property alias pixelSize: label.font.pixelSize;
property bool selected: false
property bool hovered: false
property bool enabled: false
property int spacing: 2
property var action: function () {}
property string enabledColor: hifi.colors.blueHighlight
property string disabledColor: hifi.colors.blueHighlight
property string highlightColor: hifi.colors.blueHighlight;
width: label.width + 64
height: 32
color: hifi.colors.white
enabled: false
HifiConstants { id: hifi }
RalewaySemiBold {
id: label;
color: enabledColor
color: enabled ? enabledColor : disabledColor
font.pixelSize: 15;
anchors {
horizontalCenter: parent.horizontalCenter;
verticalCenter: parent.verticalCenter;
}
}
Rectangle {
id: indicator


@ -8,6 +8,7 @@ import "../styles" as HifiStyles
import "../styles-uit"
import "../"
import "."
Item {
id: web
HifiConstants { id: hifi }
@ -22,17 +23,14 @@ Item {
property bool keyboardRaised: false
property bool punctuationMode: false
property bool isDesktop: false
property string initialPage: ""
property bool startingUp: true
property alias webView: webview
property alias profile: webview.profile
property bool remove: false
property var urlList: []
property var forwardList: []
property int currentPage: -1 // used as a model for repeater
property alias pagesModel: pagesModel
// Manage own browse history because WebEngineView history is wiped when a new URL is loaded via
// onNewViewRequested, e.g., as happens when a social media share button is clicked.
property var history: []
property int historyIndex: -1
Rectangle {
id: buttons
@ -51,21 +49,22 @@ Item {
TabletWebButton {
id: back
enabledColor: hifi.colors.baseGray
enabled: false
enabledColor: hifi.colors.darkGray
disabledColor: hifi.colors.lightGrayText
enabled: historyIndex > 0
text: "BACK"
MouseArea {
anchors.fill: parent
onClicked: goBack()
hoverEnabled: true
}
}
TabletWebButton {
id: close
enabledColor: hifi.colors.darkGray
disabledColor: hifi.colors.lightGrayText
enabled: true
text: "CLOSE"
MouseArea {
@ -75,7 +74,6 @@ Item {
}
}
RalewaySemiBold {
id: displayUrl
color: hifi.colors.baseGray
@ -90,7 +88,6 @@ Item {
}
}
MouseArea {
anchors.fill: parent
preventStealing: true
@ -98,29 +95,10 @@ Item {
}
}
ListModel {
id: pagesModel
onCountChanged: {
currentPage = count - 1;
if (currentPage > 0) {
back.enabledColor = hifi.colors.darkGray;
} else {
back.enabledColor = hifi.colors.baseGray;
}
}
}
function goBack() {
if (webview.canGoBack) {
forwardList.push(webview.url);
webview.goBack();
} else if (web.urlList.length > 0) {
var url = web.urlList.pop();
loadUrl(url);
} else if (web.forwardList.length > 0) {
var url = web.forwardList.pop();
loadUrl(url);
web.forwardList = [];
if (historyIndex > 0) {
historyIndex--;
loadUrl(history[historyIndex]);
}
}
@ -137,19 +115,12 @@ Item {
}
function goForward() {
if (currentPage < pagesModel.count - 1) {
currentPage++;
if (historyIndex < history.length - 1) {
historyIndex++;
loadUrl(history[historyIndex]);
}
}
function gotoPage(url) {
urlAppend(url)
}
function isUrlLoaded(url) {
return (pagesModel.get(currentPage).webUrl === url);
}
function reloadPage() {
view.reloadAndBypassCache()
view.setActiveFocusOnPress(true);
@ -161,36 +132,8 @@ Item {
web.url = webview.url;
}
function onInitialPage(url) {
return (url === webview.url);
}
function urlAppend(url) {
var lurl = decodeURIComponent(url)
if (lurl[lurl.length - 1] !== "/") {
lurl = lurl + "/"
}
web.urlList.push(url);
setBackButtonStatus();
}
function setBackButtonStatus() {
if (web.urlList.length > 0 || webview.canGoBack) {
back.enabledColor = hifi.colors.darkGray;
back.enabled = true;
} else {
back.enabledColor = hifi.colors.baseGray;
back.enabled = false;
}
}
onUrlChanged: {
loadUrl(url);
if (startingUp) {
web.initialPage = webview.url;
startingUp = false;
}
}
QtObject {
@ -258,6 +201,17 @@ Item {
grantFeaturePermission(securityOrigin, feature, true);
}
onUrlChanged: {
// Record history, skipping null and duplicate items.
var urlString = url + "";
urlString = urlString.replace(/\//g, "%2F"); // Consistent representation of "/"s to avoid false differences.
if (urlString.length > 0 && (historyIndex === -1 || urlString !== history[historyIndex])) {
historyIndex++;
history = history.slice(0, historyIndex);
history.push(urlString);
}
}
onLoadingChanged: {
keyboardRaised = false;
punctuationMode = false;
@ -277,17 +231,11 @@ Item {
}
if (WebEngineView.LoadSucceededStatus == loadRequest.status) {
if (startingUp) {
web.initialPage = webview.url;
startingUp = false;
}
webview.forceActiveFocus();
}
}
onNewViewRequested: {
var currentUrl = webview.url;
urlAppend(currentUrl);
request.openIn(webview);
}
}
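The new history handling above is small but has one easy-to-miss invariant: visiting a new page must discard any forward entries before appending, which is what the history.slice(0, historyIndex) call does. The same bookkeeping written out in C++, purely to spell out the logic (the QML above is the actual implementation):

#include <string>
#include <vector>

// Browser-style history: an index into a list of URLs. Visiting a new URL
// drops everything after the current position, mirroring the QML's
// history.slice(0, historyIndex) followed by push().
class BrowseHistory {
public:
    void visit(const std::string& url) {
        // Skip empty and duplicate entries (the QML additionally normalizes
        // "/" characters before comparing).
        if (url.empty() || (_index >= 0 && url == _entries[_index])) {
            return;
        }
        _entries.resize(_index + 1);   // drop any forward entries
        _entries.push_back(url);
        ++_index;
    }
    bool canGoBack() const { return _index > 0; }
    bool canGoForward() const { return _index + 1 < (int)_entries.size(); }
    const std::string& back() { return _entries[--_index]; }      // caller checks canGoBack()
    const std::string& forward() { return _entries[++_index]; }   // caller checks canGoForward()

private:
    std::vector<std::string> _entries;
    int _index = -1;
};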


@ -35,11 +35,6 @@ Rectangle {
property string title: "Audio Options"
signal sendToScript(var message);
//set models after Components is shown
Component.onCompleted: {
refreshTimer.start()
refreshTimerOutput.start()
}
Component {
id: separator
@ -84,7 +79,7 @@ Rectangle {
}
Connections {
target: AvatarInputs
target: AvatarInputs !== undefined ? AvatarInputs : null
onShowAudioToolsChanged: {
audioTools.checkbox.checked = showAudioTools
}
@ -105,10 +100,12 @@ Rectangle {
id: audioTools
width: parent.width
anchors { left: parent.left; right: parent.right; leftMargin: 30 }
checkbox.checked: AvatarInputs.showAudioTools
checkbox.checked: AvatarInputs !== undefined ? AvatarInputs.showAudioTools : false
text.text: qsTr("Show audio level meter")
onCheckBoxClicked: {
AvatarInputs.showAudioTools = checked
if (AvatarInputs !== undefined) {
AvatarInputs.showAudioTools = checked
}
}
}
@ -138,30 +135,34 @@ Rectangle {
}
ListView {
Timer {
id: refreshTimer
interval: 1
repeat: false
onTriggered: {
//refresh model
inputAudioListView.model = undefined
inputAudioListView.model = AudioDevice.inputAudioDevices
}
}
id: inputAudioListView
anchors { left: parent.left; right: parent.right; leftMargin: 70 }
height: 125
spacing: 16
spacing: 0
clip: true
snapMode: ListView.SnapToItem
delegate: AudioCheckbox {
model: AudioDevice
delegate: Item {
width: parent.width
checkbox.checked: (modelData === AudioDevice.getInputDevice())
text.text: modelData
onCheckBoxClicked: {
if (checked) {
AudioDevice.setInputDevice(modelData)
refreshTimer.start()
visible: devicemode === 0
height: visible ? 36 : 0
AudioCheckbox {
id: cbin
anchors.verticalCenter: parent.verticalCenter
Binding {
target: cbin.checkbox
property: 'checked'
value: devicechecked
}
width: parent.width
cbchecked: devicechecked
text.text: devicename
onCheckBoxClicked: {
if (checked) {
AudioDevice.setInputDeviceAsync(devicename)
}
}
}
}
@ -191,31 +192,33 @@ Rectangle {
text: qsTr("CHOOSE OUTPUT DEVICE")
}
}
ListView {
id: outputAudioListView
Timer {
id: refreshTimerOutput
interval: 1
repeat: false
onTriggered: {
//refresh model
outputAudioListView.model = undefined
outputAudioListView.model = AudioDevice.outputAudioDevices
}
}
anchors { left: parent.left; right: parent.right; leftMargin: 70 }
height: 250
spacing: 16
spacing: 0
clip: true
snapMode: ListView.SnapToItem
delegate: AudioCheckbox {
model: AudioDevice
delegate: Item {
width: parent.width
checkbox.checked: (modelData === AudioDevice.getOutputDevice())
text.text: modelData
onCheckBoxClicked: {
if (checked) {
AudioDevice.setOutputDevice(modelData)
refreshTimerOutput.start()
visible: devicemode === 1
height: visible ? 36 : 0
AudioCheckbox {
id: cbout
width: parent.width
anchors.verticalCenter: parent.verticalCenter
Binding {
target: cbout.checkbox
property: 'checked'
value: devicechecked
}
text.text: devicename
onCheckBoxClicked: {
if (checked) {
AudioDevice.setOutputDeviceAsync(devicename)
}
}
}
}
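The reworked delegates above no longer treat the device list as a plain string model: they read devicename, devicemode, and devicechecked roles and call the new setInputDeviceAsync/setOutputDeviceAsync methods. The backing model is not part of this file, so the following is only a guess at its shape, sketched as a QAbstractListModel exposing those three role names (the class name, struct, and role enum are all assumptions; the real AudioDevice model may differ):

#include <QAbstractListModel>
#include <QVector>

// Hypothetical backing model for the audio device lists: one row per device,
// with the three roles the QML delegates reference.
class AudioDeviceList : public QAbstractListModel {
    Q_OBJECT
public:
    struct AudioDeviceInfo {
        QString name;
        int mode = 0;          // 0 == input, 1 == output (matches the QML's visibility test)
        bool checked = false;  // currently selected device
    };
    enum Roles { NameRole = Qt::UserRole + 1, ModeRole, CheckedRole };

    int rowCount(const QModelIndex& = QModelIndex()) const override { return _devices.size(); }

    QVariant data(const QModelIndex& index, int role) const override {
        if (!index.isValid() || index.row() >= _devices.size()) { return QVariant(); }
        const auto& device = _devices[index.row()];
        switch (role) {
            case NameRole: return device.name;
            case ModeRole: return device.mode;
            case CheckedRole: return device.checked;
            default: return QVariant();
        }
    }

    QHash<int, QByteArray> roleNames() const override {
        return { { NameRole, "devicename" }, { ModeRole, "devicemode" }, { CheckedRole, "devicechecked" } };
    }

private:
    QVector<AudioDeviceInfo> _devices;
};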


@ -238,8 +238,25 @@ Column {
stackShadowNarrowing: root.stackShadowNarrowing;
shadowHeight: root.stackedCardShadowHeight;
hoverThunk: function () { scroll.currentIndex = index; }
unhoverThunk: function () { scroll.currentIndex = -1; }
hoverThunk: function () { scrollToIndex(index); }
unhoverThunk: function () { scrollToIndex(-1); }
}
}
NumberAnimation {
id: anim;
target: scroll;
property: "contentX";
duration: 250;
}
function scrollToIndex(index) {
anim.running = false;
var pos = scroll.contentX;
var destPos;
scroll.positionViewAtIndex(index, ListView.Contain);
destPos = scroll.contentX;
anim.from = pos;
anim.to = destPos;
scroll.currentIndex = index;
anim.running = true;
}
}


@ -132,62 +132,16 @@ Item {
color: hifi.colors.textFieldLightBackground
border.color: hifi.colors.blueHighlight
border.width: 0
TextInput {
id: myDisplayNameText
// Properties
text: thisNameCard.displayName
maximumLength: 256
clip: true
// Size
width: parent.width
height: parent.height
// Anchors
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
anchors.leftMargin: 10
anchors.right: parent.right
anchors.rightMargin: editGlyph.width + editGlyph.anchors.rightMargin
// Style
color: hifi.colors.darkGray
FontLoader { id: firaSansSemiBold; source: "../../fonts/FiraSans-SemiBold.ttf"; }
font.family: firaSansSemiBold.name
font.pixelSize: displayNameTextPixelSize
selectionColor: hifi.colors.blueAccent
selectedTextColor: "black"
// Text Positioning
verticalAlignment: TextInput.AlignVCenter
horizontalAlignment: TextInput.AlignLeft
autoScroll: false;
// Signals
onEditingFinished: {
if (MyAvatar.displayName !== text) {
MyAvatar.displayName = text;
UserActivityLogger.palAction("display_name_change", text);
}
cursorPosition = 0
focus = false
myDisplayName.border.width = 0
color = hifi.colors.darkGray
pal.currentlyEditingDisplayName = false
autoScroll = false;
}
}
MouseArea {
anchors.fill: parent
hoverEnabled: true
onClicked: {
myDisplayName.border.width = 1
myDisplayNameText.focus ? myDisplayNameText.cursorPosition = myDisplayNameText.positionAt(mouseX, mouseY, TextInput.CursorOnCharacter) : myDisplayNameText.selectAll();
myDisplayNameText.focus = true
myDisplayNameText.color = "black"
pal.currentlyEditingDisplayName = true
myDisplayNameText.autoScroll = true;
myDisplayNameText.focus = true;
myDisplayNameText.cursorPosition = myDisplayNameText.positionAt(mouseX - myDisplayNameText.anchors.leftMargin, mouseY, TextInput.CursorOnCharacter);
}
onDoubleClicked: {
myDisplayNameText.selectAll();
myDisplayNameText.focus = true;
pal.currentlyEditingDisplayName = true
myDisplayNameText.autoScroll = true;
}
onEntered: myDisplayName.color = hifi.colors.lightGrayText;
onExited: myDisplayName.color = hifi.colors.textFieldLightBackground;
@ -207,6 +161,54 @@ Item {
verticalAlignment: Text.AlignVCenter
color: hifi.colors.baseGray
}
TextInput {
id: myDisplayNameText
// Properties
text: thisNameCard.displayName
maximumLength: 256
clip: true
// Size
width: parent.width
height: parent.height
// Anchors
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
anchors.leftMargin: 10
anchors.right: parent.right
anchors.rightMargin: editGlyph.width + editGlyph.anchors.rightMargin
// Style
FontLoader { id: firaSansSemiBold; source: "../../fonts/FiraSans-SemiBold.ttf"; }
font.family: firaSansSemiBold.name
font.pixelSize: displayNameTextPixelSize
selectionColor: hifi.colors.blueAccent
selectedTextColor: "black"
// Text Positioning
verticalAlignment: TextInput.AlignVCenter
horizontalAlignment: TextInput.AlignLeft
autoScroll: false;
// Signals
onEditingFinished: {
if (MyAvatar.displayName !== text) {
MyAvatar.displayName = text;
UserActivityLogger.palAction("display_name_change", text);
}
focus = false;
}
onFocusChanged: {
if (focus === true) {
myDisplayName.border.width = 1
color = "black"
autoScroll = true;
pal.currentlyEditingDisplayName = true
} else {
myDisplayName.border.width = 0
color: hifi.colors.darkGray
cursorPosition = 0;
autoScroll = false;
pal.currentlyEditingDisplayName = false
}
}
}
}
// DisplayName container for others' cards
Item {


@ -844,7 +844,7 @@ Rectangle {
boxSize: 24;
onClicked: {
var newValue = model.connection !== "friend";
connectionsUserModel.setProperty(model.userIndex, styleData.role, newValue);
connectionsUserModel.setProperty(model.userIndex, styleData.role, (newValue ? "friend" : "connection"));
connectionsUserModelData[model.userIndex][styleData.role] = newValue; // Defensive programming
pal.sendToScript({method: newValue ? 'addFriend' : 'removeFriend', params: model.userName});


@ -8,6 +8,7 @@ Row {
id: row
spacing: 16
property alias checkbox: cb
property alias cbchecked: cb.checked
property alias text: txt
signal checkBoxClicked(bool checked)


@ -33,6 +33,6 @@ StackView {
TabletPreferencesDialog {
id: root
objectName: "TabletGeneralPreferences"
showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers", "Perception Neuron", "Kinect"]
showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers", "Perception Neuron", "Kinect", "Vive Pucks Configuration"]
}
}


@ -136,8 +136,8 @@ Item {
for (var i = 0; i < sections.length; i++) {
totalHeight += sections[i].height + sections[i].getPreferencesHeight();
}
console.log(totalHeight);
return totalHeight;
var bottomPadding = 100;
return (totalHeight + bottomPadding);
}
}
}


@ -563,11 +563,8 @@ const bool DEFAULT_DESKTOP_TABLET_BECOMES_TOOLBAR = true;
const bool DEFAULT_HMD_TABLET_BECOMES_TOOLBAR = false;
const bool DEFAULT_PREFER_AVATAR_FINGER_OVER_STYLUS = false;
Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bool runServer, QString runServerPathOption) :
Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
QApplication(argc, argv),
_shouldRunServer(runServer),
_runServerPath(runServerPathOption),
_runningMarker(this, RUNNING_MARKER_FILENAME),
_window(new MainWindow(desktop())),
_sessionRunTimer(startupTimer),
_previousSessionCrashed(setupEssentials(argc, argv)),
@ -622,8 +619,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// make sure the debug draw singleton is initialized on the main thread.
DebugDraw::getInstance().removeMarker("");
_runningMarker.startRunningMarker();
PluginContainer* pluginContainer = dynamic_cast<PluginContainer*>(this); // set the container for any plugins that care
PluginManager::getInstance()->setContainer(pluginContainer);
@ -675,38 +670,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
static const QString OCULUS_STORE_ARG = "--oculus-store";
setProperty(hifi::properties::OCULUS_STORE, arguments().indexOf(OCULUS_STORE_ARG) != -1);
static const QString NO_UPDATER_ARG = "--no-updater";
static const bool noUpdater = arguments().indexOf(NO_UPDATER_ARG) != -1;
static const bool wantsSandboxRunning = shouldRunServer();
static bool determinedSandboxState = false;
static bool sandboxIsRunning = false;
SandboxUtils sandboxUtils;
// updateHeartbeat() because we are going to poll shortly...
updateHeartbeat();
sandboxUtils.ifLocalSandboxRunningElse([&]() {
qCDebug(interfaceapp) << "Home sandbox appears to be running.....";
determinedSandboxState = true;
sandboxIsRunning = true;
}, [&]() {
qCDebug(interfaceapp) << "Home sandbox does not appear to be running....";
if (wantsSandboxRunning) {
QString contentPath = getRunServerPath();
SandboxUtils::runLocalSandbox(contentPath, true, RUNNING_MARKER_FILENAME, noUpdater);
sandboxIsRunning = true;
}
determinedSandboxState = true;
});
// SandboxUtils::runLocalSandbox currently has 2 sec delay after spawning sandbox, so 4
// sec here is ok I guess. TODO: ping sandbox so we know it is up, perhaps?
quint64 MAX_WAIT_TIME = USECS_PER_SECOND * 4;
auto startWaiting = usecTimestampNow();
while (!determinedSandboxState && (usecTimestampNow() - startWaiting <= MAX_WAIT_TIME)) {
QCoreApplication::processEvents();
// updateHeartbeat() while polling so we don't scare the deadlock watchdog
updateHeartbeat();
usleep(USECS_PER_MSEC * 50); // 20hz
}
// start the nodeThread so its event loop is running
QThread* nodeThread = new QThread(this);
@ -941,10 +905,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
static const QString TESTER = "HIFI_TESTER";
auto gpuIdent = GPUIdent::getInstance();
auto glContextData = getGLContextData();
QJsonObject properties = {
{ "version", applicationVersion() },
{ "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) },
{ "previousSessionCrashed", _previousSessionCrashed },
{ "previousSessionRuntime", sessionRunTime.get() },
{ "cpu_architecture", QSysInfo::currentCpuArchitecture() },
@ -1221,6 +1187,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
#endif
// If launched from Steam, let it handle updates
const QString HIFI_NO_UPDATER_COMMAND_LINE_KEY = "--no-updater";
bool noUpdater = arguments().indexOf(HIFI_NO_UPDATER_COMMAND_LINE_KEY) != -1;
if (!noUpdater) {
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
connect(applicationUpdater.data(), &AutoUpdater::newVersionIsAvailable, dialogsManager.data(), &DialogsManager::showUpdateDialog);
@ -1436,15 +1404,17 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
connect(_window, SIGNAL(windowMinimizedChanged(bool)), this, SLOT(windowMinimizedChanged(bool)));
qCDebug(interfaceapp, "Startup time: %4.2f seconds.", (double)startupTimer.elapsed() / 1000.0);
auto textureCache = DependencyManager::get<TextureCache>();
{
PROFILE_RANGE(render, "Process Default Skybox");
auto textureCache = DependencyManager::get<TextureCache>();
QString skyboxUrl { PathUtils::resourcesPath() + "images/Default-Sky-9-cubemap.jpg" };
QString skyboxAmbientUrl { PathUtils::resourcesPath() + "images/Default-Sky-9-ambient.jpg" };
auto skyboxUrl = PathUtils::resourcesPath().toStdString() + "images/Default-Sky-9-cubemap.ktx";
_defaultSkyboxTexture = textureCache->getImageTexture(skyboxUrl, image::TextureUsage::CUBE_TEXTURE, { { "generateIrradiance", false } });
_defaultSkyboxAmbientTexture = textureCache->getImageTexture(skyboxAmbientUrl, image::TextureUsage::CUBE_TEXTURE, { { "generateIrradiance", true } });
_defaultSkyboxTexture = gpu::Texture::unserialize(skyboxUrl);
_defaultSkyboxAmbientTexture = _defaultSkyboxTexture;
_defaultSkybox->setCubemap(_defaultSkyboxTexture);
_defaultSkybox->setCubemap(_defaultSkyboxTexture);
}
EntityItem::setEntitiesShouldFadeFunction([this]() {
SharedNodePointer entityServerNode = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::EntityServer);
@ -1461,110 +1431,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
const auto testScript = property(hifi::properties::TEST).toUrl();
scriptEngines->loadScript(testScript, false);
} else {
enum HandControllerType {
Vive,
Oculus
};
static const std::map<HandControllerType, int> MIN_CONTENT_VERSION = {
{ Vive, 1 },
{ Oculus, 27 }
};
// Get sandbox content set version
auto acDirPath = PathUtils::getAppDataPath() + "../../" + BuildInfo::MODIFIED_ORGANIZATION + "/assignment-client/";
auto contentVersionPath = acDirPath + "content-version.txt";
qCDebug(interfaceapp) << "Checking " << contentVersionPath << " for content version";
int contentVersion = 0;
QFile contentVersionFile(contentVersionPath);
if (contentVersionFile.open(QIODevice::ReadOnly | QIODevice::Text)) {
QString line = contentVersionFile.readAll();
contentVersion = line.toInt(); // returns 0 if conversion fails
}
// Get controller availability
bool hasHandControllers = false;
HandControllerType handControllerType = Vive;
if (PluginUtils::isViveControllerAvailable()) {
hasHandControllers = true;
handControllerType = Vive;
} else if (PluginUtils::isOculusTouchControllerAvailable()) {
hasHandControllers = true;
handControllerType = Oculus;
}
// Check tutorial content versioning
bool hasTutorialContent = contentVersion >= MIN_CONTENT_VERSION.at(handControllerType);
// Check HMD use (may be technically available without being in use)
bool hasHMD = PluginUtils::isHMDAvailable();
bool isUsingHMD = hasHMD && hasHandControllers && _displayPlugin->isHmd();
Setting::Handle<bool> tutorialComplete { "tutorialComplete", false };
Setting::Handle<bool> firstRun { Settings::firstRun, true };
bool isTutorialComplete = tutorialComplete.get();
bool shouldGoToTutorial = isUsingHMD && hasTutorialContent && !isTutorialComplete;
qCDebug(interfaceapp) << "HMD:" << hasHMD << ", Hand Controllers: " << hasHandControllers << ", Using HMD: " << isUsingHMD;
qCDebug(interfaceapp) << "Tutorial version:" << contentVersion << ", sufficient:" << hasTutorialContent <<
", complete:" << isTutorialComplete << ", should go:" << shouldGoToTutorial;
// when --url in command line, teleport to location
const QString HIFI_URL_COMMAND_LINE_KEY = "--url";
int urlIndex = arguments().indexOf(HIFI_URL_COMMAND_LINE_KEY);
QString addressLookupString;
if (urlIndex != -1) {
addressLookupString = arguments().value(urlIndex + 1);
}
const QString TUTORIAL_PATH = "/tutorial_begin";
if (shouldGoToTutorial) {
if (sandboxIsRunning) {
qCDebug(interfaceapp) << "Home sandbox appears to be running, going to Home.";
DependencyManager::get<AddressManager>()->goToLocalSandbox(TUTORIAL_PATH);
} else {
qCDebug(interfaceapp) << "Home sandbox does not appear to be running, going to Entry.";
if (firstRun.get()) {
showHelp();
}
if (addressLookupString.isEmpty()) {
DependencyManager::get<AddressManager>()->goToEntry();
} else {
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
}
}
} else {
bool isFirstRun = firstRun.get();
if (isFirstRun) {
showHelp();
}
// If this is a first run we short-circuit the address passed in
if (isFirstRun) {
if (isUsingHMD) {
if (sandboxIsRunning) {
qCDebug(interfaceapp) << "Home sandbox appears to be running, going to Home.";
DependencyManager::get<AddressManager>()->goToLocalSandbox();
} else {
qCDebug(interfaceapp) << "Home sandbox does not appear to be running, going to Entry.";
DependencyManager::get<AddressManager>()->goToEntry();
}
} else {
DependencyManager::get<AddressManager>()->goToEntry();
}
} else {
qCDebug(interfaceapp) << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
}
}
_connectionMonitor.init();
// After all of the constructor is completed, then set firstRun to false.
firstRun.set(false);
PROFILE_RANGE(render, "GetSandboxStatus");
auto reply = SandboxUtils::getStatus();
connect(reply, &QNetworkReply::finished, this, [=] {
handleSandboxStatus(reply);
});
}
// Monitor model assets (e.g., from Clara.io) added to the world that may need resizing.
@ -1688,7 +1559,6 @@ void Application::updateHeartbeat() const {
void Application::aboutToQuit() {
emit beforeAboutToQuit();
DependencyManager::get<AudioClient>()->beforeAboutToQuit();
foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) {
if (inputPlugin->isActive()) {
@ -1789,14 +1659,13 @@ void Application::cleanupBeforeQuit() {
_snapshotSoundInjector->stop();
}
// stop audio after QML, as there are unexplained audio crashes originating in qtwebengine
// stop the AudioClient, synchronously
// FIXME: something else is holding a reference to AudioClient,
// so it must be explicitly synchronously stopped here
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(),
"stop", Qt::BlockingQueuedConnection);
"cleanupBeforeQuit", Qt::BlockingQueuedConnection);
// destroy Audio so it and its threads have a chance to go down safely
// this must happen after QML, as there are unexplained audio crashes originating in qtwebengine
DependencyManager::destroy<AudioClient>();
DependencyManager::destroy<AudioInjectorManager>();
@ -2053,6 +1922,8 @@ void Application::initializeUi() {
rootContext->setContextProperty("ApplicationCompositor", &getApplicationCompositor());
rootContext->setContextProperty("AvatarInputs", AvatarInputs::getInstance());
if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) {
rootContext->setContextProperty("Steam", new SteamScriptingInterface(engine, steamClient.get()));
}
@ -2470,6 +2341,118 @@ void Application::resizeGL() {
}
}
void Application::handleSandboxStatus(QNetworkReply* reply) {
PROFILE_RANGE(render, "HandleSandboxStatus");
bool sandboxIsRunning = SandboxUtils::readStatus(reply->readAll());
qDebug() << "HandleSandboxStatus" << sandboxIsRunning;
enum HandControllerType {
Vive,
Oculus
};
static const std::map<HandControllerType, int> MIN_CONTENT_VERSION = {
{ Vive, 1 },
{ Oculus, 27 }
};
// Get sandbox content set version
auto acDirPath = PathUtils::getAppDataPath() + "../../" + BuildInfo::MODIFIED_ORGANIZATION + "/assignment-client/";
auto contentVersionPath = acDirPath + "content-version.txt";
qCDebug(interfaceapp) << "Checking " << contentVersionPath << " for content version";
int contentVersion = 0;
QFile contentVersionFile(contentVersionPath);
if (contentVersionFile.open(QIODevice::ReadOnly | QIODevice::Text)) {
QString line = contentVersionFile.readAll();
contentVersion = line.toInt(); // returns 0 if conversion fails
}
// Get controller availability
bool hasHandControllers = false;
HandControllerType handControllerType = Vive;
if (PluginUtils::isViveControllerAvailable()) {
hasHandControllers = true;
handControllerType = Vive;
} else if (PluginUtils::isOculusTouchControllerAvailable()) {
hasHandControllers = true;
handControllerType = Oculus;
}
// Check tutorial content versioning
bool hasTutorialContent = contentVersion >= MIN_CONTENT_VERSION.at(handControllerType);
// Check HMD use (may be technically available without being in use)
bool hasHMD = PluginUtils::isHMDAvailable();
bool isUsingHMD = hasHMD && hasHandControllers && _displayPlugin->isHmd();
Setting::Handle<bool> tutorialComplete{ "tutorialComplete", false };
Setting::Handle<bool> firstRun{ Settings::firstRun, true };
bool isTutorialComplete = tutorialComplete.get();
bool shouldGoToTutorial = isUsingHMD && hasTutorialContent && !isTutorialComplete;
qCDebug(interfaceapp) << "HMD:" << hasHMD << ", Hand Controllers: " << hasHandControllers << ", Using HMD: " << isUsingHMD;
qCDebug(interfaceapp) << "Tutorial version:" << contentVersion << ", sufficient:" << hasTutorialContent <<
", complete:" << isTutorialComplete << ", should go:" << shouldGoToTutorial;
// when --url in command line, teleport to location
const QString HIFI_URL_COMMAND_LINE_KEY = "--url";
int urlIndex = arguments().indexOf(HIFI_URL_COMMAND_LINE_KEY);
QString addressLookupString;
if (urlIndex != -1) {
addressLookupString = arguments().value(urlIndex + 1);
}
const QString TUTORIAL_PATH = "/tutorial_begin";
if (shouldGoToTutorial) {
if (sandboxIsRunning) {
qCDebug(interfaceapp) << "Home sandbox appears to be running, going to Home.";
DependencyManager::get<AddressManager>()->goToLocalSandbox(TUTORIAL_PATH);
} else {
qCDebug(interfaceapp) << "Home sandbox does not appear to be running, going to Entry.";
if (firstRun.get()) {
showHelp();
}
if (addressLookupString.isEmpty()) {
DependencyManager::get<AddressManager>()->goToEntry();
} else {
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
}
}
} else {
bool isFirstRun = firstRun.get();
if (isFirstRun) {
showHelp();
}
// If this is a first run we short-circuit the address passed in
if (isFirstRun) {
if (isUsingHMD) {
if (sandboxIsRunning) {
qCDebug(interfaceapp) << "Home sandbox appears to be running, going to Home.";
DependencyManager::get<AddressManager>()->goToLocalSandbox();
} else {
qCDebug(interfaceapp) << "Home sandbox does not appear to be running, going to Entry.";
DependencyManager::get<AddressManager>()->goToEntry();
}
} else {
DependencyManager::get<AddressManager>()->goToEntry();
}
} else {
qCDebug(interfaceapp) << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
}
}
_connectionMonitor.init();
// After all of the constructor is completed, then set firstRun to false.
firstRun.set(false);
}
bool Application::importJSONFromURL(const QString& urlString) {
// we only load files that terminate in just .json (not .svo.json and not .ava.json)
// if they come from the High Fidelity Marketplace Assets CDN
@ -4331,13 +4314,6 @@ void Application::update(float deltaTime) {
if (nearbyEntitiesAreReadyForPhysics()) {
_physicsEnabled = true;
getMyAvatar()->updateMotionBehaviorFromMenu();
} else {
auto characterController = getMyAvatar()->getCharacterController();
if (characterController) {
// if we have a character controller, disable it here so the avatar doesn't get stuck due to
// a non-loading collision hull.
characterController->setEnabled(false);
}
}
}
} else if (domainLoadingInProgress) {
@ -5282,9 +5258,8 @@ void Application::nodeActivated(SharedNodePointer node) {
}
if (node->getType() == NodeType::AvatarMixer) {
// new avatar mixer, send off our identity packet right away
// new avatar mixer, send off our identity packet on next update loop
getMyAvatar()->markIdentityDataChanged();
getMyAvatar()->sendIdentityPacket();
getMyAvatar()->resetLastSent();
}
}
@ -6453,7 +6428,7 @@ void Application::takeSnapshot(bool notify, bool includeAnimated, float aspectRa
if (!includeAnimated) {
// Tell the dependency manager that the capture of the still snapshot has taken place.
emit DependencyManager::get<WindowScriptingInterface>()->stillSnapshotTaken(path, notify);
} else {
} else if (!SnapshotAnimated::isAlreadyTakingSnapshotAnimated()) {
// Get an animated GIF snapshot and save it
SnapshotAnimated::saveSnapshotAnimated(path, aspectRatio, qApp, DependencyManager::get<WindowScriptingInterface>());
}


@ -112,17 +112,7 @@ class Application : public QApplication,
// TODO? Get rid of those
friend class OctreePacketProcessor;
private:
bool _shouldRunServer { false };
QString _runServerPath;
RunningMarker _runningMarker;
public:
// startup related getter/setters
bool shouldRunServer() const { return _shouldRunServer; }
bool hasRunServerPath() const { return !_runServerPath.isEmpty(); }
QString getRunServerPath() const { return _runServerPath; }
// virtual functions required for PluginContainer
virtual ui::Menu* getPrimaryMenu() override;
virtual void requestReset() override { resetSensors(true); }
@ -146,7 +136,7 @@ public:
static void initPlugins(const QStringList& arguments);
static void shutdownPlugins();
Application(int& argc, char** argv, QElapsedTimer& startup_time, bool runServer, QString runServerPathOption);
Application(int& argc, char** argv, QElapsedTimer& startup_time);
~Application();
void postLambdaEvent(std::function<void()> f) override;
@ -452,6 +442,8 @@ private slots:
void addAssetToWorldInfoTimeout();
void addAssetToWorldErrorTimeout();
void handleSandboxStatus(QNetworkReply* reply);
private:
static void initDisplay();
void init();


@ -14,7 +14,7 @@
#include <avatar/AvatarActionHold.h>
#include <avatar/AvatarActionFarGrab.h>
#include <ObjectActionOffset.h>
#include <ObjectActionSpring.h>
#include <ObjectActionTractor.h>
#include <ObjectActionTravelOriented.h>
#include <ObjectConstraintHinge.h>
#include <ObjectConstraintSlider.h>
@ -32,7 +32,9 @@ EntityDynamicPointer interfaceDynamicFactory(EntityDynamicType type, const QUuid
case DYNAMIC_TYPE_OFFSET:
return std::make_shared<ObjectActionOffset>(id, ownerEntity);
case DYNAMIC_TYPE_SPRING:
return std::make_shared<ObjectActionSpring>(id, ownerEntity);
qDebug() << "The 'spring' Action is deprecated. Replacing with 'tractor' Action.";
case DYNAMIC_TYPE_TRACTOR:
return std::make_shared<ObjectActionTractor>(id, ownerEntity);
case DYNAMIC_TYPE_HOLD:
return std::make_shared<AvatarActionHold>(id, ownerEntity);
case DYNAMIC_TYPE_TRAVEL_ORIENTED:


@ -197,7 +197,7 @@ Menu::Menu() {
0, // QML Qt::Key_Apostrophe,
qApp, SLOT(resetSensors()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableAvatarCollisions, 0, true,
avatar.get(), SLOT(updateMotionBehaviorFromMenu()));
// Avatar > AvatarBookmarks related menus -- Note: the AvatarBookmarks class adds its own submenus here.
@ -523,6 +523,8 @@ Menu::Menu() {
avatar.get(), SLOT(setEnableDebugDrawSensorToWorldMatrix(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderIKTargets, 0, false,
avatar.get(), SLOT(setEnableDebugDrawIKTargets(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderIKConstraints, 0, false,
avatar.get(), SLOT(setEnableDebugDrawIKConstraints(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ActionMotorControl,
Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar.get(), SLOT(updateMotionBehaviorFromMenu()),


@ -96,7 +96,7 @@ namespace MenuOption {
const QString DontRenderEntitiesAsScene = "Don't Render Entities as Scene";
const QString EchoLocalAudio = "Echo Local Audio";
const QString EchoServerAudio = "Echo Server Audio";
const QString EnableCharacterController = "Collide with world";
const QString EnableAvatarCollisions = "Enable Avatar Collisions";
const QString EnableInverseKinematics = "Enable Inverse Kinematics";
const QString EntityScriptServerLog = "Entity Script Server Log";
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
@ -161,6 +161,7 @@ namespace MenuOption {
const QString RenderResolutionQuarter = "1/4";
const QString RenderSensorToWorldMatrix = "Show SensorToWorld Matrix";
const QString RenderIKTargets = "Show IK Targets";
const QString RenderIKConstraints = "Show IK Constraints";
const QString ResetAvatarSize = "Reset Avatar Size";
const QString ResetSensors = "Reset Sensors";
const QString RunningScripts = "Running Scripts...";


@ -12,7 +12,7 @@
#include "AvatarActionFarGrab.h"
AvatarActionFarGrab::AvatarActionFarGrab(const QUuid& id, EntityItemPointer ownerEntity) :
ObjectActionSpring(id, ownerEntity) {
ObjectActionTractor(id, ownerEntity) {
_type = DYNAMIC_TYPE_FAR_GRAB;
#if WANT_DEBUG
qDebug() << "AvatarActionFarGrab::AvatarActionFarGrab";
@ -32,7 +32,7 @@ QByteArray AvatarActionFarGrab::serialize() const {
dataStream << DYNAMIC_TYPE_FAR_GRAB;
dataStream << getID();
dataStream << ObjectActionSpring::springVersion;
dataStream << ObjectActionTractor::tractorVersion;
serializeParameters(dataStream);
@ -55,7 +55,7 @@ void AvatarActionFarGrab::deserialize(QByteArray serializedArguments) {
uint16_t serializationVersion;
dataStream >> serializationVersion;
if (serializationVersion != ObjectActionSpring::springVersion) {
if (serializationVersion != ObjectActionTractor::tractorVersion) {
assert(false);
return;
}


@ -13,9 +13,9 @@
#define hifi_AvatarActionFarGrab_h
#include <EntityItem.h>
#include <ObjectActionSpring.h>
#include <ObjectActionTractor.h>
class AvatarActionFarGrab : public ObjectActionSpring {
class AvatarActionFarGrab : public ObjectActionTractor {
public:
AvatarActionFarGrab(const QUuid& id, EntityItemPointer ownerEntity);
virtual ~AvatarActionFarGrab();


@ -21,7 +21,7 @@ const int AvatarActionHold::velocitySmoothFrames = 6;
AvatarActionHold::AvatarActionHold(const QUuid& id, EntityItemPointer ownerEntity) :
ObjectActionSpring(id, ownerEntity)
ObjectActionTractor(id, ownerEntity)
{
_type = DYNAMIC_TYPE_HOLD;
_measuredLinearVelocities.resize(AvatarActionHold::velocitySmoothFrames);
@ -224,12 +224,12 @@ bool AvatarActionHold::getTarget(float deltaTimeStep, glm::quat& rotation, glm::
void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
if (_kinematic) {
if (prepareForSpringUpdate(deltaTimeStep)) {
if (prepareForTractorUpdate(deltaTimeStep)) {
doKinematicUpdate(deltaTimeStep);
}
} else {
forceBodyNonStatic();
ObjectActionSpring::updateActionWorker(deltaTimeStep);
ObjectActionTractor::updateActionWorker(deltaTimeStep);
}
}


@ -16,12 +16,12 @@
#include <EntityItem.h>
#include <AnimPose.h>
#include <ObjectActionSpring.h>
#include <ObjectActionTractor.h>
#include "avatar/MyAvatar.h"
class AvatarActionHold : public ObjectActionSpring {
class AvatarActionHold : public ObjectActionTractor {
public:
AvatarActionHold(const QUuid& id, EntityItemPointer ownerEntity);
virtual ~AvatarActionHold();

View file

@ -189,6 +189,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
btCollisionShape* shape = const_cast<btCollisionShape*>(ObjectMotionState::getShapeManager()->getShape(shapeInfo));
if (shape) {
AvatarMotionState* motionState = new AvatarMotionState(avatar, shape);
motionState->setMass(avatar->computeMass());
avatar->setPhysicsCallback([=] (uint32_t flags) { motionState->addDirtyFlags(flags); });
_motionStates.insert(avatar.get(), motionState);
_motionStatesToAddToPhysics.insert(motionState);

View file

@ -21,9 +21,9 @@
#include <PIDController.h>
#include <SimpleMovingAverage.h>
#include <shared/RateCounter.h>
#include <avatars-renderer/AvatarMotionState.h>
#include <avatars-renderer/ScriptAvatar.h>
#include "AvatarMotionState.h"
#include "MyAvatar.h"
class AudioInjector;

View file

@ -19,9 +19,6 @@
AvatarMotionState::AvatarMotionState(AvatarSharedPointer avatar, const btCollisionShape* shape) : ObjectMotionState(shape), _avatar(avatar) {
assert(_avatar);
_type = MOTIONSTATE_TYPE_AVATAR;
if (_shape) {
_mass = 100.0f; // HACK
}
}
AvatarMotionState::~AvatarMotionState() {

View file

@ -14,10 +14,10 @@
#include <QSet>
#include <avatars-renderer/Avatar.h>
#include <ObjectMotionState.h>
#include <BulletUtil.h>
#include "Avatar.h"
class AvatarMotionState : public ObjectMotionState {
public:

interface/src/avatar/MyAvatar.cpp Normal file → Executable file
View file

@ -38,6 +38,7 @@
#include <UserActivityLogger.h>
#include <AnimDebugDraw.h>
#include <AnimClip.h>
#include <AnimInverseKinematics.h>
#include <recording/Deck.h>
#include <recording/Recorder.h>
#include <recording/Clip.h>
@ -109,6 +110,9 @@ MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_useAdvancedMovementControls("advancedMovementForHandControllersIsChecked", false),
_smoothOrientationTimer(std::numeric_limits<float>::max()),
_smoothOrientationInitial(),
_smoothOrientationTarget(),
_hmdSensorMatrix(),
_hmdSensorOrientation(),
_hmdSensorPosition(),
@ -150,8 +154,6 @@ MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
// when we leave a domain we lift whatever restrictions that domain may have placed on our scale
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &MyAvatar::clearScaleRestriction);
_characterController.setEnabled(true);
_bodySensorMatrix = deriveBodyFromHMDSensor();
using namespace recording;
@ -165,12 +167,14 @@ MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
if (recordingInterface->getPlayFromCurrentLocation()) {
setRecordingBasis();
}
_wasCharacterControllerEnabled = _characterController.isEnabled();
_characterController.setEnabled(false);
_previousCollisionGroup = _characterController.computeCollisionGroup();
_characterController.setCollisionless(true);
} else {
clearRecordingBasis();
useFullAvatarURL(_fullAvatarURLFromPreferences, _fullAvatarModelName);
_characterController.setEnabled(_wasCharacterControllerEnabled);
if (_previousCollisionGroup != BULLET_COLLISION_GROUP_COLLISIONLESS) {
_characterController.setCollisionless(false);
}
}
auto audioIO = DependencyManager::get<AudioClient>();
@ -235,6 +239,7 @@ MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
});
connect(rig.get(), SIGNAL(onLoadComplete()), this, SIGNAL(onLoadComplete()));
_characterController.setDensity(_density);
}
MyAvatar::~MyAvatar() {
@ -264,6 +269,17 @@ QVariant MyAvatar::getOrientationVar() const {
return quatToVariant(Avatar::getOrientation());
}
glm::quat MyAvatar::getOrientationOutbound() const {
// Allows MyAvatar to send out smoothed data to remote agents if required.
if (_smoothOrientationTimer > SMOOTH_TIME_ORIENTATION) {
return (getLocalOrientation());
}
// Smooth the remote avatar movement.
float t = _smoothOrientationTimer / SMOOTH_TIME_ORIENTATION;
float interp = Interpolate::easeInOutQuad(glm::clamp(t, 0.0f, 1.0f));
return (slerp(_smoothOrientationInitial, _smoothOrientationTarget, interp));
}
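For reference, the smoothing above reduces to an eased spherical interpolation over a fixed window T = SMOOTH_TIME_ORIENTATION (0.5 s in this change). With q_0 = _smoothOrientationInitial, q_1 = _smoothOrientationTarget, and assuming Interpolate::easeInOutQuad is the standard quadratic ease curve:

\[
t = \operatorname{clamp}\!\left(\tfrac{\text{timer}}{T},\, 0,\, 1\right), \qquad
f(t) = \begin{cases} 2t^{2} & t < 0.5 \\ 1 - 2(1 - t)^{2} & t \ge 0.5 \end{cases}, \qquad
q_{\text{out}} = \operatorname{slerp}(q_0,\, q_1,\, f(t)),
\]

and once the timer passes T the local orientation is sent unmodified.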
// virtual
void MyAvatar::simulateAttachments(float deltaTime) {
@ -290,6 +306,11 @@ QByteArray MyAvatar::toByteArrayStateful(AvatarDataDetail dataDetail) {
}
void MyAvatar::resetSensorsAndBody() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "resetSensorsAndBody");
return;
}
qApp->getActiveDisplayPlugin()->resetSensors();
reset(true, false, true);
}
@ -387,6 +408,11 @@ void MyAvatar::update(float deltaTime) {
float tau = deltaTime / HMD_FACING_TIMESCALE;
_hmdSensorFacingMovingAverage = lerp(_hmdSensorFacingMovingAverage, _hmdSensorFacing, tau);
if (_smoothOrientationTimer < SMOOTH_TIME_ORIENTATION) {
_rotationChanged = usecTimestampNow();
_smoothOrientationTimer += deltaTime;
}
#ifdef DEBUG_DRAW_HMD_MOVING_AVERAGE
glm::vec3 p = transformPoint(getSensorToWorldMatrix(), _hmdSensorPosition + glm::vec3(_hmdSensorFacingMovingAverage.x, 0.0f, _hmdSensorFacingMovingAverage.y));
DebugDraw::getInstance().addMarker("facing-avg", getOrientation(), p, glm::vec4(1.0f));
@ -504,6 +530,7 @@ void MyAvatar::simulate(float deltaTime) {
if (_rig) {
_rig->setEnableDebugDrawIKTargets(_enableDebugDrawIKTargets);
_rig->setEnableDebugDrawIKConstraints(_enableDebugDrawIKConstraints);
}
_skeletonModel->simulate(deltaTime);
@ -552,12 +579,12 @@ void MyAvatar::simulate(float deltaTime) {
EntityTreePointer entityTree = entityTreeRenderer ? entityTreeRenderer->getTree() : nullptr;
if (entityTree) {
bool flyingAllowed = true;
bool ghostingAllowed = true;
bool collisionlessAllowed = true;
entityTree->withWriteLock([&] {
std::shared_ptr<ZoneEntityItem> zone = entityTreeRenderer->myAvatarZone();
if (zone) {
flyingAllowed = zone->getFlyingAllowed();
ghostingAllowed = zone->getGhostingAllowed();
collisionlessAllowed = zone->getGhostingAllowed();
}
auto now = usecTimestampNow();
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
@ -588,9 +615,7 @@ void MyAvatar::simulate(float deltaTime) {
}
});
_characterController.setFlyingAllowed(flyingAllowed);
if (!_characterController.isEnabled() && !ghostingAllowed) {
_characterController.setEnabled(true);
}
_characterController.setCollisionlessAllowed(collisionlessAllowed);
}
updateAvatarEntities();
@ -929,6 +954,10 @@ void MyAvatar::setEnableDebugDrawIKTargets(bool isEnabled) {
_enableDebugDrawIKTargets = isEnabled;
}
void MyAvatar::setEnableDebugDrawIKConstraints(bool isEnabled) {
_enableDebugDrawIKConstraints = isEnabled;
}
void MyAvatar::setEnableMeshVisible(bool isEnabled) {
_skeletonModel->setVisibleInScene(isEnabled, qApp->getMain3DScene());
}
@ -1449,7 +1478,8 @@ void MyAvatar::updateMotors() {
_characterController.clearMotors();
glm::quat motorRotation;
if (_motionBehaviors & AVATAR_MOTION_ACTION_MOTOR_ENABLED) {
if (_characterController.getState() == CharacterController::State::Hover) {
if (_characterController.getState() == CharacterController::State::Hover ||
_characterController.computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS) {
motorRotation = getMyHead()->getCameraOrientation();
} else {
// non-hovering = walking: follow camera twist about vertical but not lift
@ -1495,6 +1525,7 @@ void MyAvatar::prepareForPhysicsSimulation() {
qDebug() << "Warning: getParentVelocity failed" << getID();
parentVelocity = glm::vec3();
}
_characterController.handleChangedCollisionGroup();
_characterController.setParentVelocity(parentVelocity);
_characterController.setPositionAndOrientation(getPosition(), getOrientation());
@ -1806,8 +1837,10 @@ void MyAvatar::updateOrientation(float deltaTime) {
// Comfort Mode: If you press any of the left/right rotation drive keys or input, you'll
// get an instantaneous 15 degree turn. If you keep holding the key down you'll get another
// snap turn every half second.
bool snapTurn = false;
if (getDriveKey(STEP_YAW) != 0.0f) {
totalBodyYaw += getDriveKey(STEP_YAW);
snapTurn = true;
}
// use head/HMD orientation to turn while flying
@ -1840,10 +1873,17 @@ void MyAvatar::updateOrientation(float deltaTime) {
totalBodyYaw += (speedFactor * deltaAngle * (180.0f / PI));
}
// update body orientation by movement inputs
glm::quat initialOrientation = getOrientationOutbound();
setOrientation(getOrientation() * glm::quat(glm::radians(glm::vec3(0.0f, totalBodyYaw, 0.0f))));
if (snapTurn) {
// Whether or not a smoothing is already in progress, reset the smoothing timer, take the avatar's current orientation as the starting point, and smooth to the new orientation.
_smoothOrientationInitial = initialOrientation;
_smoothOrientationTarget = getOrientation();
_smoothOrientationTimer = 0.0f;
}
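A minimal sketch of the snap-turn smoothing pattern these two hunks implement, using plain glm types outside the engine: the instantaneous turn is applied locally, while the orientation sent to remote agents blends from the pre-turn orientation to the new one. The struct and its names are illustrative, not MyAvatar's API.

// Sketch only: approximates MyAvatar's snap-turn smoothing with plain glm types.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <algorithm>
#include <limits>
#include <cstdio>

struct OrientationSmoother {
    float smoothTime = 0.5f;                                   // mirrors SMOOTH_TIME_ORIENTATION
    float timer = std::numeric_limits<float>::max();           // "not smoothing" sentinel
    glm::quat initial, target;

    // Call when a snap turn (or any instantaneous rotation) is applied locally.
    void startSmoothing(const glm::quat& from, const glm::quat& to) {
        initial = from;
        target = to;
        timer = 0.0f;
    }

    // Advance once per simulation tick.
    void update(float dt) { if (timer < smoothTime) { timer += dt; } }

    // Orientation to send to remote agents (local rendering uses 'current' directly).
    glm::quat outbound(const glm::quat& current) const {
        if (timer > smoothTime) { return current; }
        float t = std::min(timer / smoothTime, 1.0f);
        float f = (t < 0.5f) ? 2.0f * t * t : 1.0f - 2.0f * (1.0f - t) * (1.0f - t); // easeInOutQuad
        return glm::slerp(initial, target, f);
    }
};

int main() {
    OrientationSmoother smoother;
    glm::quat before(1.0f, 0.0f, 0.0f, 0.0f);                                    // identity
    glm::quat after = glm::angleAxis(glm::radians(15.0f), glm::vec3(0.0f, 1.0f, 0.0f)); // 15-degree snap turn
    smoother.startSmoothing(before, after);
    for (int i = 0; i < 5; ++i) {
        smoother.update(0.1f);
        glm::quat out = smoother.outbound(after);
        printf("t=%.1f  w=%.3f\n", smoother.timer, out.w);
    }
    return 0;
}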
getHead()->setBasePitch(getHead()->getBasePitch() + getDriveKey(PITCH) * _pitchSpeed * deltaTime);
if (qApp->isHMDMode()) {
@ -1883,8 +1923,9 @@ void MyAvatar::updateActionMotor(float deltaTime) {
glm::vec3 direction = forward + right;
CharacterController::State state = _characterController.getState();
if (state == CharacterController::State::Hover) {
// we're flying --> support vertical motion
if (state == CharacterController::State::Hover ||
_characterController.computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS) {
// we can fly --> support vertical motion
glm::vec3 up = (getDriveKey(TRANSLATE_Y)) * IDENTITY_UP;
direction += up;
}
@ -1906,7 +1947,7 @@ void MyAvatar::updateActionMotor(float deltaTime) {
float finalMaxMotorSpeed = getUniformScale() * MAX_ACTION_MOTOR_SPEED;
float speedGrowthTimescale = 2.0f;
float speedIncreaseFactor = 1.8f;
motorSpeed *= 1.0f + glm::clamp(deltaTime / speedGrowthTimescale , 0.0f, 1.0f) * speedIncreaseFactor;
motorSpeed *= 1.0f + glm::clamp(deltaTime / speedGrowthTimescale, 0.0f, 1.0f) * speedIncreaseFactor;
const float maxBoostSpeed = getUniformScale() * MAX_BOOST_SPEED;
if (_isPushing) {
@ -1949,9 +1990,17 @@ void MyAvatar::updatePosition(float deltaTime) {
measureMotionDerivatives(deltaTime);
_moving = speed2 > MOVING_SPEED_THRESHOLD_SQUARED;
} else {
// physics simulation updated elsewhere
float speed2 = glm::length2(velocity);
_moving = speed2 > MOVING_SPEED_THRESHOLD_SQUARED;
if (_moving) {
// scan for walkability
glm::vec3 position = getPosition();
MyCharacterController::RayShotgunResult result;
glm::vec3 step = deltaTime * (getRotation() * _actionMotorVelocity);
_characterController.testRayShotgun(position, step, result);
_characterController.setStepUpEnabled(result.walkable);
}
}
// capture the head rotation, in sensor space, when the user first indicates they would like to move/fly.
@ -2188,30 +2237,33 @@ void MyAvatar::updateMotionBehaviorFromMenu() {
} else {
_motionBehaviors &= ~AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED;
}
setCharacterControllerEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
setCollisionsEnabled(menu->isOptionChecked(MenuOption::EnableAvatarCollisions));
}
void MyAvatar::setCharacterControllerEnabled(bool enabled) {
void MyAvatar::setCollisionsEnabled(bool enabled) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "setCharacterControllerEnabled", Q_ARG(bool, enabled));
QMetaObject::invokeMethod(this, "setCollisionsEnabled", Q_ARG(bool, enabled));
return;
}
bool ghostingAllowed = true;
auto entityTreeRenderer = qApp->getEntities();
if (entityTreeRenderer) {
std::shared_ptr<ZoneEntityItem> zone = entityTreeRenderer->myAvatarZone();
if (zone) {
ghostingAllowed = zone->getGhostingAllowed();
}
}
_characterController.setEnabled(ghostingAllowed ? enabled : true);
_characterController.setCollisionless(!enabled);
}
bool MyAvatar::getCollisionsEnabled() {
// may return 'false' even though the collisionless option was requested
// because the zone may disallow collisionless avatars
return _characterController.computeCollisionGroup() != BULLET_COLLISION_GROUP_COLLISIONLESS;
}
void MyAvatar::setCharacterControllerEnabled(bool enabled) {
qCDebug(interfaceapp) << "MyAvatar.characterControllerEnabled is deprecated. Use MyAvatar.collisionsEnabled instead.";
setCollisionsEnabled(enabled);
}
bool MyAvatar::getCharacterControllerEnabled() {
return _characterController.isEnabled();
qCDebug(interfaceapp) << "MyAvatar.characterControllerEnabled is deprecated. Use MyAvatar.collisionsEnabled instead.";
return getCollisionsEnabled();
}
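The idea behind the new getter is that scripts only request collisionless mode; the zone has the final say, so getCollisionsEnabled reports the effective collision group instead of echoing the request. A simplified, self-contained sketch of that resolution logic, with hypothetical types that stand in for the engine's:

#include <cstdio>

// Hypothetical, simplified stand-ins for the engine's collision groups and zone policy.
enum class CollisionGroup { MyAvatar, Collisionless };

struct ZonePolicy { bool collisionlessAllowed; };

CollisionGroup computeCollisionGroup(bool collisionlessRequested, const ZonePolicy& zone) {
    // The request only takes effect if the current zone allows it.
    return (collisionlessRequested && zone.collisionlessAllowed) ? CollisionGroup::Collisionless
                                                                 : CollisionGroup::MyAvatar;
}

int main() {
    ZonePolicy strictZone{ false };      // zone disallows collisionless avatars
    bool requestedCollisionless = true;  // script asked for collisions off
    bool collisionsEnabled =
        computeCollisionGroup(requestedCollisionless, strictZone) != CollisionGroup::Collisionless;
    printf("collisionsEnabled: %d\n", collisionsEnabled); // prints 1: the zone overrides the request
    return 0;
}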
void MyAvatar::clearDriveKeys() {

View file

@ -96,7 +96,7 @@ class MyAvatar : public Avatar {
* @property rightHandTipPose {Pose} READ-ONLY. Returns a pose offset 30 cm from MyAvatar.rightHandPose
* @property hmdLeanRecenterEnabled {bool} This can be used to disable the HMD lean recenter behavior, which causes your avatar
* to follow your HMD as you walk around the room in room-scale VR. Disabling it is useful if you want to pin the avatar to a fixed location.
* @property characterControllerEnabled {bool} This can be used to disable collisions between the avatar and the world.
* @property collisionsEnabled {bool} This can be used to disable collisions between the avatar and the world.
* @property useAdvancedMovementControls {bool} Stores the user preference only, does not change user mappings, this is done in the defaultScript
* "scripts/system/controllers/toggleAdvancedMovementForHandControllers.js".
*/
@ -125,9 +125,10 @@ class MyAvatar : public Avatar {
Q_PROPERTY(controller::Pose rightHandTipPose READ getRightHandTipPose)
Q_PROPERTY(float energy READ getEnergy WRITE setEnergy)
Q_PROPERTY(float isAway READ getIsAway WRITE setAway)
Q_PROPERTY(bool isAway READ getIsAway WRITE setAway)
Q_PROPERTY(bool hmdLeanRecenterEnabled READ getHMDLeanRecenterEnabled WRITE setHMDLeanRecenterEnabled)
Q_PROPERTY(bool collisionsEnabled READ getCollisionsEnabled WRITE setCollisionsEnabled)
Q_PROPERTY(bool characterControllerEnabled READ getCharacterControllerEnabled WRITE setCharacterControllerEnabled)
Q_PROPERTY(bool useAdvancedMovementControls READ useAdvancedMovementControls WRITE setUseAdvancedMovementControls)
@ -189,6 +190,8 @@ public:
Q_INVOKABLE void setOrientationVar(const QVariant& newOrientationVar);
Q_INVOKABLE QVariant getOrientationVar() const;
// A method intended to be overridden by MyAvatar for polling orientation for network transmission.
glm::quat getOrientationOutbound() const override;
// Pass a recent sample of the HMD to the avatar.
// This can also update the avatar's position to follow the HMD
@ -470,8 +473,10 @@ public:
bool hasDriveInput() const;
Q_INVOKABLE void setCharacterControllerEnabled(bool enabled);
Q_INVOKABLE bool getCharacterControllerEnabled();
Q_INVOKABLE void setCollisionsEnabled(bool enabled);
Q_INVOKABLE bool getCollisionsEnabled();
Q_INVOKABLE void setCharacterControllerEnabled(bool enabled); // deprecated
Q_INVOKABLE bool getCharacterControllerEnabled(); // deprecated
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
@ -518,6 +523,7 @@ public slots:
void setEnableDebugDrawHandControllers(bool isEnabled);
void setEnableDebugDrawSensorToWorldMatrix(bool isEnabled);
void setEnableDebugDrawIKTargets(bool isEnabled);
void setEnableDebugDrawIKConstraints(bool isEnabled);
bool getEnableMeshVisible() const { return _skeletonModel->isVisible(); }
void setEnableMeshVisible(bool isEnabled);
void setUseAnimPreAndPostRotations(bool isEnabled);
@ -614,7 +620,7 @@ private:
SharedSoundPointer _collisionSound;
MyCharacterController _characterController;
bool _wasCharacterControllerEnabled { true };
int16_t _previousCollisionGroup { BULLET_COLLISION_GROUP_MY_AVATAR };
AvatarWeakPointer _lookAtTargetAvatar;
glm::vec3 _targetAvatarPosition;
@ -629,6 +635,14 @@ private:
Setting::Handle<float> _realWorldFieldOfView;
Setting::Handle<bool> _useAdvancedMovementControls;
// Smoothing.
const float SMOOTH_TIME_ORIENTATION = 0.5f;
// Smoothing data for blending from one position/orientation to another on remote agents.
float _smoothOrientationTimer;
glm::quat _smoothOrientationInitial;
glm::quat _smoothOrientationTarget;
// private methods
void updateOrientation(float deltaTime);
void updateActionMotor(float deltaTime);
@ -703,6 +717,7 @@ private:
bool _enableDebugDrawHandControllers { false };
bool _enableDebugDrawSensorToWorldMatrix { false };
bool _enableDebugDrawIKTargets { false };
bool _enableDebugDrawIKConstraints { false };
AudioListenerMode _audioListenerMode;
glm::vec3 _customListenPosition;

interface/src/avatar/MyCharacterController.cpp Normal file → Executable file
View file

@ -15,11 +15,15 @@
#include "MyAvatar.h"
// TODO: improve walking up steps
// TODO: make avatars able to walk up and down steps/slopes
// TODO: make avatars stand on steep slope
// TODO: make avatars not snag on low ceilings
void MyCharacterController::RayShotgunResult::reset() {
hitFraction = 1.0f;
walkable = true;
}
MyCharacterController::MyCharacterController(MyAvatar* avatar) {
assert(avatar);
@ -30,39 +34,33 @@ MyCharacterController::MyCharacterController(MyAvatar* avatar) {
MyCharacterController::~MyCharacterController() {
}
void MyCharacterController::setDynamicsWorld(btDynamicsWorld* world) {
CharacterController::setDynamicsWorld(world);
if (world) {
initRayShotgun(world);
}
}
void MyCharacterController::updateShapeIfNecessary() {
if (_pendingFlags & PENDING_FLAG_UPDATE_SHAPE) {
_pendingFlags &= ~PENDING_FLAG_UPDATE_SHAPE;
// compute new dimensions from avatar's bounding box
float x = _boxScale.x;
float z = _boxScale.z;
_radius = 0.5f * sqrtf(0.5f * (x * x + z * z));
_halfHeight = 0.5f * _boxScale.y - _radius;
float MIN_HALF_HEIGHT = 0.1f;
if (_halfHeight < MIN_HALF_HEIGHT) {
_halfHeight = MIN_HALF_HEIGHT;
}
// NOTE: _shapeLocalOffset is already computed
if (_radius > 0.0f) {
// create RigidBody if it doesn't exist
if (!_rigidBody) {
// HACK: use some simple mass property defaults for now
const float DEFAULT_AVATAR_MASS = 100.0f;
const btVector3 DEFAULT_AVATAR_INERTIA_TENSOR(30.0f, 8.0f, 30.0f);
btCollisionShape* shape = new btCapsuleShape(_radius, 2.0f * _halfHeight);
_rigidBody = new btRigidBody(DEFAULT_AVATAR_MASS, nullptr, shape, DEFAULT_AVATAR_INERTIA_TENSOR);
btCollisionShape* shape = computeShape();
btScalar mass = 1.0f;
btVector3 inertia(1.0f, 1.0f, 1.0f);
_rigidBody = new btRigidBody(mass, nullptr, shape, inertia);
} else {
btCollisionShape* shape = _rigidBody->getCollisionShape();
if (shape) {
delete shape;
}
shape = new btCapsuleShape(_radius, 2.0f * _halfHeight);
shape = computeShape();
_rigidBody->setCollisionShape(shape);
}
updateMassProperties();
_rigidBody->setSleepingThresholds(0.0f, 0.0f);
_rigidBody->setAngularFactor(0.0f);
@ -72,12 +70,282 @@ void MyCharacterController::updateShapeIfNecessary() {
if (_state == State::Hover) {
_rigidBody->setGravity(btVector3(0.0f, 0.0f, 0.0f));
} else {
_rigidBody->setGravity(DEFAULT_CHARACTER_GRAVITY * _currentUp);
_rigidBody->setGravity(_gravity * _currentUp);
}
//_rigidBody->setCollisionFlags(btCollisionObject::CF_CHARACTER_OBJECT);
_rigidBody->setCollisionFlags(_rigidBody->getCollisionFlags() &
~(btCollisionObject::CF_KINEMATIC_OBJECT | btCollisionObject::CF_STATIC_OBJECT));
} else {
// TODO: handle this failure case
}
}
}
bool MyCharacterController::testRayShotgun(const glm::vec3& position, const glm::vec3& step, RayShotgunResult& result) {
btVector3 rayDirection = glmToBullet(step);
btScalar stepLength = rayDirection.length();
if (stepLength < FLT_EPSILON) {
return false;
}
rayDirection /= stepLength;
// get _ghost ready for ray traces
btTransform transform = _rigidBody->getWorldTransform();
btVector3 newPosition = glmToBullet(position);
transform.setOrigin(newPosition);
_ghost.setWorldTransform(transform);
btMatrix3x3 rotation = transform.getBasis();
_ghost.refreshOverlappingPairCache();
CharacterRayResult rayResult(&_ghost);
CharacterRayResult closestRayResult(&_ghost);
btVector3 rayStart;
btVector3 rayEnd;
// compute rotation that will orient local ray start points to face step direction
btVector3 forward = rotation * btVector3(0.0f, 0.0f, -1.0f);
btVector3 adjustedDirection = rayDirection - rayDirection.dot(_currentUp) * _currentUp;
btVector3 axis = forward.cross(adjustedDirection);
btScalar lengthAxis = axis.length();
if (lengthAxis > FLT_EPSILON) {
// we're walking sideways
btScalar angle = acosf(lengthAxis / adjustedDirection.length());
if (rayDirection.dot(forward) < 0.0f) {
angle = PI - angle;
}
axis /= lengthAxis;
rotation = btMatrix3x3(btQuaternion(axis, angle)) * rotation;
} else if (rayDirection.dot(forward) < 0.0f) {
// we're walking backwards
rotation = btMatrix3x3(btQuaternion(_currentUp, PI)) * rotation;
}
// scan the top
// NOTE: if we scan an extra distance forward we can detect flat surfaces that are too steep to walk on.
// The approximate extra distance can be derived with trigonometry.
//
// minimumForward = [ (maxStepHeight + radius / cosTheta - radius) * (cosTheta / sinTheta) - radius ]
//
// where: theta = max angle between floor normal and vertical
//
// if stepLength is not long enough we can add the difference.
//
btScalar cosTheta = _minFloorNormalDotUp;
btScalar sinTheta = sqrtf(1.0f - cosTheta * cosTheta);
const btScalar MIN_FORWARD_SLOP = 0.12f; // HACK: not sure why this is necessary to detect steepest walkable slope
btScalar forwardSlop = (_maxStepHeight + _radius / cosTheta - _radius) * (cosTheta / sinTheta) - (_radius + stepLength) + MIN_FORWARD_SLOP;
if (forwardSlop < 0.0f) {
// BIG step, no slop necessary
forwardSlop = 0.0f;
}
const btScalar backSlop = 0.04f;
for (int32_t i = 0; i < _topPoints.size(); ++i) {
rayStart = newPosition + rotation * _topPoints[i] - backSlop * rayDirection;
rayEnd = rayStart + (backSlop + stepLength + forwardSlop) * rayDirection;
if (_ghost.rayTest(rayStart, rayEnd, rayResult)) {
if (rayResult.m_closestHitFraction < closestRayResult.m_closestHitFraction) {
closestRayResult = rayResult;
}
if (result.walkable) {
if (rayResult.m_hitNormalWorld.dot(_currentUp) < _minFloorNormalDotUp) {
result.walkable = false;
// the top scan wasn't walkable so don't bother scanning the bottom
// remove both forwardSlop and backSlop
result.hitFraction = glm::min(1.0f, (closestRayResult.m_closestHitFraction * (backSlop + stepLength + forwardSlop) - backSlop) / stepLength);
return result.hitFraction < 1.0f;
}
}
}
}
if (_state == State::Hover) {
// scan the bottom just like the top
for (int32_t i = 0; i < _bottomPoints.size(); ++i) {
rayStart = newPosition + rotation * _bottomPoints[i] - backSlop * rayDirection;
rayEnd = rayStart + (backSlop + stepLength + forwardSlop) * rayDirection;
if (_ghost.rayTest(rayStart, rayEnd, rayResult)) {
if (rayResult.m_closestHitFraction < closestRayResult.m_closestHitFraction) {
closestRayResult = rayResult;
}
if (result.walkable) {
if (rayResult.m_hitNormalWorld.dot(_currentUp) < _minFloorNormalDotUp) {
result.walkable = false;
// the bottom scan wasn't walkable
// remove both forwardSlop and backSlop
result.hitFraction = glm::min(1.0f, (closestRayResult.m_closestHitFraction * (backSlop + stepLength + forwardSlop) - backSlop) / stepLength);
return result.hitFraction < 1.0f;
}
}
}
}
} else {
// scan the bottom looking for nearest step point
// remove forwardSlop
result.hitFraction = (closestRayResult.m_closestHitFraction * (backSlop + stepLength + forwardSlop)) / (backSlop + stepLength);
for (int32_t i = 0; i < _bottomPoints.size(); ++i) {
rayStart = newPosition + rotation * _bottomPoints[i] - backSlop * rayDirection;
rayEnd = rayStart + (backSlop + stepLength) * rayDirection;
if (_ghost.rayTest(rayStart, rayEnd, rayResult)) {
if (rayResult.m_closestHitFraction < closestRayResult.m_closestHitFraction) {
closestRayResult = rayResult;
}
}
}
// remove backSlop
// NOTE: backSlop removal can produce a NEGATIVE hitFraction!
// which means the shape is actually in interpenetration
result.hitFraction = ((closestRayResult.m_closestHitFraction * (backSlop + stepLength)) - backSlop) / stepLength;
}
return result.hitFraction < 1.0f;
}
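Restating the slop geometry used above in one place (θ is the steepest walkable floor angle, so cos θ = _minFloorNormalDotUp, r = _radius, h = _maxStepHeight, and ℓ is the step length):

\[
\text{minimumForward} = \left(h + \frac{r}{\cos\theta} - r\right)\frac{\cos\theta}{\sin\theta} - r,
\qquad
\text{forwardSlop} = \max\!\left(0,\ \text{minimumForward} - \ell + \text{MIN\_FORWARD\_SLOP}\right).
\]

Because each ray is padded by backSlop behind its start point and forwardSlop beyond the step, a raw hit fraction f along the padded ray is mapped back onto the actual step as

\[
\text{hitFraction} = \frac{f\,(\text{backSlop} + \ell + \text{forwardSlop}) - \text{backSlop}}{\ell},
\]

with forwardSlop dropped for the walking-case bottom scan; as the NOTE says, the back-slop correction can push the result negative when the shape already interpenetrates the obstacle.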
btConvexHullShape* MyCharacterController::computeShape() const {
// HACK: the avatar collides using a convex hull with a collision margin equal to
// the old capsule radius. Two points define a capsule and additional points are
// spread out at chest level to produce a slight taper toward the feet. This
// makes the avatar more likely to collide with vertical walls at a higher point
// and thus less likely to produce a single-point collision manifold below the
// _maxStepHeight when walking against vertical surfaces --> fixes a bug
// where the "walk up steps" feature would allow the avatar to walk up vertical
// walls.
const int32_t NUM_POINTS = 6;
btVector3 points[NUM_POINTS];
btVector3 xAxis = btVector3(1.0f, 0.0f, 0.0f);
btVector3 yAxis = btVector3(0.0f, 1.0f, 0.0f);
btVector3 zAxis = btVector3(0.0f, 0.0f, 1.0f);
points[0] = _halfHeight * yAxis;
points[1] = -_halfHeight * yAxis;
points[2] = (0.75f * _halfHeight) * yAxis - (0.1f * _radius) * zAxis;
points[3] = (0.75f * _halfHeight) * yAxis + (0.1f * _radius) * zAxis;
points[4] = (0.75f * _halfHeight) * yAxis - (0.1f * _radius) * xAxis;
points[5] = (0.75f * _halfHeight) * yAxis + (0.1f * _radius) * xAxis;
btConvexHullShape* shape = new btConvexHullShape(reinterpret_cast<btScalar*>(points), NUM_POINTS);
shape->setMargin(_radius);
return shape;
}
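A side note on the margin trick above: Bullet inflates a convex hull by its collision margin, so the two end points plus margin = _radius reproduce the old capsule surface while the chest-level points add the taper. The standalone snippet below (Bullet only, made-up dimensions) shows the margin, rather than extra geometry, supplying the roundness.

#include <btBulletCollisionCommon.h>
#include <cstdio>

int main() {
    const btScalar radius = 0.3f;      // hypothetical avatar radius
    const btScalar halfHeight = 0.6f;  // hypothetical capsule half height
    btVector3 points[2] = { btVector3(0.0f, halfHeight, 0.0f), btVector3(0.0f, -halfHeight, 0.0f) };
    btConvexHullShape hull(reinterpret_cast<btScalar*>(points), 2); // default stride == sizeof(btVector3)
    hull.setMargin(radius); // the margin, not extra points, supplies the capsule's roundness

    // The supporting vertex along +Y includes the margin, landing at the old capsule's top.
    btVector3 top = hull.localGetSupportingVertex(btVector3(0.0f, 1.0f, 0.0f));
    printf("top of collision surface: %.2f (capsule top would be %.2f)\n",
           (float)top.y(), (float)(halfHeight + radius));
    return 0;
}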
void MyCharacterController::initRayShotgun(const btCollisionWorld* world) {
// In order to trace rays out from the avatar's shape surface we need to know where the start points are in
// the local frame. Since the avatar shape is somewhat irregular, computing these points by hand is a hassle,
// so instead we ray-trace backwards to the avatar to find them.
//
// We trace back a regular grid (see below) of points against the shape and keep any that hit.
// ___
// + / + \ +
// |+ +|
// +| + | +
// |+ +|
// +| + | +
// |+ +|
// + \ + / +
// ---
// The shotgun will send rays out from these same points to see if the avatar's shape can proceed through space.
// helper class for simple ray-traces against character
class MeOnlyResultCallback : public btCollisionWorld::ClosestRayResultCallback {
public:
MeOnlyResultCallback (btRigidBody* me) : btCollisionWorld::ClosestRayResultCallback(btVector3(0.0f, 0.0f, 0.0f), btVector3(0.0f, 0.0f, 0.0f)) {
_me = me;
m_collisionFilterGroup = BULLET_COLLISION_GROUP_DYNAMIC;
m_collisionFilterMask = BULLET_COLLISION_MASK_DYNAMIC;
}
virtual btScalar addSingleResult(btCollisionWorld::LocalRayResult& rayResult,bool normalInWorldSpace) override {
if (rayResult.m_collisionObject != _me) {
return 1.0f;
}
return ClosestRayResultCallback::addSingleResult(rayResult, normalInWorldSpace);
}
btRigidBody* _me;
};
const btScalar fullHalfHeight = _radius + _halfHeight;
const btScalar divisionLine = -fullHalfHeight + _maxStepHeight; // line between top and bottom
const btScalar topHeight = fullHalfHeight - divisionLine;
const btScalar slop = 0.02f;
const int32_t NUM_ROWS = 5; // must be odd number > 1
const int32_t NUM_COLUMNS = 5; // must be odd number > 1
btVector3 reach = (2.0f * _radius) * btVector3(0.0f, 0.0f, 1.0f);
{ // top points
_topPoints.clear();
_topPoints.reserve(NUM_ROWS * NUM_COLUMNS);
btScalar stepY = (topHeight - slop) / (btScalar)(NUM_ROWS - 1);
btScalar stepX = 2.0f * (_radius - slop) / (btScalar)(NUM_COLUMNS - 1);
btTransform transform = _rigidBody->getWorldTransform();
btVector3 position = transform.getOrigin();
btMatrix3x3 rotation = transform.getBasis();
for (int32_t i = 0; i < NUM_ROWS; ++i) {
int32_t maxJ = NUM_COLUMNS;
btScalar offsetX = -(btScalar)((NUM_COLUMNS - 1) / 2) * stepX;
if (i % 2 == 1) {
// odd rows have one less point and start a halfStep closer
maxJ -= 1;
offsetX += 0.5f * stepX;
}
for (int32_t j = 0; j < maxJ; ++j) {
btVector3 localRayEnd(offsetX + (btScalar)(j) * stepX, divisionLine + (btScalar)(i) * stepY, 0.0f);
btVector3 localRayStart = localRayEnd - reach;
MeOnlyResultCallback result(_rigidBody);
world->rayTest(position + rotation * localRayStart, position + rotation * localRayEnd, result);
if (result.m_closestHitFraction < 1.0f) {
_topPoints.push_back(localRayStart + result.m_closestHitFraction * reach);
}
}
}
}
{ // bottom points
_bottomPoints.clear();
_bottomPoints.reserve(NUM_ROWS * NUM_COLUMNS);
btScalar steepestStepHitHeight = (_radius + 0.04f) * (1.0f - DEFAULT_MIN_FLOOR_NORMAL_DOT_UP);
btScalar stepY = (_maxStepHeight - slop - steepestStepHitHeight) / (btScalar)(NUM_ROWS - 1);
btScalar stepX = 2.0f * (_radius - slop) / (btScalar)(NUM_COLUMNS - 1);
btTransform transform = _rigidBody->getWorldTransform();
btVector3 position = transform.getOrigin();
btMatrix3x3 rotation = transform.getBasis();
for (int32_t i = 0; i < NUM_ROWS; ++i) {
int32_t maxJ = NUM_COLUMNS;
btScalar offsetX = -(btScalar)((NUM_COLUMNS - 1) / 2) * stepX;
if (i % 2 == 1) {
// odd rows have one less point and start a halfStep closer
maxJ -= 1;
offsetX += 0.5f * stepX;
}
for (int32_t j = 0; j < maxJ; ++j) {
btVector3 localRayEnd(offsetX + (btScalar)(j) * stepX, (divisionLine - slop) - (btScalar)(i) * stepY, 0.0f);
btVector3 localRayStart = localRayEnd - reach;
MeOnlyResultCallback result(_rigidBody);
world->rayTest(position + rotation * localRayStart, position + rotation * localRayEnd, result);
if (result.m_closestHitFraction < 1.0f) {
_bottomPoints.push_back(localRayStart + result.m_closestHitFraction * reach);
}
}
}
}
}
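The scan grid the comment above describes is a staggered lattice: odd rows drop one column and shift by half a step so the samples cover the round cross-section more evenly. A tiny standalone illustration of just that indexing (values are arbitrary):

#include <cstdio>

int main() {
    // Staggered grid: odd rows have one fewer point and start half a step closer.
    const int NUM_ROWS = 5;
    const int NUM_COLUMNS = 5;
    const float stepX = 0.1f;
    const float stepY = 0.1f;
    for (int i = 0; i < NUM_ROWS; ++i) {
        int maxJ = NUM_COLUMNS;
        float offsetX = -(float)((NUM_COLUMNS - 1) / 2) * stepX;
        if (i % 2 == 1) {
            maxJ -= 1;
            offsetX += 0.5f * stepX;
        }
        for (int j = 0; j < maxJ; ++j) {
            printf("(%5.2f, %4.2f) ", offsetX + j * stepX, i * stepY);
        }
        printf("\n");
    }
    return 0;
}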
void MyCharacterController::updateMassProperties() {
assert(_rigidBody);
// the inertia tensor of a capsule with Y-axis of symmetry, radius R and cylinder height H is:
// Ix = density * (volumeCylinder * (H^2 / 12 + R^2 / 4) + volumeSphere * (2R^2 / 5 + H^2 / 2 + 3HR / 8))
// Iy = density * (volumeCylinder * (R^2 / 2) + volumeSphere * (2R^2 / 5))
btScalar r2 = _radius * _radius;
btScalar h2 = 4.0f * _halfHeight * _halfHeight;
btScalar volumeSphere = 4.0f * PI * r2 * _radius / 3.0f;
btScalar volumeCylinder = TWO_PI * r2 * 2.0f * _halfHeight;
btScalar cylinderXZ = volumeCylinder * (h2 / 12.0f + r2 / 4.0f);
btScalar capsXZ = volumeSphere * (2.0f * r2 / 5.0f + h2 / 2.0f + 6.0f * _halfHeight * _radius / 8.0f);
btScalar inertiaXZ = _density * (cylinderXZ + capsXZ);
btScalar inertiaY = _density * ((volumeCylinder * r2 / 2.0f) + volumeSphere * (2.0f * r2 / 5.0f));
btVector3 inertia(inertiaXZ, inertiaY, inertiaXZ);
btScalar mass = _density * (volumeCylinder + volumeSphere);
_rigidBody->setMassProps(mass, inertia);
}
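For readability, the inertia comment above corresponds to the standard capsule relations, with Y the axis of symmetry, R = _radius, H = 2·_halfHeight, ρ = _density, and V_cyl, V_sph the cylinder and combined-hemisphere volumes:

\[
I_x = I_z = \rho\left[V_{\mathrm{cyl}}\!\left(\frac{H^2}{12} + \frac{R^2}{4}\right) + V_{\mathrm{sph}}\!\left(\frac{2R^2}{5} + \frac{H^2}{2} + \frac{3HR}{8}\right)\right],
\qquad
I_y = \rho\left[V_{\mathrm{cyl}}\,\frac{R^2}{2} + V_{\mathrm{sph}}\,\frac{2R^2}{5}\right],
\]

and the mass follows as m = ρ (V_cyl + V_sph), which is what setMassProps receives.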

View file

@ -24,10 +24,38 @@ public:
explicit MyCharacterController(MyAvatar* avatar);
~MyCharacterController ();
virtual void updateShapeIfNecessary() override;
void setDynamicsWorld(btDynamicsWorld* world) override;
void updateShapeIfNecessary() override;
// Sweeping a convex shape through the physics simulation can be expensive when the obstacles are too
// complex (e.g. a small static mesh with 20k triangles), so instead we cast several rays forward; if they
// don't hit anything we consider it a clean sweep. Hence this "Shotgun" code.
class RayShotgunResult {
public:
void reset();
float hitFraction { 1.0f };
bool walkable { true };
};
/// return true if RayShotgun hits anything
bool testRayShotgun(const glm::vec3& position, const glm::vec3& step, RayShotgunResult& result);
void setDensity(btScalar density) { _density = density; }
protected:
void initRayShotgun(const btCollisionWorld* world);
void updateMassProperties() override;
private:
btConvexHullShape* computeShape() const;
protected:
MyAvatar* _avatar { nullptr };
// shotgun scan data
btAlignedObjectArray<btVector3> _topPoints;
btAlignedObjectArray<btVector3> _bottomPoints;
btScalar _density { 1.0f };
};
#endif // hifi_MyCharacterController_h

View file

@ -44,14 +44,17 @@ glm::quat MyHead::getCameraOrientation() const {
void MyHead::simulate(float deltaTime) {
auto player = DependencyManager::get<recording::Deck>();
// Only use face trackers when not playing back a recording.
if (!player->isPlaying()) {
if (player->isPlaying()) {
Parent::simulate(deltaTime);
} else {
computeAudioLoudness(deltaTime);
FaceTracker* faceTracker = qApp->getActiveFaceTracker();
_isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
_isFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
if (_isFaceTrackerConnected) {
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
_transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {
if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
calculateMouthShapes(deltaTime);
@ -60,17 +63,27 @@ void MyHead::simulate(float deltaTime) {
const int FUNNEL_BLENDSHAPE = 40;
const int SMILE_LEFT_BLENDSHAPE = 28;
const int SMILE_RIGHT_BLENDSHAPE = 29;
_blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
_blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
_blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
_blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
_blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
_transientBlendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
_transientBlendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
_transientBlendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
_transientBlendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
_transientBlendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
}
applyEyelidOffset(getFinalOrientationInWorldFrame());
}
}
} else {
computeFaceMovement(deltaTime);
}
auto eyeTracker = DependencyManager::get<EyeTracker>();
_isEyeTrackerConnected = eyeTracker->isTracking();
_isEyeTrackerConnected = eyeTracker && eyeTracker->isTracking();
if (_isEyeTrackerConnected) {
// TODO? figure out where EyeTracker data is harvested. Move it here?
_saccade = glm::vec3();
} else {
computeEyeMovement(deltaTime);
}
}
Parent::simulate(deltaTime);
}
computeEyePosition();
}

View file

@ -37,7 +37,14 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Head* head = _owningAvatar->getHead();
// make sure lookAt is not too close to the face (avoid crossed eyes)
glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition();
glm::vec3 lookAt = head->getLookAtPosition();
glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();
float focusDistance = glm::length(focusOffset);
const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f;
if (focusDistance < MIN_LOOK_AT_FOCUS_DISTANCE && focusDistance > EPSILON) {
lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
}
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
Rig::HeadParameters headParams;
@ -140,6 +147,9 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
auto orientation = myAvatar->getLocalOrientation();
_rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);
// evaluate AnimGraph animation and update jointStates.
Model::updateRig(deltaTime, parentTransform);
Rig::EyeParameters eyeParams;
eyeParams.eyeLookAt = lookAt;
eyeParams.eyeSaccade = head->getSaccade();
@ -149,8 +159,5 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;
_rig->updateFromEyeParameters(eyeParams);
// evaluate AnimGraph animation and update jointStates.
Parent::updateRig(deltaTime, parentTransform);
}
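The clamp added at the top of this function keeps the look-at point at least MIN_LOOK_AT_FOCUS_DISTANCE from the eyes so very near targets don't cross the avatar's eyes. The same clamp in isolation, as a standalone glm sketch with illustrative names:

#include <glm/glm.hpp>
#include <cstdio>

// Push a look-at point out to a minimum distance from the eye along the same direction.
glm::vec3 clampLookAtDistance(const glm::vec3& eye, const glm::vec3& lookAt, float minDistance) {
    const float EPSILON = 1.0e-6f;
    glm::vec3 offset = lookAt - eye;
    float distance = glm::length(offset);
    if (distance < minDistance && distance > EPSILON) {
        return eye + (minDistance / distance) * offset;
    }
    return lookAt;
}

int main() {
    glm::vec3 eye(0.0f);
    glm::vec3 target(0.0f, 0.0f, 0.2f);                       // 20 cm away: too close
    glm::vec3 clamped = clampLookAtDistance(eye, target, 1.0f);
    printf("clamped z = %.2f\n", clamped.z);                  // 1.00: pushed out to 1 m
    return 0;
}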

View file

@ -11,6 +11,7 @@
#include <thread>
#include <QCommandLineParser>
#include <QtCore/QProcess>
#include <QDebug>
#include <QDir>
#include <QLocalSocket>
@ -20,6 +21,7 @@
#include <BuildInfo.h>
#include <gl/OpenGLVersionChecker.h>
#include <SandboxUtils.h>
#include <SharedUtil.h>
@ -28,7 +30,6 @@
#include "InterfaceLogging.h"
#include "UserActivityLogger.h"
#include "MainWindow.h"
#include <QtCore/QProcess>
#ifdef HAS_BUGSPLAT
#include <BugSplat.h>
@ -50,50 +51,49 @@ int main(int argc, const char* argv[]) {
disableQtBearerPoll(); // Fixes wifi ping spikes
QElapsedTimer startupTime;
startupTime.start();
// Set application infos
QCoreApplication::setApplicationName(BuildInfo::INTERFACE_NAME);
QCoreApplication::setOrganizationName(BuildInfo::MODIFIED_ORGANIZATION);
QCoreApplication::setOrganizationDomain(BuildInfo::ORGANIZATION_DOMAIN);
QCoreApplication::setApplicationVersion(BuildInfo::VERSION);
const QString& applicationName = getInterfaceSharedMemoryName();
bool instanceMightBeRunning = true;
QStringList arguments;
for (int i = 0; i < argc; ++i) {
arguments << argv[i];
}
#ifdef Q_OS_WIN
// Try to create a shared memory block - if it can't be created, there is an instance of
// interface already running. We only do this on Windows for now because of the potential
// for crashed instances to leave behind shared memory instances on unix.
QSharedMemory sharedMemory { applicationName };
instanceMightBeRunning = !sharedMemory.create(1, QSharedMemory::ReadOnly);
#endif
// allow multiple interfaces to run if this environment variable is set.
if (QProcessEnvironment::systemEnvironment().contains("HIFI_ALLOW_MULTIPLE_INSTANCES")) {
instanceMightBeRunning = false;
}
QCommandLineParser parser;
QCommandLineOption urlOption("url", "", "value");
QCommandLineOption noUpdaterOption("no-updater", "Do not show auto-updater");
QCommandLineOption checkMinSpecOption("checkMinSpec", "Check if machine meets minimum specifications");
QCommandLineOption runServerOption("runServer", "Whether to run the server");
QCommandLineOption serverContentPathOption("serverContentPath", "Where to find server content", "serverContentPath");
QCommandLineOption allowMultipleInstancesOption("allowMultipleInstances", "Allow multiple instances to run");
parser.addOption(urlOption);
parser.addOption(noUpdaterOption);
parser.addOption(checkMinSpecOption);
parser.addOption(runServerOption);
parser.addOption(serverContentPathOption);
parser.addOption(allowMultipleInstancesOption);
parser.parse(arguments);
bool runServer = parser.isSet(runServerOption);
bool serverContentPathOptionIsSet = parser.isSet(serverContentPathOption);
QString serverContentPathOptionValue = serverContentPathOptionIsSet ? parser.value(serverContentPathOption) : QString();
bool allowMultipleInstances = parser.isSet(allowMultipleInstancesOption);
const QString& applicationName = getInterfaceSharedMemoryName();
bool instanceMightBeRunning = true;
#ifdef Q_OS_WIN
// Try to create a shared memory block - if it can't be created, there is an instance of
// interface already running. We only do this on Windows for now because of the potential
// for crashed instances to leave behind shared memory instances on unix.
QSharedMemory sharedMemory{ applicationName };
instanceMightBeRunning = !sharedMemory.create(1, QSharedMemory::ReadOnly);
#endif
// allow multiple interfaces to run if this environment variable is set.
bool allowMultipleInstances = parser.isSet(allowMultipleInstancesOption) ||
QProcessEnvironment::systemEnvironment().contains("HIFI_ALLOW_MULTIPLE_INSTANCES");
if (allowMultipleInstances) {
instanceMightBeRunning = false;
}
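The reordering above keeps the Windows shared-memory single-instance check but lets either the --allowMultipleInstances flag or the HIFI_ALLOW_MULTIPLE_INSTANCES environment variable bypass it. A minimal standalone Qt sketch of the same guard pattern follows (the key and variable names are invented, and it skips the local-socket URL hand-off that Interface performs next):

#include <QCoreApplication>
#include <QProcessEnvironment>
#include <QSharedMemory>
#include <QDebug>

int main(int argc, char* argv[]) {
    QCoreApplication app(argc, argv);

    bool allowMultiple = QProcessEnvironment::systemEnvironment().contains("MYAPP_ALLOW_MULTIPLE"); // hypothetical var
    // Creating a tiny shared memory block succeeds only for the first instance using this key.
    QSharedMemory guard("myapp-single-instance-key");                                               // hypothetical key
    bool anotherInstanceRunning = !guard.create(1, QSharedMemory::ReadOnly);

    if (anotherInstanceRunning && !allowMultiple) {
        qDebug() << "Another instance appears to be running; exiting.";
        return 1;
    }
    qDebug() << "Starting up.";
    return 0;
}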
@ -108,11 +108,6 @@ int main(int argc, const char* argv[]) {
// Try to connect - if we can't connect, interface has probably just gone down
if (socket.waitForConnected(LOCAL_SERVER_TIMEOUT_MS)) {
QCommandLineParser parser;
QCommandLineOption urlOption("url", "", "value");
parser.addOption(urlOption);
parser.process(arguments);
if (parser.isSet(urlOption)) {
QUrl url = QUrl(parser.value(urlOption));
if (url.isValid() && url.scheme() == HIFI_URL_SCHEME) {
@ -156,9 +151,6 @@ int main(int argc, const char* argv[]) {
}
}
QElapsedTimer startupTime;
startupTime.start();
// Debug option to demonstrate that the client's local time does not
// need to be in sync with any other network node. This forces clock
// skew for the individual client
@ -199,7 +191,21 @@ int main(int argc, const char* argv[]) {
int exitCode;
{
Application app(argc, const_cast<char**>(argv), startupTime, runServer, serverContentPathOptionValue);
RunningMarker runningMarker(nullptr, RUNNING_MARKER_FILENAME);
runningMarker.writeRunningMarkerFile();
bool noUpdater = parser.isSet(noUpdaterOption);
bool runServer = parser.isSet(runServerOption);
bool serverContentPathOptionIsSet = parser.isSet(serverContentPathOption);
QString serverContentPath = serverContentPathOptionIsSet ? parser.value(serverContentPathOption) : QString();
if (runServer) {
SandboxUtils::runLocalSandbox(serverContentPath, true, RUNNING_MARKER_FILENAME, noUpdater);
}
Application app(argc, const_cast<char**>(argv), startupTime);
// Now that the main event loop is setup, launch running marker thread
runningMarker.startRunningMarker();
// If we failed the OpenGLVersion check, log it.
if (override) {

View file

@ -11,21 +11,19 @@
#include "AudioClient.h"
#include "AudioDeviceScriptingInterface.h"
#include "SettingsScriptingInterface.h"
AudioDeviceScriptingInterface* AudioDeviceScriptingInterface::getInstance() {
static AudioDeviceScriptingInterface sharedInstance;
return &sharedInstance;
}
QStringList AudioDeviceScriptingInterface::inputAudioDevices() const
{
return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioInput).toList();;
QStringList AudioDeviceScriptingInterface::inputAudioDevices() const {
return _inputAudioDevices;
}
QStringList AudioDeviceScriptingInterface::outputAudioDevices() const
{
return DependencyManager::get<AudioClient>()->getDeviceNames(QAudio::AudioOutput).toList();;
QStringList AudioDeviceScriptingInterface::outputAudioDevices() const {
return _outputAudioDevices;
}
bool AudioDeviceScriptingInterface::muted()
@ -33,11 +31,27 @@ bool AudioDeviceScriptingInterface::muted()
return getMuted();
}
AudioDeviceScriptingInterface::AudioDeviceScriptingInterface() {
AudioDeviceScriptingInterface::AudioDeviceScriptingInterface(): QAbstractListModel(nullptr) {
connect(DependencyManager::get<AudioClient>().data(), &AudioClient::muteToggled,
this, &AudioDeviceScriptingInterface::muteToggled);
connect(DependencyManager::get<AudioClient>().data(), &AudioClient::deviceChanged,
this, &AudioDeviceScriptingInterface::deviceChanged);
this, &AudioDeviceScriptingInterface::onDeviceChanged, Qt::QueuedConnection);
connect(DependencyManager::get<AudioClient>().data(), &AudioClient::currentInputDeviceChanged,
this, &AudioDeviceScriptingInterface::onCurrentInputDeviceChanged, Qt::QueuedConnection);
connect(DependencyManager::get<AudioClient>().data(), &AudioClient::currentOutputDeviceChanged,
this, &AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged, Qt::QueuedConnection);
//fill up model
onDeviceChanged();
//set up previously saved device
SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
const QString inDevice = settings->getValue("audio_input_device").toString();
if (inDevice != _currentInputDevice) {
setInputDeviceAsync(inDevice);
}
const QString outDevice = settings->getValue("audio_output_device").toString();
if (outDevice != _currentOutputDevice) {
setOutputDeviceAsync(outDevice);
}
}
bool AudioDeviceScriptingInterface::setInputDevice(const QString& deviceName) {
@ -58,6 +72,43 @@ bool AudioDeviceScriptingInterface::setOutputDevice(const QString& deviceName) {
return result;
}
bool AudioDeviceScriptingInterface::setDeviceFromMenu(const QString& deviceMenuName) {
QAudio::Mode mode;
if (deviceMenuName.indexOf("for Output") != -1) {
mode = QAudio::AudioOutput;
} else if (deviceMenuName.indexOf("for Input") != -1) {
mode = QAudio::AudioInput;
} else {
return false;
}
for (ScriptingAudioDeviceInfo di: _devices) {
if (mode == di.mode && deviceMenuName.contains(di.name)) {
if (mode == QAudio::AudioOutput) {
setOutputDeviceAsync(di.name);
} else {
setInputDeviceAsync(di.name);
}
return true;
}
}
return false;
}
void AudioDeviceScriptingInterface::setInputDeviceAsync(const QString& deviceName) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchInputToAudioDevice",
Qt::QueuedConnection,
Q_ARG(const QString&, deviceName));
}
void AudioDeviceScriptingInterface::setOutputDeviceAsync(const QString& deviceName) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "switchOutputToAudioDevice",
Qt::QueuedConnection,
Q_ARG(const QString&, deviceName));
}
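setInputDeviceAsync and setOutputDeviceAsync rely on QMetaObject::invokeMethod with Qt::QueuedConnection so the device switch runs on AudioClient's own thread rather than the caller's. A stripped-down, self-contained sketch of that pattern (the Worker class and slot are invented for illustration; assumes the file is main.cpp built with moc enabled):

#include <QCoreApplication>
#include <QObject>
#include <QString>
#include <QThread>
#include <QTimer>
#include <QDebug>

class Worker : public QObject {
    Q_OBJECT
public slots:
    void switchDevice(const QString& name) {
        // Runs on the worker's thread because the call below is queued.
        qDebug() << "switching to" << name << "on" << QThread::currentThread();
    }
};

int main(int argc, char* argv[]) {
    QCoreApplication app(argc, argv);
    QThread workerThread;
    Worker worker;
    worker.moveToThread(&workerThread);
    workerThread.start();

    // Queued: returns immediately; the slot executes later on workerThread's event loop.
    QMetaObject::invokeMethod(&worker, "switchDevice", Qt::QueuedConnection,
                              Q_ARG(QString, QStringLiteral("Headphones")));

    QTimer::singleShot(200, &app, SLOT(quit())); // end the demo shortly after
    int rc = app.exec();
    workerThread.quit();
    workerThread.wait();
    return rc;
}

#include "main.moc" // needed because Worker declares Q_OBJECT in a .cpp file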
QString AudioDeviceScriptingInterface::getInputDevice() {
return DependencyManager::get<AudioClient>()->getDeviceName(QAudio::AudioInput);
}
@ -116,3 +167,105 @@ void AudioDeviceScriptingInterface::setMuted(bool muted)
bool AudioDeviceScriptingInterface::getMuted() {
return DependencyManager::get<AudioClient>()->isMuted();
}
QVariant AudioDeviceScriptingInterface::data(const QModelIndex& index, int role) const {
//sanity
if (!index.isValid() || index.row() >= _devices.size())
return QVariant();
if (role == Qt::DisplayRole || role == DisplayNameRole) {
return _devices.at(index.row()).name;
} else if (role == SelectedRole) {
return _devices.at(index.row()).selected;
} else if (role == AudioModeRole) {
return (int)_devices.at(index.row()).mode;
}
return QVariant();
}
int AudioDeviceScriptingInterface::rowCount(const QModelIndex& parent) const {
Q_UNUSED(parent)
return _devices.size();
}
QHash<int, QByteArray> AudioDeviceScriptingInterface::roleNames() const {
QHash<int, QByteArray> roles;
roles.insert(DisplayNameRole, "devicename");
roles.insert(SelectedRole, "devicechecked");
roles.insert(AudioModeRole, "devicemode");
return roles;
}
void AudioDeviceScriptingInterface::onDeviceChanged()
{
beginResetModel();
_outputAudioDevices.clear();
_devices.clear();
_currentOutputDevice = getOutputDevice();
for (QString name: getOutputDevices()) {
ScriptingAudioDeviceInfo di;
di.name = name;
di.selected = (name == _currentOutputDevice);
di.mode = QAudio::AudioOutput;
_devices.append(di);
_outputAudioDevices.append(name);
}
emit outputAudioDevicesChanged(_outputAudioDevices);
_inputAudioDevices.clear();
_currentInputDevice = getInputDevice();
for (QString name: getInputDevices()) {
ScriptingAudioDeviceInfo di;
di.name = name;
di.selected = (name == _currentInputDevice);
di.mode = QAudio::AudioInput;
_devices.append(di);
_inputAudioDevices.append(name);
}
emit inputAudioDevicesChanged(_inputAudioDevices);
endResetModel();
emit deviceChanged();
}
void AudioDeviceScriptingInterface::onCurrentInputDeviceChanged(const QString& name)
{
currentDeviceUpdate(name, QAudio::AudioInput);
//we got a signal that device changed. Save it now
SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
settings->setValue("audio_input_device", name);
emit currentInputDeviceChanged(name);
}
void AudioDeviceScriptingInterface::onCurrentOutputDeviceChanged(const QString& name)
{
currentDeviceUpdate(name, QAudio::AudioOutput);
//we got a signal that device changed. Save it now
SettingsScriptingInterface* settings = SettingsScriptingInterface::getInstance();
settings->setValue("audio_output_device", name);
emit currentOutputDeviceChanged(name);
}
void AudioDeviceScriptingInterface::currentDeviceUpdate(const QString& name, QAudio::Mode mode)
{
QVector<int> role;
role.append(SelectedRole);
for (int i = 0; i < _devices.size(); i++) {
ScriptingAudioDeviceInfo di = _devices.at(i);
if (di.mode != mode) {
continue;
}
if (di.selected && di.name != name ) {
di.selected = false;
_devices[i] = di;
emit dataChanged(index(i, 0), index(i, 0), role);
}
if (di.name == name) {
di.selected = true;
_devices[i] = di;
emit dataChanged(index(i, 0), index(i, 0), role);
}
}
}

View file

@ -15,10 +15,18 @@
#include <QObject>
#include <QString>
#include <QVector>
#include <QAbstractListModel>
#include <QAudio>
class AudioEffectOptions;
class AudioDeviceScriptingInterface : public QObject {
struct ScriptingAudioDeviceInfo {
QString name;
bool selected;
QAudio::Mode mode;
};
class AudioDeviceScriptingInterface : public QAbstractListModel {
Q_OBJECT
Q_PROPERTY(QStringList inputAudioDevices READ inputAudioDevices NOTIFY inputAudioDevicesChanged)
@ -32,9 +40,26 @@ public:
QStringList outputAudioDevices() const;
bool muted();
QVariant data(const QModelIndex& index, int role = Qt::DisplayRole) const override;
int rowCount(const QModelIndex& parent = QModelIndex()) const override;
QHash<int, QByteArray> roleNames() const override;
enum Roles {
DisplayNameRole = Qt::UserRole,
SelectedRole,
AudioModeRole
};
private slots:
void onDeviceChanged();
void onCurrentInputDeviceChanged(const QString& name);
void onCurrentOutputDeviceChanged(const QString& name);
void currentDeviceUpdate(const QString& name, QAudio::Mode mode);
public slots:
bool setInputDevice(const QString& deviceName);
bool setOutputDevice(const QString& deviceName);
bool setDeviceFromMenu(const QString& deviceMenuName);
QString getInputDevice();
QString getOutputDevice();
@ -55,15 +80,28 @@ public slots:
void setMuted(bool muted);
void setInputDeviceAsync(const QString& deviceName);
void setOutputDeviceAsync(const QString& deviceName);
private:
AudioDeviceScriptingInterface();
signals:
void muteToggled();
void deviceChanged();
void currentInputDeviceChanged(const QString& name);
void currentOutputDeviceChanged(const QString& name);
void mutedChanged(bool muted);
void inputAudioDevicesChanged(QStringList inputAudioDevices);
void outputAudioDevicesChanged(QStringList outputAudioDevices);
private:
QVector<ScriptingAudioDeviceInfo> _devices;
QStringList _inputAudioDevices;
QStringList _outputAudioDevices;
QString _currentInputDevice;
QString _currentOutputDevice;
};
#endif // hifi_AudioDeviceScriptingInterface_h

View file

@ -28,11 +28,15 @@ const int MAX_HISTORY_SIZE = 64;
const QString COMMAND_STYLE = "color: #266a9b;";
const QString RESULT_SUCCESS_STYLE = "color: #677373;";
const QString RESULT_INFO_STYLE = "color: #223bd1;";
const QString RESULT_WARNING_STYLE = "color: #d13b22;";
const QString RESULT_ERROR_STYLE = "color: #d13b22;";
const QString GUTTER_PREVIOUS_COMMAND = "<span style=\"color: #57b8bb;\">&lt;</span>";
const QString GUTTER_ERROR = "<span style=\"color: #d13b22;\">X</span>";
const QString JSConsole::_consoleFileName { "about:console" };
JSConsole::JSConsole(QWidget* parent, ScriptEngine* scriptEngine) :
QWidget(parent),
_ui(new Ui::Console),
@ -77,6 +81,8 @@ void JSConsole::setScriptEngine(ScriptEngine* scriptEngine) {
}
if (_scriptEngine != NULL) {
disconnect(_scriptEngine, &ScriptEngine::printedMessage, this, &JSConsole::handlePrint);
disconnect(_scriptEngine, &ScriptEngine::infoMessage, this, &JSConsole::handleInfo);
disconnect(_scriptEngine, &ScriptEngine::warningMessage, this, &JSConsole::handleWarning);
disconnect(_scriptEngine, &ScriptEngine::errorMessage, this, &JSConsole::handleError);
if (_ownScriptEngine) {
_scriptEngine->deleteLater();
@ -84,10 +90,12 @@ void JSConsole::setScriptEngine(ScriptEngine* scriptEngine) {
}
// if scriptEngine is NULL then create one and keep track of it using _ownScriptEngine
_ownScriptEngine = scriptEngine == NULL;
_scriptEngine = _ownScriptEngine ? DependencyManager::get<ScriptEngines>()->loadScript(QString(), false) : scriptEngine;
_ownScriptEngine = (scriptEngine == NULL);
_scriptEngine = _ownScriptEngine ? DependencyManager::get<ScriptEngines>()->loadScript(_consoleFileName, false) : scriptEngine;
connect(_scriptEngine, &ScriptEngine::printedMessage, this, &JSConsole::handlePrint);
connect(_scriptEngine, &ScriptEngine::infoMessage, this, &JSConsole::handleInfo);
connect(_scriptEngine, &ScriptEngine::warningMessage, this, &JSConsole::handleWarning);
connect(_scriptEngine, &ScriptEngine::errorMessage, this, &JSConsole::handleError);
}
@ -107,11 +115,10 @@ void JSConsole::executeCommand(const QString& command) {
QScriptValue JSConsole::executeCommandInWatcher(const QString& command) {
QScriptValue result;
static const QString filename = "JSConcole";
QMetaObject::invokeMethod(_scriptEngine, "evaluate", Qt::ConnectionType::BlockingQueuedConnection,
Q_RETURN_ARG(QScriptValue, result),
Q_ARG(const QString&, command),
Q_ARG(const QString&, filename));
Q_ARG(const QString&, _consoleFileName));
return result;
}
@ -134,16 +141,26 @@ void JSConsole::commandFinished() {
resetCurrentCommandHistory();
}
void JSConsole::handleError(const QString& scriptName, const QString& message) {
void JSConsole::handleError(const QString& message, const QString& scriptName) {
Q_UNUSED(scriptName);
appendMessage(GUTTER_ERROR, "<span style='" + RESULT_ERROR_STYLE + "'>" + message.toHtmlEscaped() + "</span>");
}
void JSConsole::handlePrint(const QString& scriptName, const QString& message) {
void JSConsole::handlePrint(const QString& message, const QString& scriptName) {
Q_UNUSED(scriptName);
appendMessage("", message);
}
void JSConsole::handleInfo(const QString& message, const QString& scriptName) {
Q_UNUSED(scriptName);
appendMessage("", "<span style='" + RESULT_INFO_STYLE + "'>" + message.toHtmlEscaped() + "</span>");
}
void JSConsole::handleWarning(const QString& message, const QString& scriptName) {
Q_UNUSED(scriptName);
appendMessage("", "<span style='" + RESULT_WARNING_STYLE + "'>" + message.toHtmlEscaped() + "</span>");
}
void JSConsole::mouseReleaseEvent(QMouseEvent* event) {
_ui->promptTextEdit->setFocus();
}

View file

@ -47,8 +47,10 @@ protected:
protected slots:
void scrollToBottom();
void resizeTextInput();
void handlePrint(const QString& scriptName, const QString& message);
void handleError(const QString& scriptName, const QString& message);
void handlePrint(const QString& message, const QString& scriptName);
void handleInfo(const QString& message, const QString& scriptName);
void handleWarning(const QString& message, const QString& scriptName);
void handleError(const QString& message, const QString& scriptName);
void commandFinished();
private:
@ -66,6 +68,7 @@ private:
bool _ownScriptEngine;
QString _rootCommand;
ScriptEngine* _scriptEngine;
static const QString _consoleFileName;
};

View file

@ -51,6 +51,7 @@ private:
static void processFrames();
public:
static void saveSnapshotAnimated(QString pathStill, float aspectRatio, Application* app, QSharedPointer<WindowScriptingInterface> dm);
static bool isAlreadyTakingSnapshotAnimated() { return snapshotAnimatedFirstFrameTimestamp != 0; };
static Setting::Handle<bool> alsoTakeAnimatedSnapshot;
static Setting::Handle<float> snapshotAnimatedDuration;
};

View file

@ -408,6 +408,7 @@ RayToOverlayIntersectionResult Overlays::findRayIntersectionInternal(const PickR
const QVector<OverlayID>& overlaysToInclude,
const QVector<OverlayID>& overlaysToDiscard,
bool visibleOnly, bool collidableOnly) {
QReadLocker lock(&_lock);
float bestDistance = std::numeric_limits<float>::max();
bool bestIsFront = false;

View file

@ -225,10 +225,6 @@ void Web3DOverlay::setMaxFPS(uint8_t maxFPS) {
}
void Web3DOverlay::render(RenderArgs* args) {
if (!_visible || !getParentVisible()) {
return;
}
QOpenGLContext * currentContext = QOpenGLContext::currentContext();
QSurface * currentSurface = currentContext->surface();
if (!_webSurface) {
@ -282,6 +278,10 @@ void Web3DOverlay::render(RenderArgs* args) {
_webSurface->resize(QSize(_resolution.x, _resolution.y));
}
if (!_visible || !getParentVisible()) {
return;
}
vec2 halfSize = getSize() / 2.0f;
vec4 color(toGlm(getColor()), getAlpha());

View file

@ -10,7 +10,11 @@
#include "AnimContext.h"
AnimContext::AnimContext(bool enableDebugDrawIKTargets, const glm::mat4& geometryToRigMatrix) :
AnimContext::AnimContext(bool enableDebugDrawIKTargets, bool enableDebugDrawIKConstraints,
const glm::mat4& geometryToRigMatrix, const glm::mat4& rigToWorldMatrix) :
_enableDebugDrawIKTargets(enableDebugDrawIKTargets),
_geometryToRigMatrix(geometryToRigMatrix) {
_enableDebugDrawIKConstraints(enableDebugDrawIKConstraints),
_geometryToRigMatrix(geometryToRigMatrix),
_rigToWorldMatrix(rigToWorldMatrix)
{
}

View file

@ -16,15 +16,20 @@
class AnimContext {
public:
AnimContext(bool enableDebugDrawIKTargets, const glm::mat4& geometryToRigMatrix);
AnimContext(bool enableDebugDrawIKTargets, bool enableDebugDrawIKConstraints,
const glm::mat4& geometryToRigMatrix, const glm::mat4& rigToWorldMatrix);
bool getEnableDebugDrawIKTargets() const { return _enableDebugDrawIKTargets; }
bool getEnableDebugDrawIKConstraints() const { return _enableDebugDrawIKConstraints; }
const glm::mat4& getGeometryToRigMatrix() const { return _geometryToRigMatrix; }
const glm::mat4& getRigToWorldMatrix() const { return _rigToWorldMatrix; }
protected:
bool _enableDebugDrawIKTargets { false };
bool _enableDebugDrawIKConstraints{ false };
glm::mat4 _geometryToRigMatrix;
glm::mat4 _rigToWorldMatrix;
};
#endif // hifi_AnimContext_h

View file

@ -21,6 +21,39 @@
#include "SwingTwistConstraint.h"
#include "AnimationLogging.h"
AnimInverseKinematics::IKTargetVar::IKTargetVar(const QString& jointNameIn, const QString& positionVarIn, const QString& rotationVarIn,
const QString& typeVarIn, const QString& weightVarIn, float weightIn, const std::vector<float>& flexCoefficientsIn) :
jointName(jointNameIn),
positionVar(positionVarIn),
rotationVar(rotationVarIn),
typeVar(typeVarIn),
weightVar(weightVarIn),
weight(weightIn),
numFlexCoefficients(flexCoefficientsIn.size()),
jointIndex(-1)
{
numFlexCoefficients = std::min(numFlexCoefficients, (size_t)MAX_FLEX_COEFFICIENTS);
for (size_t i = 0; i < numFlexCoefficients; i++) {
flexCoefficients[i] = flexCoefficientsIn[i];
}
}
AnimInverseKinematics::IKTargetVar::IKTargetVar(const IKTargetVar& orig) :
jointName(orig.jointName),
positionVar(orig.positionVar),
rotationVar(orig.rotationVar),
typeVar(orig.typeVar),
weightVar(orig.weightVar),
weight(orig.weight),
numFlexCoefficients(orig.numFlexCoefficients),
jointIndex(orig.jointIndex)
{
numFlexCoefficients = std::min(numFlexCoefficients, (size_t)MAX_FLEX_COEFFICIENTS);
for (size_t i = 0; i < numFlexCoefficients; i++) {
flexCoefficients[i] = orig.flexCoefficients[i];
}
}
AnimInverseKinematics::AnimInverseKinematics(const QString& id) : AnimNode(AnimNode::Type::InverseKinematics, id) {
}
@ -60,26 +93,22 @@ void AnimInverseKinematics::computeAbsolutePoses(AnimPoseVec& absolutePoses) con
}
}
void AnimInverseKinematics::setTargetVars(
const QString& jointName,
const QString& positionVar,
const QString& rotationVar,
const QString& typeVar) {
void AnimInverseKinematics::setTargetVars(const QString& jointName, const QString& positionVar, const QString& rotationVar,
const QString& typeVar, const QString& weightVar, float weight, const std::vector<float>& flexCoefficients) {
IKTargetVar targetVar(jointName, positionVar, rotationVar, typeVar, weightVar, weight, flexCoefficients);
// if there are dups, last one wins.
bool found = false;
for (auto& targetVar: _targetVarVec) {
if (targetVar.jointName == jointName) {
// update existing targetVar
targetVar.positionVar = positionVar;
targetVar.rotationVar = rotationVar;
targetVar.typeVar = typeVar;
for (auto& targetVarIter: _targetVarVec) {
if (targetVarIter.jointName == jointName) {
targetVarIter = targetVar;
found = true;
break;
}
}
if (!found) {
// create a new entry
_targetVarVec.push_back(IKTargetVar(jointName, positionVar, rotationVar, typeVar));
_targetVarVec.push_back(targetVar);
}
}
@ -107,10 +136,15 @@ void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::
AnimPose defaultPose = _skeleton->getAbsolutePose(targetVar.jointIndex, underPoses);
glm::quat rotation = animVars.lookupRigToGeometry(targetVar.rotationVar, defaultPose.rot());
glm::vec3 translation = animVars.lookupRigToGeometry(targetVar.positionVar, defaultPose.trans());
float weight = animVars.lookup(targetVar.weightVar, targetVar.weight);
target.setPose(rotation, translation);
target.setIndex(targetVar.jointIndex);
target.setWeight(weight);
target.setFlexCoefficients(targetVar.numFlexCoefficients, targetVar.flexCoefficients);
targets.push_back(target);
if (targetVar.jointIndex > _maxTargetIndex) {
_maxTargetIndex = targetVar.jointIndex;
}
@ -271,6 +305,8 @@ int AnimInverseKinematics::solveTargetWithCCD(const IKTarget& target, AnimPoseVe
// cache tip absolute position
glm::vec3 tipPosition = absolutePoses[tipIndex].trans();
size_t chainDepth = 1;
// descend toward root, pivoting each joint to get tip closer to target position
while (pivotIndex != _hipsIndex && pivotsParentIndex != -1) {
// compute the two lines that should be aligned
@ -312,9 +348,8 @@ int AnimInverseKinematics::solveTargetWithCCD(const IKTarget& target, AnimPoseVe
float angle = acosf(cosAngle);
const float MIN_ADJUSTMENT_ANGLE = 1.0e-4f;
if (angle > MIN_ADJUSTMENT_ANGLE) {
// reduce angle by a fraction (for stability)
const float STABILITY_FRACTION = 0.5f;
angle *= STABILITY_FRACTION;
// reduce angle by a flexCoefficient
angle *= target.getFlexCoefficient(chainDepth);
deltaRotation = glm::angleAxis(angle, axis);
// The swing will re-orient the tip but there will tend to be a non-zero delta between the tip's
@ -385,6 +420,8 @@ int AnimInverseKinematics::solveTargetWithCCD(const IKTarget& target, AnimPoseVe
pivotIndex = pivotsParentIndex;
pivotsParentIndex = _skeleton->getParentIndex(pivotIndex);
chainDepth++;
}
return lowestMovedIndex;
}
@ -399,6 +436,13 @@ const AnimPoseVec& AnimInverseKinematics::evaluate(const AnimVariantMap& animVar
//virtual
const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars, const AnimContext& context, float dt, Triggers& triggersOut, const AnimPoseVec& underPoses) {
// allows solutionSource to be overridden by an animVar
auto solutionSource = animVars.lookup(_solutionSourceVar, (int)_solutionSource);
if (context.getEnableDebugDrawIKConstraints()) {
debugDrawConstraints(context);
}
const float MAX_OVERLAY_DT = 1.0f / 30.0f; // what to clamp delta-time to in AnimInverseKinematics::overlay
if (dt > MAX_OVERLAY_DT) {
dt = MAX_OVERLAY_DT;
@ -410,25 +454,7 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
PROFILE_RANGE_EX(simulation_animation, "ik/relax", 0xffff00ff, 0);
// relax toward underPoses
// HACK: this relaxation needs to be constant per-frame rather than per-realtime
// in order to prevent IK "flutter" for bad FPS. The bad news is that the good parts
// of this relaxation will be FPS dependent (low FPS will make the limbs align slower
// in real-time), however most people will not notice this and this problem is less
// annoying than the flutter.
const float blend = (1.0f / 60.0f) / (0.25f); // effectively: dt / RELAXATION_TIMESCALE
int numJoints = (int)_relativePoses.size();
for (int i = 0; i < numJoints; ++i) {
float dotSign = copysignf(1.0f, glm::dot(_relativePoses[i].rot(), underPoses[i].rot()));
if (_accumulators[i].isDirty()) {
// this joint is affected by IK --> blend toward underPose rotation
_relativePoses[i].rot() = glm::normalize(glm::lerp(_relativePoses[i].rot(), dotSign * underPoses[i].rot(), blend));
} else {
// this joint is NOT affected by IK --> slam to underPose rotation
_relativePoses[i].rot() = underPoses[i].rot();
}
_relativePoses[i].trans() = underPoses[i].trans();
}
initRelativePosesFromSolutionSource((SolutionSource)solutionSource, underPoses);
if (!underPoses.empty()) {
// Sometimes the underpose itself can violate the constraints. Rather than
@ -604,9 +630,9 @@ void AnimInverseKinematics::clearIKJointLimitHistory() {
}
}
RotationConstraint* AnimInverseKinematics::getConstraint(int index) {
RotationConstraint* AnimInverseKinematics::getConstraint(int index) const {
RotationConstraint* constraint = nullptr;
std::map<int, RotationConstraint*>::iterator constraintItr = _constraints.find(index);
std::map<int, RotationConstraint*>::const_iterator constraintItr = _constraints.find(index);
if (constraintItr != _constraints.end()) {
constraint = constraintItr->second;
}
@ -622,17 +648,19 @@ void AnimInverseKinematics::clearConstraints() {
_constraints.clear();
}
// set up swing limits around a swingTwistConstraint in an ellipse, where lateralSwingTheta is the swing limit for lateral swings (side to side)
// anteriorSwingTheta is swing limit for forward and backward swings. (where x-axis of reference rotation is sideways and -z-axis is forward)
static void setEllipticalSwingLimits(SwingTwistConstraint* stConstraint, float lateralSwingTheta, float anteriorSwingTheta) {
// set up swing limits around a swingTwistConstraint in an ellipse, where lateralSwingPhi is the swing limit for lateral swings (side to side)
// anteriorSwingPhi is the swing limit for forward and backward swings (where the x-axis of the reference rotation is sideways and the -z-axis is forward)
static void setEllipticalSwingLimits(SwingTwistConstraint* stConstraint, float lateralSwingPhi, float anteriorSwingPhi) {
assert(stConstraint);
const int NUM_SUBDIVISIONS = 8;
const int NUM_SUBDIVISIONS = 16;
std::vector<float> minDots;
minDots.reserve(NUM_SUBDIVISIONS);
float dTheta = TWO_PI / NUM_SUBDIVISIONS;
float theta = 0.0f;
for (int i = 0; i < NUM_SUBDIVISIONS; i++) {
minDots.push_back(cosf(glm::length(glm::vec2(anteriorSwingTheta * cosf(theta), lateralSwingTheta * sinf(theta)))));
float theta_prime = atanf((lateralSwingPhi / anteriorSwingPhi) * tanf(theta));
float phi = (cosf(2.0f * theta_prime) * ((lateralSwingPhi - anteriorSwingPhi) / 2.0f)) + ((lateralSwingPhi + anteriorSwingPhi) / 2.0f);
minDots.push_back(cosf(phi));
theta += dTheta;
}
stConstraint->setSwingLimits(minDots);
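Note: a minimal standalone sketch (not part of this commit) that samples the elliptical swing limit the same way setEllipticalSwingLimits above does, using assumed example half-angles of PI/4 (lateral) and PI/8 (anterior); it prints the blended cone half-angle phi and the resulting minDot for each of the 16 azimuth subdivisions.

#include <cmath>
#include <cstdio>

int main() {
    const float PI = 3.14159265f;
    const float TWO_PI = 2.0f * PI;
    const float lateralSwingPhi = PI / 4.0f;    // assumed example limit (side to side)
    const float anteriorSwingPhi = PI / 8.0f;   // assumed example limit (forward/backward)
    const int NUM_SUBDIVISIONS = 16;
    float dTheta = TWO_PI / NUM_SUBDIVISIONS;
    float theta = 0.0f;
    for (int i = 0; i < NUM_SUBDIVISIONS; i++) {
        // remap the azimuth onto the ellipse, then blend between the two half-angles
        float thetaPrime = atanf((lateralSwingPhi / anteriorSwingPhi) * tanf(theta));
        float phi = cosf(2.0f * thetaPrime) * (lateralSwingPhi - anteriorSwingPhi) * 0.5f
                  + (lateralSwingPhi + anteriorSwingPhi) * 0.5f;
        printf("theta = %5.2f  phi = %5.2f  minDot = %6.3f\n", theta, phi, cosf(phi));
        theta += dTheta;
    }
    return 0;
}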
@ -640,7 +668,6 @@ static void setEllipticalSwingLimits(SwingTwistConstraint* stConstraint, float l
void AnimInverseKinematics::initConstraints() {
if (!_skeleton) {
return;
}
// We create constraints for the joints shown here
// (and their Left counterparts if applicable).
@ -744,30 +771,27 @@ void AnimInverseKinematics::initConstraints() {
std::vector<glm::vec3> swungDirections;
float deltaTheta = PI / 4.0f;
float theta = 0.0f;
swungDirections.push_back(glm::vec3(mirror * cosf(theta), 0.25f, sinf(theta)));
swungDirections.push_back(glm::vec3(mirror * cosf(theta), -0.25f, sinf(theta)));
theta += deltaTheta;
swungDirections.push_back(glm::vec3(mirror * cosf(theta), 0.0f, sinf(theta)));
theta += deltaTheta;
swungDirections.push_back(glm::vec3(mirror * cosf(theta), -0.25f, sinf(theta))); // posterior
swungDirections.push_back(glm::vec3(mirror * cosf(theta), 0.25f, sinf(theta))); // posterior
theta += deltaTheta;
swungDirections.push_back(glm::vec3(mirror * cosf(theta), 0.0f, sinf(theta)));
theta += deltaTheta;
swungDirections.push_back(glm::vec3(mirror * cosf(theta), 0.25f, sinf(theta)));
swungDirections.push_back(glm::vec3(mirror * cosf(theta), -0.25f, sinf(theta)));
theta += deltaTheta;
swungDirections.push_back(glm::vec3(mirror * cosf(theta), 0.5f, sinf(theta)));
swungDirections.push_back(glm::vec3(mirror * cosf(theta), -0.5f, sinf(theta)));
theta += deltaTheta;
swungDirections.push_back(glm::vec3(mirror * cosf(theta), 0.5f, sinf(theta))); // anterior
swungDirections.push_back(glm::vec3(mirror * cosf(theta), -0.5f, sinf(theta))); // anterior
theta += deltaTheta;
swungDirections.push_back(glm::vec3(mirror * cosf(theta), 0.5f, sinf(theta)));
swungDirections.push_back(glm::vec3(mirror * cosf(theta), -0.5f, sinf(theta)));
// rotate directions into joint-frame
glm::quat invAbsoluteRotation = glm::inverse(absolutePoses[i].rot());
int numDirections = (int)swungDirections.size();
for (int j = 0; j < numDirections; ++j) {
swungDirections[j] = invAbsoluteRotation * swungDirections[j];
std::vector<float> minDots;
for (size_t i = 0; i < swungDirections.size(); i++) {
minDots.push_back(glm::dot(glm::normalize(swungDirections[i]), Vectors::UNIT_Y));
}
stConstraint->setSwingLimits(swungDirections);
stConstraint->setSwingLimits(minDots);
constraint = static_cast<RotationConstraint*>(stConstraint);
} else if (0 == baseName.compare("Hand", Qt::CaseSensitive)) {
SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
@ -819,7 +843,7 @@ void AnimInverseKinematics::initConstraints() {
stConstraint->setTwistLimits(-MAX_SHOULDER_TWIST, MAX_SHOULDER_TWIST);
std::vector<float> minDots;
const float MAX_SHOULDER_SWING = PI / 20.0f;
const float MAX_SHOULDER_SWING = PI / 16.0f;
minDots.push_back(cosf(MAX_SHOULDER_SWING));
stConstraint->setSwingLimits(minDots);
@ -957,6 +981,32 @@ void AnimInverseKinematics::initConstraints() {
}
}
void AnimInverseKinematics::initLimitCenterPoses() {
assert(_skeleton);
_limitCenterPoses.reserve(_skeleton->getNumJoints());
for (int i = 0; i < _skeleton->getNumJoints(); i++) {
AnimPose pose = _skeleton->getRelativeDefaultPose(i);
RotationConstraint* constraint = getConstraint(i);
if (constraint) {
pose.rot() = constraint->computeCenterRotation();
}
_limitCenterPoses.push_back(pose);
}
// The limit center rotations for the LeftArm and RightArm form a t-pose.
// In order for the elbows to look more natural, we rotate them down by the avatar's sides
const float UPPER_ARM_THETA = PI / 3.0f; // 60 deg
int leftArmIndex = _skeleton->nameToJointIndex("LeftArm");
const glm::quat armRot = glm::angleAxis(UPPER_ARM_THETA, Vectors::UNIT_X);
if (leftArmIndex >= 0 && leftArmIndex < (int)_limitCenterPoses.size()) {
_limitCenterPoses[leftArmIndex].rot() = _limitCenterPoses[leftArmIndex].rot() * armRot;
}
int rightArmIndex = _skeleton->nameToJointIndex("RightArm");
if (rightArmIndex >= 0 && rightArmIndex < (int)_limitCenterPoses.size()) {
_limitCenterPoses[rightArmIndex].rot() = _limitCenterPoses[rightArmIndex].rot() * armRot;
}
}
void AnimInverseKinematics::setSkeletonInternal(AnimSkeleton::ConstPointer skeleton) {
AnimNode::setSkeletonInternal(skeleton);
@ -973,6 +1023,7 @@ void AnimInverseKinematics::setSkeletonInternal(AnimSkeleton::ConstPointer skele
if (skeleton) {
initConstraints();
initLimitCenterPoses();
_headIndex = _skeleton->nameToJointIndex("Head");
_hipsIndex = _skeleton->nameToJointIndex("Hips");
@ -989,3 +1040,170 @@ void AnimInverseKinematics::setSkeletonInternal(AnimSkeleton::ConstPointer skele
_hipsParentIndex = -1;
}
}
static glm::vec3 sphericalToCartesian(float phi, float theta) {
float cos_phi = cosf(phi);
float sin_phi = sinf(phi);
return glm::vec3(sin_phi * cosf(theta), cos_phi, -sin_phi * sinf(theta));
}
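Note: a quick standalone check (not part of this commit) of the spherical convention used by sphericalToCartesian above: phi is measured down from the +Y pole and theta sweeps around it, so phi = 0 maps to UNIT_Y and phi = PI/2, theta = 0 maps to UNIT_X.

#include <cmath>
#include <cstdio>
#include <glm/glm.hpp>

static glm::vec3 sphericalToCartesian(float phi, float theta) {
    float cos_phi = cosf(phi);
    float sin_phi = sinf(phi);
    return glm::vec3(sin_phi * cosf(theta), cos_phi, -sin_phi * sinf(theta));
}

int main() {
    glm::vec3 pole = sphericalToCartesian(0.0f, 0.0f);           // expect (0, 1, 0)
    glm::vec3 equator = sphericalToCartesian(1.5707963f, 0.0f);  // expect (1, 0, 0)
    printf("pole    = (%.2f, %.2f, %.2f)\n", pole.x, pole.y, pole.z);
    printf("equator = (%.2f, %.2f, %.2f)\n", equator.x, equator.y, equator.z);
    return 0;
}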
void AnimInverseKinematics::debugDrawConstraints(const AnimContext& context) const {
if (_skeleton) {
const vec4 RED(1.0f, 0.0f, 0.0f, 1.0f);
const vec4 GREEN(0.0f, 1.0f, 0.0f, 1.0f);
const vec4 BLUE(0.0f, 0.0f, 1.0f, 1.0f);
const vec4 PURPLE(0.5f, 0.0f, 1.0f, 1.0f);
const vec4 CYAN(0.0f, 1.0f, 1.0f, 1.0f);
const vec4 GRAY(0.2f, 0.2f, 0.2f, 1.0f);
const vec4 MAGENTA(1.0f, 0.0f, 1.0f, 1.0f);
const float AXIS_LENGTH = 2.0f; // cm
const float TWIST_LENGTH = 4.0f; // cm
const float HINGE_LENGTH = 6.0f; // cm
const float SWING_LENGTH = 5.0f; // cm
AnimPoseVec poses = _skeleton->getRelativeDefaultPoses();
// copy reference rotations into the relative poses
for (int i = 0; i < (int)poses.size(); i++) {
const RotationConstraint* constraint = getConstraint(i);
if (constraint) {
poses[i].rot() = constraint->getReferenceRotation();
}
}
// convert relative poses to absolute
_skeleton->convertRelativePosesToAbsolute(poses);
mat4 geomToWorldMatrix = context.getRigToWorldMatrix() * context.getGeometryToRigMatrix();
// draw each pose and constraint
for (int i = 0; i < (int)poses.size(); i++) {
// transform local axes into world space.
auto pose = poses[i];
glm::vec3 xAxis = transformVectorFast(geomToWorldMatrix, pose.rot() * Vectors::UNIT_X);
glm::vec3 yAxis = transformVectorFast(geomToWorldMatrix, pose.rot() * Vectors::UNIT_Y);
glm::vec3 zAxis = transformVectorFast(geomToWorldMatrix, pose.rot() * Vectors::UNIT_Z);
glm::vec3 pos = transformPoint(geomToWorldMatrix, pose.trans());
DebugDraw::getInstance().drawRay(pos, pos + AXIS_LENGTH * xAxis, RED);
DebugDraw::getInstance().drawRay(pos, pos + AXIS_LENGTH * yAxis, GREEN);
DebugDraw::getInstance().drawRay(pos, pos + AXIS_LENGTH * zAxis, BLUE);
// draw line to parent
int parentIndex = _skeleton->getParentIndex(i);
if (parentIndex != -1) {
glm::vec3 parentPos = transformPoint(geomToWorldMatrix, poses[parentIndex].trans());
DebugDraw::getInstance().drawRay(pos, parentPos, GRAY);
}
glm::quat parentAbsRot;
if (parentIndex != -1) {
parentAbsRot = poses[parentIndex].rot();
}
const RotationConstraint* constraint = getConstraint(i);
if (constraint) {
glm::quat refRot = constraint->getReferenceRotation();
const ElbowConstraint* elbowConstraint = dynamic_cast<const ElbowConstraint*>(constraint);
if (elbowConstraint) {
glm::vec3 hingeAxis = transformVectorFast(geomToWorldMatrix, parentAbsRot * refRot * elbowConstraint->getHingeAxis());
DebugDraw::getInstance().drawRay(pos, pos + HINGE_LENGTH * hingeAxis, MAGENTA);
// draw elbow constraints
glm::quat minRot = glm::angleAxis(elbowConstraint->getMinAngle(), elbowConstraint->getHingeAxis());
glm::quat maxRot = glm::angleAxis(elbowConstraint->getMaxAngle(), elbowConstraint->getHingeAxis());
const int NUM_SWING_STEPS = 10;
for (int i = 0; i < NUM_SWING_STEPS + 1; i++) {
glm::quat rot = glm::normalize(glm::lerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS)));
glm::vec3 axis = transformVectorFast(geomToWorldMatrix, parentAbsRot * rot * refRot * Vectors::UNIT_Y);
DebugDraw::getInstance().drawRay(pos, pos + TWIST_LENGTH * axis, CYAN);
}
} else {
const SwingTwistConstraint* swingTwistConstraint = dynamic_cast<const SwingTwistConstraint*>(constraint);
if (swingTwistConstraint) {
// twist constraints
glm::vec3 hingeAxis = transformVectorFast(geomToWorldMatrix, parentAbsRot * refRot * Vectors::UNIT_Y);
DebugDraw::getInstance().drawRay(pos, pos + HINGE_LENGTH * hingeAxis, MAGENTA);
glm::quat minRot = glm::angleAxis(swingTwistConstraint->getMinTwist(), Vectors::UNIT_Y);
glm::quat maxRot = glm::angleAxis(swingTwistConstraint->getMaxTwist(), Vectors::UNIT_Y);
const int NUM_SWING_STEPS = 10;
for (int i = 0; i < NUM_SWING_STEPS + 1; i++) {
glm::quat rot = glm::normalize(glm::lerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS)));
glm::vec3 axis = transformVectorFast(geomToWorldMatrix, parentAbsRot * rot * refRot * Vectors::UNIT_X);
DebugDraw::getInstance().drawRay(pos, pos + TWIST_LENGTH * axis, CYAN);
}
// draw swing constraints.
const size_t NUM_MIN_DOTS = swingTwistConstraint->getMinDots().size();
const float D_THETA = TWO_PI / (NUM_MIN_DOTS - 1);
float theta = 0.0f;
for (size_t i = 0, j = NUM_MIN_DOTS - 2; i < NUM_MIN_DOTS - 1; j = i, i++, theta += D_THETA) {
// compute swing rotation from theta and phi angles.
float phi = acosf(swingTwistConstraint->getMinDots()[i]);
glm::vec3 swungAxis = sphericalToCartesian(phi, theta);
glm::vec3 worldSwungAxis = transformVectorFast(geomToWorldMatrix, parentAbsRot * refRot * swungAxis);
glm::vec3 swingTip = pos + SWING_LENGTH * worldSwungAxis;
float prevPhi = acosf(swingTwistConstraint->getMinDots()[j]);
float prevTheta = theta - D_THETA;
glm::vec3 prevSwungAxis = sphericalToCartesian(prevPhi, prevTheta);
glm::vec3 prevWorldSwungAxis = transformVectorFast(geomToWorldMatrix, parentAbsRot * refRot * prevSwungAxis);
glm::vec3 prevSwingTip = pos + SWING_LENGTH * prevWorldSwungAxis;
DebugDraw::getInstance().drawRay(pos, swingTip, PURPLE);
DebugDraw::getInstance().drawRay(prevSwingTip, swingTip, PURPLE);
}
}
}
pose.rot() = constraint->computeCenterRotation();
}
}
}
}
// for bones under IK, blend from the previous solution (_relativePoses) toward targetPoses.
// for bones NOT under IK, copy directly from underPoses.
// mutates _relativePoses.
void AnimInverseKinematics::blendToPoses(const AnimPoseVec& targetPoses, const AnimPoseVec& underPoses, float blendFactor) {
// relax toward poses
int numJoints = (int)_relativePoses.size();
for (int i = 0; i < numJoints; ++i) {
float dotSign = copysignf(1.0f, glm::dot(_relativePoses[i].rot(), targetPoses[i].rot()));
if (_accumulators[i].isDirty()) {
// this joint is affected by IK --> blend toward the targetPoses rotation
_relativePoses[i].rot() = glm::normalize(glm::lerp(_relativePoses[i].rot(), dotSign * targetPoses[i].rot(), blendFactor));
} else {
// this joint is NOT affected by IK --> slam to underPoses rotation
_relativePoses[i].rot() = underPoses[i].rot();
}
_relativePoses[i].trans() = underPoses[i].trans();
}
}
void AnimInverseKinematics::initRelativePosesFromSolutionSource(SolutionSource solutionSource, const AnimPoseVec& underPoses) {
const float RELAX_BLEND_FACTOR = (1.0f / 16.0f);
const float COPY_BLEND_FACTOR = 1.0f;
switch (solutionSource) {
default:
case SolutionSource::RelaxToUnderPoses:
blendToPoses(underPoses, underPoses, RELAX_BLEND_FACTOR);
break;
case SolutionSource::RelaxToLimitCenterPoses:
blendToPoses(_limitCenterPoses, underPoses, RELAX_BLEND_FACTOR);
break;
case SolutionSource::PreviousSolution:
// do nothing... _relativePoses is already the previous solution
break;
case SolutionSource::UnderPoses:
_relativePoses = underPoses;
break;
case SolutionSource::LimitCenterPoses:
// essentially copy limitCenterPoses over to _relativePoses.
blendToPoses(_limitCenterPoses, underPoses, COPY_BLEND_FACTOR);
break;
}
}
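Note: rough convergence math for the RELAX_BLEND_FACTOR of 1/16 above, as a sketch under the simplifying assumption that the per-frame nlerp behaves like an exponential decay: the fraction of the remaining offset after N frames is about (1 - 1/16)^N, so reaching ~95% of the target takes roughly 46 frames.

#include <cmath>
#include <cstdio>

int main() {
    const float RELAX_BLEND_FACTOR = 1.0f / 16.0f;
    // frames needed until only 5% of the original offset remains,
    // assuming each frame removes a constant fraction of what is left
    float framesTo95Percent = logf(0.05f) / logf(1.0f - RELAX_BLEND_FACTOR);
    printf("~%.0f frames (about %.1f s at 60 fps)\n", framesTo95Percent, framesTo95Percent / 60.0f);
    return 0;
}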

View file

@ -32,7 +32,8 @@ public:
void loadPoses(const AnimPoseVec& poses);
void computeAbsolutePoses(AnimPoseVec& absolutePoses) const;
void setTargetVars(const QString& jointName, const QString& positionVar, const QString& rotationVar, const QString& typeVar);
void setTargetVars(const QString& jointName, const QString& positionVar, const QString& rotationVar,
const QString& typeVar, const QString& weightVar, float weight, const std::vector<float>& flexCoefficients);
virtual const AnimPoseVec& evaluate(const AnimVariantMap& animVars, const AnimContext& context, float dt, AnimNode::Triggers& triggersOut) override;
virtual const AnimPoseVec& overlay(const AnimVariantMap& animVars, const AnimContext& context, float dt, Triggers& triggersOut, const AnimPoseVec& underPoses) override;
@ -43,40 +44,54 @@ public:
float getMaxErrorOnLastSolve() { return _maxErrorOnLastSolve; }
enum class SolutionSource {
RelaxToUnderPoses = 0,
RelaxToLimitCenterPoses,
PreviousSolution,
UnderPoses,
LimitCenterPoses,
NumSolutionSources,
};
void setSolutionSource(SolutionSource solutionSource) { _solutionSource = solutionSource; }
void setSolutionSourceVar(const QString& solutionSourceVar) { _solutionSourceVar = solutionSourceVar; }
protected:
void computeTargets(const AnimVariantMap& animVars, std::vector<IKTarget>& targets, const AnimPoseVec& underPoses);
void solveWithCyclicCoordinateDescent(const std::vector<IKTarget>& targets);
int solveTargetWithCCD(const IKTarget& target, AnimPoseVec& absolutePoses);
virtual void setSkeletonInternal(AnimSkeleton::ConstPointer skeleton) override;
void debugDrawConstraints(const AnimContext& context) const;
void initRelativePosesFromSolutionSource(SolutionSource solutionSource, const AnimPoseVec& underPose);
void blendToPoses(const AnimPoseVec& targetPoses, const AnimPoseVec& underPose, float blendFactor);
// for AnimDebugDraw rendering
virtual const AnimPoseVec& getPosesInternal() const override { return _relativePoses; }
RotationConstraint* getConstraint(int index);
RotationConstraint* getConstraint(int index) const;
void clearConstraints();
void initConstraints();
void initLimitCenterPoses();
void computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt);
// no copies
AnimInverseKinematics(const AnimInverseKinematics&) = delete;
AnimInverseKinematics& operator=(const AnimInverseKinematics&) = delete;
enum FlexCoefficients { MAX_FLEX_COEFFICIENTS = 10 };
struct IKTargetVar {
IKTargetVar(const QString& jointNameIn,
const QString& positionVarIn,
const QString& rotationVarIn,
const QString& typeVarIn) :
positionVar(positionVarIn),
rotationVar(rotationVarIn),
typeVar(typeVarIn),
jointName(jointNameIn),
jointIndex(-1)
{}
IKTargetVar(const QString& jointNameIn, const QString& positionVarIn, const QString& rotationVarIn,
const QString& typeVarIn, const QString& weightVarIn, float weightIn, const std::vector<float>& flexCoefficientsIn);
IKTargetVar(const IKTargetVar& orig);
QString jointName;
QString positionVar;
QString rotationVar;
QString typeVar;
QString jointName;
QString weightVar;
float weight;
float flexCoefficients[MAX_FLEX_COEFFICIENTS];
size_t numFlexCoefficients;
int jointIndex; // cached joint index
};
@ -85,6 +100,7 @@ protected:
std::vector<IKTargetVar> _targetVarVec;
AnimPoseVec _defaultRelativePoses; // poses of the relaxed state
AnimPoseVec _relativePoses; // current relative poses
AnimPoseVec _limitCenterPoses; // relative
// experimental data for moving hips during IK
glm::vec3 _hipsOffset { Vectors::ZERO };
@ -100,6 +116,8 @@ protected:
float _maxErrorOnLastSolve { FLT_MAX };
bool _previousEnableDebugIKTargets { false };
SolutionSource _solutionSource { SolutionSource::RelaxToUnderPoses };
QString _solutionSourceVar;
};
#endif // hifi_AnimInverseKinematics_h

View file

@ -173,6 +173,13 @@ static NodeProcessFunc animNodeTypeToProcessFunc(AnimNode::Type type) {
} \
float NAME = (float)NAME##_VAL.toDouble()
#define READ_OPTIONAL_FLOAT(NAME, JSON_OBJ, DEFAULT) \
auto NAME##_VAL = JSON_OBJ.value(#NAME); \
float NAME = (float)DEFAULT; \
if (NAME##_VAL.isDouble()) { \
NAME = (float)NAME##_VAL.toDouble(); \
} \
do {} while (0)
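Note: for illustration, the call READ_OPTIONAL_FLOAT(weight, targetObj, 1.0f) used further down expands to roughly the following; the trailing do {} while (0) lets the call site supply the terminating semicolon. This is a sketch, not project code, and the empty targetObj is a hypothetical input.

#include <QJsonObject>
#include <QJsonValue>
#include <cstdio>

int main() {
    QJsonObject targetObj;               // hypothetical target object with no "weight" key
    // expansion of READ_OPTIONAL_FLOAT(weight, targetObj, 1.0f):
    auto weight_VAL = targetObj.value("weight");
    float weight = (float)1.0f;
    if (weight_VAL.isDouble()) {
        weight = (float)weight_VAL.toDouble();
    }
    printf("weight = %f\n", weight);     // falls back to the default, 1.0
    return 0;
}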
static AnimNode::Pointer loadNode(const QJsonObject& jsonObj, const QUrl& jsonUrl) {
auto idVal = jsonObj.value("id");
@ -352,6 +359,23 @@ static AnimOverlay::BoneSet stringToBoneSetEnum(const QString& str) {
return AnimOverlay::NumBoneSets;
}
static const char* solutionSourceStrings[(int)AnimInverseKinematics::SolutionSource::NumSolutionSources] = {
"relaxToUnderPoses",
"relaxToLimitCenterPoses",
"previousSolution",
"underPoses",
"limitCenterPoses"
};
static AnimInverseKinematics::SolutionSource stringToSolutionSourceEnum(const QString& str) {
for (int i = 0; i < (int)AnimInverseKinematics::SolutionSource::NumSolutionSources; i++) {
if (str == solutionSourceStrings[i]) {
return (AnimInverseKinematics::SolutionSource)i;
}
}
return AnimInverseKinematics::SolutionSource::NumSolutionSources;
}
static AnimNode::Pointer loadOverlayNode(const QJsonObject& jsonObj, const QString& id, const QUrl& jsonUrl) {
READ_STRING(boneSet, jsonObj, id, jsonUrl, nullptr);
@ -453,10 +477,40 @@ AnimNode::Pointer loadInverseKinematicsNode(const QJsonObject& jsonObj, const QS
READ_STRING(positionVar, targetObj, id, jsonUrl, nullptr);
READ_STRING(rotationVar, targetObj, id, jsonUrl, nullptr);
READ_OPTIONAL_STRING(typeVar, targetObj);
READ_OPTIONAL_STRING(weightVar, targetObj);
READ_OPTIONAL_FLOAT(weight, targetObj, 1.0f);
node->setTargetVars(jointName, positionVar, rotationVar, typeVar);
auto flexCoefficientsValue = targetObj.value("flexCoefficients");
if (!flexCoefficientsValue.isArray()) {
qCCritical(animation) << "AnimNodeLoader, bad or missing flexCoefficients array in \"targets\", id =" << id << ", url =" << jsonUrl.toDisplayString();
return nullptr;
}
auto flexCoefficientsArray = flexCoefficientsValue.toArray();
std::vector<float> flexCoefficients;
for (const auto& value : flexCoefficientsArray) {
flexCoefficients.push_back((float)value.toDouble());
}
node->setTargetVars(jointName, positionVar, rotationVar, typeVar, weightVar, weight, flexCoefficients);
};
READ_OPTIONAL_STRING(solutionSource, jsonObj);
if (!solutionSource.isEmpty()) {
AnimInverseKinematics::SolutionSource solutionSourceType = stringToSolutionSourceEnum(solutionSource);
if (solutionSourceType != AnimInverseKinematics::SolutionSource::NumSolutionSources) {
node->setSolutionSource(solutionSourceType);
} else {
qCWarning(animation) << "AnimNodeLoader, bad solutionSourceType in \"solutionSource\", id = " << id << ", url = " << jsonUrl.toDisplayString();
}
}
READ_OPTIONAL_STRING(solutionSourceVar, jsonObj);
if (!solutionSourceVar.isEmpty()) {
node->setSolutionSourceVar(solutionSourceVar);
}
return node;
}

View file

@ -39,7 +39,7 @@ glm::vec3 AnimPose::xformPoint(const glm::vec3& rhs) const {
return *this * rhs;
}
// really slow
// really slow, but accurate for transforms with non-uniform scale
glm::vec3 AnimPose::xformVector(const glm::vec3& rhs) const {
glm::vec3 xAxis = _rot * glm::vec3(_scale.x, 0.0f, 0.0f);
glm::vec3 yAxis = _rot * glm::vec3(0.0f, _scale.y, 0.0f);
@ -49,6 +49,11 @@ glm::vec3 AnimPose::xformVector(const glm::vec3& rhs) const {
return transInvMat * rhs;
}
// faster, but does not handle non-uniform scale correctly.
glm::vec3 AnimPose::xformVectorFast(const glm::vec3& rhs) const {
return _rot * (_scale * rhs);
}
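Note: a small sketch (assumed example values, not part of this commit) of why the two variants differ under non-uniform scale. It assumes transInvMat in xformVector is the inverse transpose of the scaled basis, which is the standard way to transform directions and matches the name used above; xformVectorFast simply scales and rotates.

#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

int main() {
    glm::quat rot(1.0f, 0.0f, 0.0f, 0.0f);   // identity rotation, for clarity
    glm::vec3 scale(1.0f, 2.0f, 1.0f);       // non-uniform scale (assumed example)
    glm::vec3 v(0.0f, 1.0f, 1.0f);

    // accurate path: inverse transpose of the scaled basis
    glm::vec3 xAxis = rot * glm::vec3(scale.x, 0.0f, 0.0f);
    glm::vec3 yAxis = rot * glm::vec3(0.0f, scale.y, 0.0f);
    glm::vec3 zAxis = rot * glm::vec3(0.0f, 0.0f, scale.z);
    glm::mat3 transInvMat = glm::inverse(glm::transpose(glm::mat3(xAxis, yAxis, zAxis)));
    glm::vec3 accurate = transInvMat * v;

    // fast path: rotate the scaled vector
    glm::vec3 fast = rot * (scale * v);

    printf("accurate = (%.2f, %.2f, %.2f)\n", accurate.x, accurate.y, accurate.z);  // (0.00, 0.50, 1.00)
    printf("fast     = (%.2f, %.2f, %.2f)\n", fast.x, fast.y, fast.z);              // (0.00, 2.00, 1.00)
    return 0;
}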
AnimPose AnimPose::operator*(const AnimPose& rhs) const {
glm::mat4 result;
glm_mat4u_mul(*this, rhs, result);

View file

@ -25,7 +25,8 @@ public:
static const AnimPose identity;
glm::vec3 xformPoint(const glm::vec3& rhs) const;
glm::vec3 xformVector(const glm::vec3& rhs) const; // really slow
glm::vec3 xformVector(const glm::vec3& rhs) const; // really slow, but accurate for transforms with non-uniform scale
glm::vec3 xformVectorFast(const glm::vec3& rhs) const; // faster, but does not handle non-uniform scale correctly.
glm::vec3 operator*(const glm::vec3& rhs) const; // same as xformPoint
AnimPose operator*(const AnimPose& rhs) const;

View file

@ -33,6 +33,23 @@ void blend(size_t numPoses, const AnimPose* a, const AnimPose* b, float alpha, A
}
}
glm::quat averageQuats(size_t numQuats, const glm::quat* quats) {
if (numQuats == 0) {
return glm::quat();
}
glm::quat accum = quats[0];
glm::quat firstRot = quats[0];
for (size_t i = 1; i < numQuats; i++) {
glm::quat rot = quats[i];
float dot = glm::dot(firstRot, rot);
if (dot < 0.0f) {
rot = -rot;
}
accum += rot;
}
return glm::normalize(accum);
}
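Note: a short usage sketch (hypothetical inputs, not part of this commit) of averageQuats: each quaternion is flipped into the hemisphere of the first one before summing, so q and -q count as the same rotation, and for two inputs the normalized sum is the rotation halfway between them. The helper is repeated here, mirroring the implementation above, so the sketch compiles on its own.

#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/constants.hpp>

static glm::quat averageQuats(size_t numQuats, const glm::quat* quats) {
    if (numQuats == 0) {
        return glm::quat(1.0f, 0.0f, 0.0f, 0.0f);   // identity
    }
    glm::quat accum = quats[0];
    glm::quat firstRot = quats[0];
    for (size_t i = 1; i < numQuats; i++) {
        glm::quat rot = quats[i];
        if (glm::dot(firstRot, rot) < 0.0f) {
            rot = -rot;   // flip into the same hemisphere as the first rotation
        }
        accum += rot;
    }
    return glm::normalize(accum);
}

int main() {
    glm::quat rotations[2] = {
        glm::quat(1.0f, 0.0f, 0.0f, 0.0f),                                   // identity
        glm::angleAxis(glm::half_pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f))   // 90 degrees about Y
    };
    glm::quat center = averageQuats(2, rotations);
    glm::vec3 axis = glm::axis(center);
    // for two inputs the normalized sum is the halfway rotation: ~45 degrees about Y
    printf("%.1f degrees about (%.2f, %.2f, %.2f)\n",
           glm::degrees(glm::angle(center)), axis.x, axis.y, axis.z);
    return 0;
}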
float accumulateTime(float startFrame, float endFrame, float timeScale, float currentFrame, float dt, bool loopFlag,
const QString& id, AnimNode::Triggers& triggersOut) {

View file

@ -16,9 +16,9 @@
// this is where the magic happens
void blend(size_t numPoses, const AnimPose* a, const AnimPose* b, float alpha, AnimPose* result);
glm::quat averageQuats(size_t numQuats, const glm::quat* quats);
float accumulateTime(float startFrame, float endFrame, float timeScale, float currentFrame, float dt, bool loopFlag,
const QString& id, AnimNode::Triggers& triggersOut);
#endif

View file

@ -13,6 +13,7 @@
#include <GeometryUtil.h>
#include <NumericalConstants.h>
#include "AnimUtil.h"
ElbowConstraint::ElbowConstraint() :
_minAngle(-PI),
@ -77,3 +78,10 @@ bool ElbowConstraint::apply(glm::quat& rotation) const {
return false;
}
glm::quat ElbowConstraint::computeCenterRotation() const {
const size_t NUM_LIMITS = 2;
glm::quat limits[NUM_LIMITS];
limits[0] = glm::angleAxis(_minAngle, _axis) * _referenceRotation;
limits[1] = glm::angleAxis(_maxAngle, _axis) * _referenceRotation;
return averageQuats(NUM_LIMITS, limits);
}
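Note: as a concrete example with hypothetical limits (not taken from the commit): with _minAngle = -PI/2 and _maxAngle = +PI/6 about the hinge axis, the two limit rotations are -90 and +30 degrees, and the normalized quaternion average bisects them, so computeCenterRotation returns roughly a -30 degree rotation about the hinge composed with _referenceRotation.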

View file

@ -18,6 +18,12 @@ public:
void setHingeAxis(const glm::vec3& axis);
void setAngleLimits(float minAngle, float maxAngle);
virtual bool apply(glm::quat& rotation) const override;
virtual glm::quat computeCenterRotation() const override;
glm::vec3 getHingeAxis() const { return _axis; }
float getMinAngle() const { return _minAngle; }
float getMaxAngle() const { return _maxAngle; }
protected:
glm::vec3 _axis;
glm::vec3 _perpAxis;

View file

@ -14,6 +14,23 @@ void IKTarget::setPose(const glm::quat& rotation, const glm::vec3& translation)
_pose.trans() = translation;
}
void IKTarget::setFlexCoefficients(size_t numFlexCoefficientsIn, const float* flexCoefficientsIn) {
_numFlexCoefficients = std::min(numFlexCoefficientsIn, (size_t)MAX_FLEX_COEFFICIENTS);
for (size_t i = 0; i < _numFlexCoefficients; i++) {
_flexCoefficients[i] = flexCoefficientsIn[i];
}
}
float IKTarget::getFlexCoefficient(size_t chainDepth) const {
const float DEFAULT_FLEX_COEFFICIENT = 0.5f;
if (chainDepth < _numFlexCoefficients) {
return _flexCoefficients[chainDepth];
} else {
return DEFAULT_FLEX_COEFFICIENT;
}
}
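Note: a small sketch (hypothetical values, not part of this commit) of how the per-chain-depth lookup behaves: coefficients are indexed by how far the CCD loop has descended from the tip, and depths beyond the supplied list fall back to the 0.5 default. The stand-in struct mirrors the storage and lookup shown above.

#include <algorithm>
#include <cstdio>

struct FlexTarget {
    enum { MAX_FLEX_COEFFICIENTS = 10 };
    float flexCoefficients[MAX_FLEX_COEFFICIENTS];
    size_t numFlexCoefficients { 0 };

    void setFlexCoefficients(size_t numIn, const float* in) {
        numFlexCoefficients = std::min(numIn, (size_t)MAX_FLEX_COEFFICIENTS);
        for (size_t i = 0; i < numFlexCoefficients; i++) {
            flexCoefficients[i] = in[i];
        }
    }
    float getFlexCoefficient(size_t chainDepth) const {
        const float DEFAULT_FLEX_COEFFICIENT = 0.5f;
        return chainDepth < numFlexCoefficients ? flexCoefficients[chainDepth] : DEFAULT_FLEX_COEFFICIENT;
    }
};

int main() {
    FlexTarget target;
    float coefficients[] = { 1.0f, 0.5f, 0.25f };   // hypothetical per-depth damping
    target.setFlexCoefficients(3, coefficients);
    for (size_t depth = 0; depth < 5; depth++) {
        // depths 0..2 use the supplied values, 3 and 4 fall back to 0.5
        printf("chainDepth %zu -> %.2f\n", depth, target.getFlexCoefficient(depth));
    }
    return 0;
}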
void IKTarget::setType(int type) {
switch (type) {
case (int)Type::RotationAndPosition:

View file

@ -35,15 +35,21 @@ public:
void setPose(const glm::quat& rotation, const glm::vec3& translation);
void setIndex(int index) { _index = index; }
void setType(int);
void setFlexCoefficients(size_t numFlexCoefficientsIn, const float* flexCoefficientsIn);
float getFlexCoefficient(size_t chainDepth) const;
// HACK: give HmdHead targets more "weight" during IK algorithm
float getWeight() const { return _type == Type::HmdHead ? HACK_HMD_TARGET_WEIGHT : 1.0f; }
void setWeight(float weight) { _weight = weight; }
float getWeight() const { return _weight; }
enum FlexCoefficients { MAX_FLEX_COEFFICIENTS = 10 };
private:
AnimPose _pose;
int _index{-1};
Type _type{Type::RotationAndPosition};
float _weight;
float _flexCoefficients[MAX_FLEX_COEFFICIENTS];
size_t _numFlexCoefficients;
};
#endif // hifi_IKTarget_h

View file

@ -305,30 +305,35 @@ void Rig::clearJointAnimationPriority(int index) {
}
}
void Rig::clearIKJointLimitHistory() {
std::shared_ptr<AnimInverseKinematics> Rig::getAnimInverseKinematicsNode() const {
std::shared_ptr<AnimInverseKinematics> result;
if (_animNode) {
_animNode->traverse([&](AnimNode::Pointer node) {
// look for the AnimInverseKinematics node.
auto ikNode = std::dynamic_pointer_cast<AnimInverseKinematics>(node);
if (ikNode) {
ikNode->clearIKJointLimitHistory();
result = ikNode;
return false;
} else {
return true;
}
return true;
});
}
return result;
}
void Rig::clearIKJointLimitHistory() {
auto ikNode = getAnimInverseKinematicsNode();
if (ikNode) {
ikNode->clearIKJointLimitHistory();
}
}
void Rig::setMaxHipsOffsetLength(float maxLength) {
_maxHipsOffsetLength = maxLength;
if (_animNode) {
_animNode->traverse([&](AnimNode::Pointer node) {
auto ikNode = std::dynamic_pointer_cast<AnimInverseKinematics>(node);
if (ikNode) {
ikNode->setMaxHipsOffsetLength(_maxHipsOffsetLength);
}
return true;
});
auto ikNode = getAnimInverseKinematicsNode();
if (ikNode) {
ikNode->setMaxHipsOffsetLength(_maxHipsOffsetLength);
}
}
@ -936,7 +941,7 @@ void Rig::updateAnimationStateHandlers() { // called on avatar update thread (wh
}
}
void Rig::updateAnimations(float deltaTime, glm::mat4 rootTransform) {
void Rig::updateAnimations(float deltaTime, const glm::mat4& rootTransform, const glm::mat4& rigToWorldTransform) {
PROFILE_RANGE_EX(simulation_animation_detail, __FUNCTION__, 0xffff00ff, 0);
PerformanceTimer perfTimer("updateAnimations");
@ -949,7 +954,8 @@ void Rig::updateAnimations(float deltaTime, glm::mat4 rootTransform) {
updateAnimationStateHandlers();
_animVars.setRigToGeometryTransform(_rigToGeometryTransform);
AnimContext context(_enableDebugDrawIKTargets, getGeometryToRigTransform());
AnimContext context(_enableDebugDrawIKTargets, _enableDebugDrawIKConstraints,
getGeometryToRigTransform(), rigToWorldTransform);
// evaluate the animation
AnimNode::Triggers triggersOut;
@ -1025,10 +1031,12 @@ void Rig::updateFromHeadParameters(const HeadParameters& params, float dt) {
_animVars.set("notIsTalking", !params.isTalking);
if (params.hipsEnabled) {
_animVars.set("solutionSource", (int)AnimInverseKinematics::SolutionSource::RelaxToLimitCenterPoses);
_animVars.set("hipsType", (int)IKTarget::Type::RotationAndPosition);
_animVars.set("hipsPosition", extractTranslation(params.hipsMatrix));
_animVars.set("hipsRotation", glmExtractRotation(params.hipsMatrix));
} else {
_animVars.set("solutionSource", (int)AnimInverseKinematics::SolutionSource::RelaxToUnderPoses);
_animVars.set("hipsType", (int)IKTarget::Type::Unknown);
}
@ -1080,10 +1088,12 @@ void Rig::updateHeadAnimVars(const HeadParameters& params) {
// Since there is an explicit hips ik target, switch the head to use the more generic RotationAndPosition IK chain type.
// this will allow the spine to bend more, ensuring that it can reach the head target position.
_animVars.set("headType", (int)IKTarget::Type::RotationAndPosition);
_animVars.unset("headWeight"); // use the default weight for this target.
} else {
// When there is no hips IK target, use the HmdHead IK chain type. This will make the spine very stiff,
// but because the IK _hipsOffset is enabled, the hips will naturally follow underneath the head.
_animVars.set("headType", (int)IKTarget::Type::HmdHead);
_animVars.set("headWeight", 8.0f);
}
} else {
_animVars.unset("headPosition");
@ -1392,22 +1402,24 @@ void Rig::computeAvatarBoundingCapsule(
AnimInverseKinematics ikNode("boundingShape");
ikNode.setSkeleton(_animSkeleton);
// AJT: FIX ME!!!!! ensure that empty weights vector does something reasonable....
ikNode.setTargetVars("LeftHand",
"leftHandPosition",
"leftHandRotation",
"leftHandType");
"leftHandType", "leftHandWeight", 1.0f, {});
ikNode.setTargetVars("RightHand",
"rightHandPosition",
"rightHandRotation",
"rightHandType");
"rightHandType", "rightHandWeight", 1.0f, {});
ikNode.setTargetVars("LeftFoot",
"leftFootPosition",
"leftFootRotation",
"leftFootType");
"leftFootType", "leftFootWeight", 1.0f, {});
ikNode.setTargetVars("RightFoot",
"rightFootPosition",
"rightFootRotation",
"rightFootType");
"rightFootType", "rightFootWeight", 1.0f, {});
AnimPose geometryToRig = _modelOffset * _geometryOffset;
@ -1440,7 +1452,7 @@ void Rig::computeAvatarBoundingCapsule(
// call overlay twice: once to verify AnimPoseVec joints and again to do the IK
AnimNode::Triggers triggersOut;
AnimContext context(false, glm::mat4());
AnimContext context(false, false, glm::mat4(), glm::mat4());
float dt = 1.0f; // the value of this does not matter
ikNode.overlay(animVars, context, dt, triggersOut, _animSkeleton->getRelativeBindPoses());
AnimPoseVec finalPoses = ikNode.overlay(animVars, context, dt, triggersOut, _animSkeleton->getRelativeBindPoses());

View file

@ -26,6 +26,7 @@
#include "SimpleMovingAverage.h"
class Rig;
class AnimInverseKinematics;
typedef std::shared_ptr<Rig> RigPointer;
// Rig instances are reentrant.
@ -111,6 +112,8 @@ public:
void clearJointStates();
void clearJointAnimationPriority(int index);
std::shared_ptr<AnimInverseKinematics> getAnimInverseKinematicsNode() const;
void clearIKJointLimitHistory();
void setMaxHipsOffsetLength(float maxLength);
float getMaxHipsOffsetLength() const;
@ -159,7 +162,7 @@ public:
void computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity, const glm::quat& worldRotation, CharacterControllerState ccState);
// Regardless of who started the animations or how many, update the joints.
void updateAnimations(float deltaTime, glm::mat4 rootTransform);
void updateAnimations(float deltaTime, const glm::mat4& rootTransform, const glm::mat4& rigToWorldTransform);
// legacy
void inverseKinematics(int endIndex, glm::vec3 targetPosition, const glm::quat& targetRotation, float priority,
@ -228,6 +231,7 @@ public:
const glm::mat4& getGeometryToRigTransform() const { return _geometryToRigTransform; }
void setEnableDebugDrawIKTargets(bool enableDebugDrawIKTargets) { _enableDebugDrawIKTargets = enableDebugDrawIKTargets; }
void setEnableDebugDrawIKConstraints(bool enableDebugDrawIKConstraints) { _enableDebugDrawIKConstraints = enableDebugDrawIKConstraints; }
// input assumed to be in rig space
void computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const;
@ -338,6 +342,7 @@ protected:
float _maxHipsOffsetLength { 1.0f };
bool _enableDebugDrawIKTargets { false };
bool _enableDebugDrawIKConstraints { false };
private:
QMap<int, StateHandler> _stateHandlers;

View file

@ -38,6 +38,9 @@ public:
/// \brief reset any remembered joint limit history
virtual void clearHistory() {};
/// \brief return the rotation that lies at the "center" of all the joint limits.
virtual glm::quat computeCenterRotation() const = 0;
protected:
glm::quat _referenceRotation = glm::quat();
};

View file

@ -15,6 +15,7 @@
#include <GeometryUtil.h>
#include <GLMHelpers.h>
#include <NumericalConstants.h>
#include "AnimUtil.h"
const float MIN_MINDOT = -0.999f;
@ -430,3 +431,33 @@ void SwingTwistConstraint::dynamicallyAdjustLimits(const glm::quat& rotation) {
void SwingTwistConstraint::clearHistory() {
_lastTwistBoundary = LAST_CLAMP_NO_BOUNDARY;
}
glm::quat SwingTwistConstraint::computeCenterRotation() const {
const size_t NUM_TWIST_LIMITS = 2;
const size_t NUM_MIN_DOTS = getMinDots().size();
std::vector<glm::quat> swingLimits;
swingLimits.reserve(NUM_MIN_DOTS);
glm::quat twistLimits[NUM_TWIST_LIMITS];
if (_minTwist != _maxTwist) {
// to ensure that twists do not flip the center rotation, we divide the twist angle by 2.
twistLimits[0] = glm::angleAxis(_minTwist / 2.0f, _referenceRotation * Vectors::UNIT_Y);
twistLimits[1] = glm::angleAxis(_maxTwist / 2.0f, _referenceRotation * Vectors::UNIT_Y);
}
const float D_THETA = TWO_PI / (NUM_MIN_DOTS - 1);
float theta = 0.0f;
for (size_t i = 0; i < NUM_MIN_DOTS - 1; i++, theta += D_THETA) {
// compute swing rotation from theta and phi angles.
float phi = acosf(getMinDots()[i]);
float cos_phi = getMinDots()[i];
float sin_phi = sinf(phi);
glm::vec3 swungAxis(sin_phi * cosf(theta), cos_phi, -sin_phi * sinf(theta));
// to ensure that swings > 90 degrees do not flip the center rotation, we divide phi by 2
glm::quat swing = glm::angleAxis(phi / 2.0f, glm::normalize(glm::cross(Vectors::UNIT_Y, swungAxis)));
swingLimits.push_back(swing);
}
glm::quat averageSwing = averageQuats(swingLimits.size(), &swingLimits[0]);
glm::quat averageTwist = averageQuats(2, twistLimits);
return averageSwing * averageTwist * _referenceRotation;
}

View file

@ -58,7 +58,7 @@ public:
virtual void dynamicallyAdjustLimits(const glm::quat& rotation) override;
// for testing purposes
const std::vector<float>& getMinDots() { return _swingLimitFunction.getMinDots(); }
const std::vector<float>& getMinDots() const { return _swingLimitFunction.getMinDots(); }
// SwingLimitFunction is an implementation of the constraint check described in the paper:
// "The Parameterization of Joint Rotation with the Unit Quaternion" by Quang Liu and Edmond C. Prakash
@ -81,7 +81,7 @@ public:
float getMinDot(float theta) const;
// for testing purposes
const std::vector<float>& getMinDots() { return _minDots; }
const std::vector<float>& getMinDots() const { return _minDots; }
private:
// the limits are stored in a lookup table with cyclic boundary conditions
@ -99,6 +99,11 @@ public:
void clearHistory() override;
virtual glm::quat computeCenterRotation() const override;
float getMinTwist() const { return _minTwist; }
float getMaxTwist() const { return _maxTwist; }
private:
float handleTwistBoundaryConditions(float twistAngle) const;

View file

@ -76,42 +76,58 @@ using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
static Mutex _deviceMutex;
// background thread that continuously polls for device changes
class CheckDevicesThread : public QThread {
class BackgroundThread : public QThread {
public:
const unsigned long DEVICE_CHECK_INTERVAL_MSECS = 2 * 1000;
BackgroundThread(AudioClient* client) : QThread((QObject*)client), _client(client) {}
virtual void join() = 0;
protected:
AudioClient* _client;
};
CheckDevicesThread(AudioClient* audioClient)
: _audioClient(audioClient) {
}
void beforeAboutToQuit() {
Lock lock(_checkDevicesMutex);
_quit = true;
// background thread continuously polling device changes
class CheckDevicesThread : public BackgroundThread {
public:
CheckDevicesThread(AudioClient* client) : BackgroundThread(client) {}
void join() override {
_shouldQuit = true;
std::unique_lock<std::mutex> lock(_joinMutex);
_joinCondition.wait(lock, [&]{ return !_isRunning; });
}
protected:
void run() override {
while (true) {
{
Lock lock(_checkDevicesMutex);
if (_quit) {
break;
}
_audioClient->checkDevices();
}
while (!_shouldQuit) {
_client->checkDevices();
const unsigned long DEVICE_CHECK_INTERVAL_MSECS = 2 * 1000;
QThread::msleep(DEVICE_CHECK_INTERVAL_MSECS);
}
std::lock_guard<std::mutex> lock(_joinMutex);
_isRunning = false;
_joinCondition.notify_one();
}
private:
AudioClient* _audioClient { nullptr };
Mutex _checkDevicesMutex;
bool _quit { false };
std::atomic<bool> _shouldQuit { false };
bool _isRunning { true };
std::mutex _joinMutex;
std::condition_variable _joinCondition;
};
void AudioInjectorsThread::prepare() {
_audio->prepareLocalAudioInjectors();
}
// background thread buffering local injectors
class LocalInjectorsThread : public BackgroundThread {
Q_OBJECT
public:
LocalInjectorsThread(AudioClient* client) : BackgroundThread(client) {}
void join() override { return; }
private slots:
void prepare() { _client->prepareLocalAudioInjectors(); }
};
#include "AudioClient.moc"
static void channelUpmix(int16_t* source, int16_t* dest, int numSamples, int numExtraChannels) {
for (int i = 0; i < numSamples/2; i++) {
@ -179,7 +195,6 @@ AudioClient::AudioClient() :
_inputToNetworkResampler(NULL),
_networkToOutputResampler(NULL),
_localToOutputResampler(NULL),
_localAudioThread(this),
_audioLimiter(AudioConstants::SAMPLE_RATE, OUTPUT_CHANNEL_COUNT),
_outgoingAvatarAudioSequenceNumber(0),
_audioOutputIODevice(_localInjectorsStream, _receivedAudioStream, this),
@ -210,13 +225,14 @@ AudioClient::AudioClient() :
// start a thread to detect any device changes
_checkDevicesThread = new CheckDevicesThread(this);
_checkDevicesThread->setObjectName("CheckDevices Thread");
_checkDevicesThread->setObjectName("AudioClient CheckDevices Thread");
_checkDevicesThread->setPriority(QThread::LowPriority);
_checkDevicesThread->start();
// start a thread to process local injectors
_localAudioThread.setObjectName("LocalAudio Thread");
_localAudioThread.start();
_localInjectorsThread = new LocalInjectorsThread(this);
_localInjectorsThread->setObjectName("AudioClient LocalInjectors Thread");
_localInjectorsThread->start();
configureReverb();
@ -231,18 +247,32 @@ AudioClient::AudioClient() :
}
AudioClient::~AudioClient() {
delete _checkDevicesThread;
stop();
if (_codec && _encoder) {
_codec->releaseEncoder(_encoder);
_encoder = nullptr;
}
}
void AudioClient::beforeAboutToQuit() {
static_cast<CheckDevicesThread*>(_checkDevicesThread)->beforeAboutToQuit();
void AudioClient::customDeleter() {
deleteLater();
}
void AudioClient::cleanupBeforeQuit() {
// FIXME: this should be put in customDeleter, but there is still a reference to this when it is called,
// so this must be explicitly, synchronously stopped
stop();
if (_checkDevicesThread) {
static_cast<BackgroundThread*>(_checkDevicesThread)->join();
delete _checkDevicesThread;
}
if (_localInjectorsThread) {
static_cast<BackgroundThread*>(_localInjectorsThread)->join();
delete _localInjectorsThread;
}
}
void AudioClient::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
qCDebug(audioclient) << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
@ -769,7 +799,8 @@ QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) {
QVector<QString> AudioClient::getDeviceNames(QAudio::Mode mode) {
QVector<QString> deviceNames;
foreach(QAudioDeviceInfo audioDevice, getAvailableDevices(mode)) {
const QList<QAudioDeviceInfo> &availableDevice = getAvailableDevices(mode);
foreach(const QAudioDeviceInfo &audioDevice, availableDevice) {
deviceNames << audioDevice.deviceName().trimmed();
}
return deviceNames;
@ -1096,11 +1127,19 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
handleAudioInput(audioBuffer);
}
void AudioClient::prepareLocalAudioInjectors() {
void AudioClient::prepareLocalAudioInjectors(std::unique_ptr<Lock> localAudioLock) {
bool doSynchronously = localAudioLock.operator bool();
if (!localAudioLock) {
localAudioLock.reset(new Lock(_localAudioMutex));
}
int samplesNeeded = std::numeric_limits<int>::max();
while (samplesNeeded > 0) {
// unlock between every write to allow device switching
Lock lock(_localAudioMutex);
if (!doSynchronously) {
// unlock between every write to allow device switching
localAudioLock->unlock();
localAudioLock->lock();
}
// in case of a device switch, consider bufferCapacity volatile across iterations
if (_outputPeriod == 0) {
@ -1154,16 +1193,16 @@ void AudioClient::prepareLocalAudioInjectors() {
}
bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
QVector<AudioInjector*> injectorsToRemove;
// lock the injector vector
Lock lock(_injectorsMutex);
if (_activeLocalAudioInjectors.size() == 0) {
// check the flag for injectors before attempting to lock
if (!_localInjectorsAvailable.load(std::memory_order_acquire)) {
return false;
}
// lock the injectors
Lock lock(_injectorsMutex);
QVector<AudioInjector*> injectorsToRemove;
memset(mixBuffer, 0, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO * sizeof(float));
for (AudioInjector* injector : _activeLocalAudioInjectors) {
@ -1242,6 +1281,9 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
_activeLocalAudioInjectors.removeOne(injector);
}
// update the flag
_localInjectorsAvailable.exchange(!_activeLocalAudioInjectors.empty(), std::memory_order_release);
return true;
}
@ -1328,11 +1370,14 @@ bool AudioClient::outputLocalInjector(AudioInjector* injector) {
// move local buffer to the LocalAudioThread to avoid dataraces with AudioInjector (like stop())
injectorBuffer->setParent(nullptr);
injectorBuffer->moveToThread(&_localAudioThread);
injectorBuffer->moveToThread(_localInjectorsThread);
// update the flag
_localInjectorsAvailable.exchange(true, std::memory_order_release);
} else {
qCDebug(audioclient) << "injector exists in active list already";
}
return true;
} else {
@ -1358,7 +1403,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn
_audioInput->stop();
_inputDevice = NULL;
delete _audioInput;
_audioInput->deleteLater();
_audioInput = NULL;
_numInputCallbackBytes = 0;
@ -1374,6 +1419,7 @@ bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceIn
if (!inputDeviceInfo.isNull()) {
qCDebug(audioclient) << "The audio input device " << inputDeviceInfo.deviceName() << "is available.";
_inputAudioDeviceName = inputDeviceInfo.deviceName().trimmed();
emit currentInputDeviceChanged(_inputAudioDeviceName);
if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
qCDebug(audioclient) << "The format to be used for audio input is" << _inputFormat;
@ -1455,18 +1501,21 @@ void AudioClient::outputNotify() {
bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) {
bool supportedFormat = false;
Lock lock(_localAudioMutex);
Lock localAudioLock(_localAudioMutex);
_localSamplesAvailable.exchange(0, std::memory_order_release);
// cleanup any previously initialized device
if (_audioOutput) {
_audioOutputIODevice.close();
_audioOutput->stop();
delete _audioOutput;
// must be deleted in the next event loop cycle when it's called from notify()
_audioOutput->deleteLater();
_audioOutput = NULL;
_loopbackOutputDevice = NULL;
delete _loopbackAudioOutput;
// must be deleted in the next event loop cycle when it's called from notify()
_loopbackAudioOutput->deleteLater();
_loopbackAudioOutput = NULL;
delete[] _outputMixBuffer;
@ -1491,6 +1540,7 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
if (!outputDeviceInfo.isNull()) {
qCDebug(audioclient) << "The audio output device " << outputDeviceInfo.deviceName() << "is available.";
_outputAudioDeviceName = outputDeviceInfo.deviceName().trimmed();
emit currentOutputDeviceChanged(_outputAudioDeviceName);
if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
qCDebug(audioclient) << "The format to be used for audio output is" << _outputFormat;
@ -1525,14 +1575,23 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
connect(_audioOutput, &QAudioOutput::stateChanged, [&, frameSize, requestedSize](QAudio::State state) {
if (state == QAudio::ActiveState) {
// restrict device callback to _outputPeriod samples
_outputPeriod = (_audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE) * 2;
_outputPeriod = _audioOutput->periodSize() / AudioConstants::SAMPLE_SIZE;
// device callback may exceed reported period, so double it to avoid stutter
_outputPeriod *= 2;
_outputMixBuffer = new float[_outputPeriod];
_outputScratchBuffer = new int16_t[_outputPeriod];
// size local output mix buffer based on resampled network frame size
_networkPeriod = _localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
_localOutputMixBuffer = new float[_networkPeriod];
int networkPeriod = _localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
_localOutputMixBuffer = new float[networkPeriod];
// local period should be at least twice the output period,
// in case two device reads happen before more data can be read (worst case)
int localPeriod = _outputPeriod * 2;
// round up to an exact multiple of networkPeriod
localPeriod = ((localPeriod + networkPeriod - 1) / networkPeriod) * networkPeriod;
// this ensures lowest latency without stutter from underrun
_localInjectorsStream.resizeForFrameSize(localPeriod);
int bufferSize = _audioOutput->bufferSize();
@ -1547,6 +1606,9 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
qCDebug(audioclient) << "local buffer (samples):" << localPeriod;
disconnect(_audioOutput, &QAudioOutput::stateChanged, 0, 0);
// unlock to avoid a deadlock with the device callback (which always follows this initialization)
localAudioLock.unlock();
}
});
connect(_audioOutput, &QAudioOutput::notify, this, &AudioClient::outputNotify);
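Note: a worked example of the sizing above with hypothetical numbers: assuming the device reports a period of 512 samples (periodSize / SAMPLE_SIZE) and the resampled network frame is 960 samples, _outputPeriod becomes 1024 after doubling, the local period starts at 2048, and rounding up to a multiple of the network period gives 2880.

#include <cstdio>

int main() {
    // hypothetical values; the real ones come from QAudioOutput and the resampler
    int devicePeriodSamples = 512;
    int networkPeriod = 960;

    int outputPeriod = devicePeriodSamples * 2;   // doubled to avoid stutter
    int localPeriod = outputPeriod * 2;           // room for two device reads (worst case)
    // round up to an exact multiple of networkPeriod
    localPeriod = ((localPeriod + networkPeriod - 1) / networkPeriod) * networkPeriod;

    printf("outputPeriod = %d, localPeriod = %d\n", outputPeriod, localPeriod);   // 1024, 2880
    return 0;
}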
@ -1685,12 +1747,24 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
int injectorSamplesPopped = 0;
{
bool append = networkSamplesPopped > 0;
// this does not require a lock as of the only two functions adding to _localSamplesAvailable (samples count):
// check the samples we have available locklessly; this is possible because only two functions add to the count:
// - prepareLocalAudioInjectors will only increase samples count
// - switchOutputToAudioDevice will zero samples count
// stop the device, so that readData will exhaust the existing buffer or see a zeroed samples count
// and start the device, which can only see a zeroed samples count
samplesRequested = std::min(samplesRequested, _audio->_localSamplesAvailable.load(std::memory_order_acquire));
// - switchOutputToAudioDevice will zero samples count,
// stop the device - so that readData will exhaust the existing buffer or see a zeroed samples count,
// and start the device - which can then only see a zeroed samples count
int samplesAvailable = _audio->_localSamplesAvailable.load(std::memory_order_acquire);
// if we do not have enough samples buffered despite having injectors, buffer them synchronously
if (samplesAvailable < samplesRequested && _audio->_localInjectorsAvailable.load(std::memory_order_acquire)) {
// try_to_lock, in case the device is being shut down already
std::unique_ptr<Lock> localAudioLock(new Lock(_audio->_localAudioMutex, std::try_to_lock));
if (localAudioLock->owns_lock()) {
_audio->prepareLocalAudioInjectors(std::move(localAudioLock));
samplesAvailable = _audio->_localSamplesAvailable.load(std::memory_order_acquire);
}
}
samplesRequested = std::min(samplesRequested, samplesAvailable);
if ((injectorSamplesPopped = _localInjectorsStream.appendSamples(mixBuffer, samplesRequested, append)) > 0) {
_audio->_localSamplesAvailable.fetch_sub(injectorSamplesPopped, std::memory_order_release);
qCDebug(audiostream, "Read %d samples from injectors (%d available, %d requested)", injectorSamplesPopped, _localInjectorsStream.samplesAvailable(), samplesRequested);
@ -1698,7 +1772,7 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
}
// prepare injectors for the next callback
QMetaObject::invokeMethod(&_audio->_localAudioThread, "prepare", Qt::QueuedConnection);
QMetaObject::invokeMethod(_audio->_localInjectorsThread, "prepare", Qt::QueuedConnection);
int samplesPopped = std::max(networkSamplesPopped, injectorSamplesPopped);
int framesPopped = samplesPopped / AudioConstants::STEREO;
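Note: a stripped-down sketch (not the actual AudioClient code) of the single-producer/single-consumer pattern the comments above describe: the producer publishes the sample count with a release store after writing the samples, and the consumer reads the count with an acquire load before consuming.

#include <atomic>
#include <algorithm>
#include <cstdio>

std::atomic<int> samplesAvailable { 0 };

// producer side (cf. prepareLocalAudioInjectors): write samples, then publish the count
void produce(int written) {
    samplesAvailable.fetch_add(written, std::memory_order_release);
}

// consumer side (cf. AudioOutputIODevice::readData): observe the count, then consume
int consume(int requested) {
    int available = samplesAvailable.load(std::memory_order_acquire);
    int popped = std::min(requested, available);
    samplesAvailable.fetch_sub(popped, std::memory_order_release);
    return popped;
}

int main() {
    produce(480);
    printf("popped %d of 512 requested\n", consume(512));   // 480
    printf("popped %d of 512 requested\n", consume(512));   // 0
    return 0;
}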

View file

@ -71,19 +71,6 @@ class QIODevice;
class Transform;
class NLPacket;
class AudioInjectorsThread : public QThread {
Q_OBJECT
public:
AudioInjectorsThread(AudioClient* audio) : _audio(audio) {}
public slots :
void prepare();
private:
AudioClient* _audio;
};
class AudioClient : public AbstractAudioInterface, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
@ -107,7 +94,6 @@ public:
_audio(audio), _unfulfilledReads(0) {}
void start() { open(QIODevice::ReadOnly | QIODevice::Unbuffered); }
void stop() { close(); }
qint64 readData(char * data, qint64 maxSize) override;
qint64 writeData(const char * data, qint64 maxSize) override { return 0; }
int getRecentUnfulfilledReads() { int unfulfilledReads = _unfulfilledReads; _unfulfilledReads = 0; return unfulfilledReads; }
@ -158,7 +144,7 @@ public:
Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale);
void checkDevices();
bool outputLocalInjector(AudioInjector* injector) override;
static const float CALLBACK_ACCELERATOR_RATIO;
@ -169,6 +155,7 @@ public:
public slots:
void start();
void stop();
void cleanupBeforeQuit();
void handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message);
void handleAudioDataPacket(QSharedPointer<ReceivedMessage> message);
@ -184,8 +171,6 @@ public slots:
void audioMixerKilled();
void toggleMute();
void beforeAboutToQuit();
virtual void setIsStereoInput(bool stereo) override;
void toggleAudioNoiseReduction() { _isNoiseGateEnabled = !_isNoiseGateEnabled; }
@ -198,8 +183,6 @@ public slots:
int setOutputBufferSize(int numFrames, bool persist = true);
void prepareLocalAudioInjectors();
bool outputLocalInjector(AudioInjector* injector) override;
bool shouldLoopbackInjectors() override { return _shouldEchoToServer; }
bool switchInputToAudioDevice(const QString& inputDeviceName);
@ -238,17 +221,23 @@ signals:
void muteEnvironmentRequested(glm::vec3 position, float radius);
void currentOutputDeviceChanged(const QString& name);
void currentInputDeviceChanged(const QString& name);
protected:
AudioClient();
~AudioClient();
virtual void customDeleter() override {
deleteLater();
}
virtual void customDeleter() override;
private:
friend class CheckDevicesThread;
friend class LocalInjectorsThread;
void outputFormatChanged();
void handleAudioInput(QByteArray& audioBuffer);
void checkDevices();
void prepareLocalAudioInjectors(std::unique_ptr<Lock> localAudioLock = nullptr);
bool mixLocalAudioInjectors(float* mixBuffer);
float azimuthForSource(const glm::vec3& relativePosition);
float gainForSource(float distance, float volume);
@ -295,8 +284,9 @@ private:
AudioRingBuffer _inputRingBuffer;
LocalInjectorsStream _localInjectorsStream;
// In order to use _localInjectorsStream as a lock-free pipe,
// use it with a single producer/consumer, and track available samples
// use it with a single producer/consumer, and track available samples and injectors
std::atomic<int> _localSamplesAvailable { 0 };
std::atomic<bool> _localInjectorsAvailable { false };
MixedProcessedAudioStream _receivedAudioStream;
bool _isStereoInput;
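For illustration only: a minimal, self-contained sketch (hypothetical names, not code from this change) of the single-producer/single-consumer handshake described in the comment above, where the injector thread publishes a sample count with a release store and the output callback claims samples with an acquire load, as the readData() hunk earlier in this diff does.

    #include <algorithm>
    #include <atomic>

    // Hypothetical sketch of the lock-free pipe counter described above: the injector
    // thread appends samples and then publishes the count; the output callback reads
    // the count, drains at most that many samples, and subtracts what it consumed.
    std::atomic<int> samplesAvailable { 0 };

    void onInjectorSamplesAppended(int samplesWritten) {
        // producer side: samples are already in the stream, now make them visible
        samplesAvailable.fetch_add(samplesWritten, std::memory_order_release);
    }

    int onOutputCallback(int samplesRequested) {
        // consumer side: never read past what the producer has published
        int available = samplesAvailable.load(std::memory_order_acquire);
        int samplesRead = std::min(samplesRequested, available);
        // ... pop samplesRead samples from the stream here ...
        samplesAvailable.fetch_sub(samplesRead, std::memory_order_release);
        return samplesRead;
    }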
@ -337,19 +327,17 @@ private:
// for network audio (used by network audio thread)
int16_t _networkScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
// for local audio (used by audio injectors thread)
int _networkPeriod { 0 };
float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
float* _localOutputMixBuffer { NULL };
AudioInjectorsThread _localAudioThread;
Mutex _localAudioMutex;
// for output audio (used by this thread)
int _outputPeriod { 0 };
float* _outputMixBuffer { NULL };
int16_t* _outputScratchBuffer { NULL };
// for local audio (used by audio injectors thread)
float _localMixBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
float* _localOutputMixBuffer { NULL };
Mutex _localAudioMutex;
AudioLimiter _audioLimiter;
// Adds Reverb
@ -392,12 +380,13 @@ private:
QString _selectedCodecName;
Encoder* _encoder { nullptr }; // for outbound mic stream
QThread* _checkDevicesThread { nullptr };
RateCounter<> _silentOutbound;
RateCounter<> _audioOutbound;
RateCounter<> _silentInbound;
RateCounter<> _audioInbound;
QThread* _checkDevicesThread { nullptr };
QThread* _localInjectorsThread { nullptr };
};

View file

@ -32,12 +32,12 @@ public:
const Transform& transform, glm::vec3 avatarBoundingBoxCorner, glm::vec3 avatarBoundingBoxScale,
PacketType packetType, QString codecName = QString(""));
public slots:
// threadsafe
// moves injector->getLocalBuffer() to another thread (so removes its parent)
// take care to delete it in ~AudioInjector, as Qt parenting semantics will not work
virtual bool outputLocalInjector(AudioInjector* injector) = 0;
public slots:
virtual bool shouldLoopbackInjectors() { return false; }
virtual void setIsStereoInput(bool stereo) = 0;
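For illustration only, a minimal sketch of the Qt ownership rule the comment above warns about: an object can only be moved to another thread once it has no parent, so after the move its deletion must be handled explicitly. The helper name below is hypothetical.

    #include <QObject>
    #include <QThread>

    // Hypothetical helper showing why the moved local buffer loses its parent:
    // QObject::moveToThread() refuses objects that still have a parent, and a
    // parentless object is no longer deleted automatically by Qt parenting.
    void adoptLocalBuffer(QObject* localBuffer, QThread* injectorsThread) {
        localBuffer->setParent(nullptr);            // detach so the move is allowed
        localBuffer->moveToThread(injectorsThread); // from here on, delete it yourself
    }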

View file

@ -1,6 +1,6 @@
set(TARGET_NAME avatars-renderer)
AUTOSCRIBE_SHADER_LIB(gpu model render render-utils)
setup_hifi_library(Widgets Network Script)
link_hifi_libraries(shared gpu model animation physics model-networking script-engine render image render-utils)
link_hifi_libraries(shared gpu model animation model-networking script-engine render image render-utils)
target_bullet()

View file

@ -338,54 +338,28 @@ void Avatar::simulate(float deltaTime, bool inView) {
_simulationInViewRate.increment();
}
if (!isMyAvatar()) {
if (_smoothPositionTimer < _smoothPositionTime) {
// Smooth the remote avatar movement.
_smoothPositionTimer += deltaTime;
if (_smoothPositionTimer < _smoothPositionTime) {
AvatarData::setPosition(
lerp(_smoothPositionInitial,
_smoothPositionTarget,
easeInOutQuad(glm::clamp(_smoothPositionTimer / _smoothPositionTime, 0.0f, 1.0f)))
);
updateAttitude();
}
}
if (_smoothOrientationTimer < _smoothOrientationTime) {
// Smooth the remote avatar movement.
_smoothOrientationTimer += deltaTime;
if (_smoothOrientationTimer < _smoothOrientationTime) {
AvatarData::setOrientation(
slerp(_smoothOrientationInitial,
_smoothOrientationTarget,
easeInOutQuad(glm::clamp(_smoothOrientationTimer / _smoothOrientationTime, 0.0f, 1.0f)))
);
updateAttitude();
}
}
}
PerformanceTimer perfTimer("simulate");
{
PROFILE_RANGE(simulation, "updateJoints");
if (inView && _hasNewJointData) {
_skeletonModel->getRig()->copyJointsFromJointData(_jointData);
glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset());
_skeletonModel->getRig()->computeExternalPoses(rootTransform);
_jointDataSimulationRate.increment();
_skeletonModel->simulate(deltaTime, true);
locationChanged(); // joints changed, so if there are any children, update them.
_hasNewJointData = false;
glm::vec3 headPosition = getPosition();
if (!_skeletonModel->getHeadPosition(headPosition)) {
headPosition = getPosition();
}
if (inView) {
Head* head = getHead();
head->setPosition(headPosition);
if (_hasNewJointData) {
_skeletonModel->getRig()->copyJointsFromJointData(_jointData);
glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset());
_skeletonModel->getRig()->computeExternalPoses(rootTransform);
_jointDataSimulationRate.increment();
_skeletonModel->simulate(deltaTime, true);
locationChanged(); // joints changed, so if there are any children, update them.
_hasNewJointData = false;
glm::vec3 headPosition = getPosition();
if (!_skeletonModel->getHeadPosition(headPosition)) {
headPosition = getPosition();
}
head->setPosition(headPosition);
}
head->setScale(getUniformScale());
head->simulate(deltaTime);
} else {
@ -1291,6 +1265,17 @@ void Avatar::getCapsule(glm::vec3& start, glm::vec3& end, float& radius) {
radius = halfExtents.x;
}
float Avatar::computeMass() {
float radius;
glm::vec3 start, end;
getCapsule(start, end, radius);
// NOTE:
// volumeOfCapsule = volumeOfCylinder + volumeOfSphere
// volumeOfCapsule = (2PI * R^2 * H) + (4PI * R^3 / 3)
// volumeOfCapsule = 2PI * R^2 * (H + 2R/3)
return _density * TWO_PI * radius * radius * (glm::length(end - start) + 2.0f * radius / 3.0f);
}
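As a quick sanity check of the formula in the comment above (illustrative numbers, not values from this change): a capsule with radius R = 0.3 m and cylinder height H = 1.0 m has volume 2 * PI * 0.3^2 * (1.0 + 2 * 0.3 / 3) = 2 * PI * 0.108 ~= 0.68 m^3, so at the default water density of 1000 kg/m^3 computeMass() would return roughly 680 kg.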
// virtual
void Avatar::rebuildCollisionShape() {
addPhysicsFlags(Simulation::DIRTY_SHAPE);
@ -1369,31 +1354,13 @@ glm::quat Avatar::getUncachedRightPalmRotation() const {
}
void Avatar::setPosition(const glm::vec3& position) {
if (isMyAvatar()) {
// This is the local avatar, no need to handle any position smoothing.
AvatarData::setPosition(position);
updateAttitude();
return;
}
// Whether or not there is an existing smoothing going on, just reset the smoothing timer and set the starting position as the avatar's current position, then smooth to the new position.
_smoothPositionInitial = getPosition();
_smoothPositionTarget = position;
_smoothPositionTimer = 0.0f;
AvatarData::setPosition(position);
updateAttitude();
}
void Avatar::setOrientation(const glm::quat& orientation) {
if (isMyAvatar()) {
// This is the local avatar, no need to handle any position smoothing.
AvatarData::setOrientation(orientation);
updateAttitude();
return;
}
// Whether or not there is an existing smoothing going on, just reset the smoothing timer and set the starting position as the avatar's current position, then smooth to the new position.
_smoothOrientationInitial = getOrientation();
_smoothOrientationTarget = orientation;
_smoothOrientationTimer = 0.0f;
AvatarData::setOrientation(orientation);
updateAttitude();
}
void Avatar::updatePalms() {

View file

@ -36,7 +36,6 @@ namespace render {
}
static const float SCALING_RATIO = .05f;
static const float SMOOTHING_RATIO = .05f; // 0 < ratio < 1
extern const float CHAT_MESSAGE_SCALE;
extern const float CHAT_MESSAGE_HEIGHT;
@ -196,6 +195,7 @@ public:
virtual void computeShapeInfo(ShapeInfo& shapeInfo);
void getCapsule(glm::vec3& start, glm::vec3& end, float& radius);
float computeMass();
using SpatiallyNestable::setPosition;
virtual void setPosition(const glm::vec3& position) override;
@ -239,17 +239,8 @@ public:
bool hasNewJointData() const { return _hasNewJointData; }
inline float easeInOutQuad(float lerpValue) {
assert(!((lerpValue < 0.0f) || (lerpValue > 1.0f)));
if (lerpValue < 0.5f) {
return (2.0f * lerpValue * lerpValue);
}
return (lerpValue*(4.0f - 2.0f * lerpValue) - 1.0f);
}
float getBoundingRadius() const;
void addToScene(AvatarSharedPointer self, const render::ScenePointer& scene);
void ensureInScene(AvatarSharedPointer self, const render::ScenePointer& scene);
bool isInScene() const { return render::Item::isValidID(_renderItemID); }
@ -271,9 +262,6 @@ public slots:
void setModelURLFinished(bool success);
protected:
const float SMOOTH_TIME_POSITION = 0.125f;
const float SMOOTH_TIME_ORIENTATION = 0.075f;
virtual const QString& getSessionDisplayNameForTransport() const override { return _empty; } // Save a tiny bit of bandwidth. Mixer won't look at what we send.
QString _empty{};
virtual void maybeUpdateSessionDisplayNameFromTransport(const QString& sessionDisplayName) override { _sessionDisplayName = sessionDisplayName; } // don't use no-op setter!
@ -336,16 +324,6 @@ protected:
RateCounter<> _skeletonModelSimulationRate;
RateCounter<> _jointDataSimulationRate;
// Smoothing data for blending from one position/orientation to another on remote agents.
float _smoothPositionTime { SMOOTH_TIME_POSITION };
float _smoothPositionTimer { std::numeric_limits<float>::max() };
float _smoothOrientationTime { SMOOTH_TIME_ORIENTATION };
float _smoothOrientationTimer { std::numeric_limits<float>::max() };
glm::vec3 _smoothPositionInitial;
glm::vec3 _smoothPositionTarget;
glm::quat _smoothOrientationInitial;
glm::quat _smoothOrientationTarget;
private:
class AvatarEntityDataHash {
public:

View file

@ -23,9 +23,10 @@
#include "Avatar.h"
const float NORMAL_HZ = 60.0f; // the update rate the constant values were tuned for
using namespace std;
static bool fixGaze { false };
static bool disableEyelidAdjustment { false };
Head::Head(Avatar* owningAvatar) :
@ -42,17 +43,11 @@ void Head::reset() {
_baseYaw = _basePitch = _baseRoll = 0.0f;
}
void Head::simulate(float deltaTime) {
const float NORMAL_HZ = 60.0f; // the update rate the constant values were tuned for
void Head::computeAudioLoudness(float deltaTime) {
// grab the audio loudness from the owning avatar, if we have one
float audioLoudness = 0.0f;
float audioLoudness = _owningAvatar ? _owningAvatar->getAudioLoudness() : 0.0f;
if (_owningAvatar) {
audioLoudness = _owningAvatar->getAudioLoudness();
}
// Update audio trailing average for rendering facial animations
// Update audio trailing average for rendering facial animations
const float AUDIO_AVERAGING_SECS = 0.05f;
const float AUDIO_LONG_TERM_AVERAGING_SECS = 30.0f;
_averageLoudness = glm::mix(_averageLoudness, audioLoudness, glm::min(deltaTime / AUDIO_AVERAGING_SECS, 1.0f));
@ -63,117 +58,114 @@ void Head::simulate(float deltaTime) {
_longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
}
if (!_isFaceTrackerConnected) {
if (!_isEyeTrackerConnected) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
_audioAttack = audioAttackAveragingRate * _audioAttack +
(1.0f - audioAttackAveragingRate) * fabs((audioLoudness - _longTermAverageLoudness) - _lastLoudness);
_lastLoudness = (audioLoudness - _longTermAverageLoudness);
}
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
} else {
_saccade = glm::vec3();
}
void Head::computeEyeMovement(float deltaTime) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
const float TALKING_LOUDNESS = 100.0f;
const float BLINK_AFTER_TALKING = 0.25f;
_timeWithoutTalking += deltaTime;
if ((_averageLoudness - _longTermAverageLoudness) > TALKING_LOUDNESS) {
_timeWithoutTalking = 0.0f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
} else if (_timeWithoutTalking < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) {
forceBlink = true;
}
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
const float TALKING_LOUDNESS = 100.0f;
const float BLINK_AFTER_TALKING = 0.25f;
_timeWithoutTalking += deltaTime;
if ((_averageLoudness - _longTermAverageLoudness) > TALKING_LOUDNESS) {
_timeWithoutTalking = 0.0f;
} else if (_timeWithoutTalking - deltaTime < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) {
forceBlink = true;
}
// Update audio attack data for facial animation (eyebrows and mouth)
float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
_audioAttack = audioAttackAveragingRate * _audioAttack +
(1.0f - audioAttackAveragingRate) * fabs((audioLoudness - _longTermAverageLoudness) - _lastLoudness);
_lastLoudness = (audioLoudness - _longTermAverageLoudness);
const float BROW_LIFT_THRESHOLD = 100.0f;
if (_audioAttack > BROW_LIFT_THRESHOLD) {
_browAudioLift += sqrtf(_audioAttack) * 0.01f;
}
_browAudioLift = glm::clamp(_browAudioLift *= 0.7f, 0.0f, 1.0f);
const float BLINK_SPEED = 10.0f;
const float BLINK_SPEED_VARIABILITY = 1.0f;
const float BLINK_START_VARIABILITY = 0.25f;
const float FULLY_OPEN = 0.0f;
const float FULLY_CLOSED = 1.0f;
if (_leftEyeBlinkVelocity == 0.0f && _rightEyeBlinkVelocity == 0.0f) {
// no blinking when brows are raised; blink less with increasing loudness
const float BASE_BLINK_RATE = 15.0f / 60.0f;
const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
_leftEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
_rightEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
if (randFloat() < 0.5f) {
_leftEyeBlink = BLINK_START_VARIABILITY;
} else {
_rightEyeBlink = BLINK_START_VARIABILITY;
}
}
} else {
_leftEyeBlink = glm::clamp(_leftEyeBlink + _leftEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
_rightEyeBlink = glm::clamp(_rightEyeBlink + _rightEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
if (_leftEyeBlink == FULLY_CLOSED) {
_leftEyeBlinkVelocity = -BLINK_SPEED;
} else if (_leftEyeBlink == FULLY_OPEN) {
_leftEyeBlinkVelocity = 0.0f;
}
if (_rightEyeBlink == FULLY_CLOSED) {
_rightEyeBlinkVelocity = -BLINK_SPEED;
} else if (_rightEyeBlink == FULLY_OPEN) {
_rightEyeBlinkVelocity = 0.0f;
const float BLINK_SPEED = 10.0f;
const float BLINK_SPEED_VARIABILITY = 1.0f;
const float BLINK_START_VARIABILITY = 0.25f;
const float FULLY_OPEN = 0.0f;
const float FULLY_CLOSED = 1.0f;
if (_leftEyeBlinkVelocity == 0.0f && _rightEyeBlinkVelocity == 0.0f) {
// no blinking when brows are raised; blink less with increasing loudness
const float BASE_BLINK_RATE = 15.0f / 60.0f;
const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
_leftEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
_rightEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
if (randFloat() < 0.5f) {
_leftEyeBlink = BLINK_START_VARIABILITY;
} else {
_rightEyeBlink = BLINK_START_VARIABILITY;
}
}
// use data to update fake Faceshift blendshape coefficients
calculateMouthShapes(deltaTime);
FaceTracker::updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_blendshapeCoefficients);
applyEyelidOffset(getOrientation());
} else {
_saccade = glm::vec3();
}
if (fixGaze) { // if debug menu turns off, use no saccade
_saccade = glm::vec3();
_leftEyeBlink = glm::clamp(_leftEyeBlink + _leftEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
_rightEyeBlink = glm::clamp(_rightEyeBlink + _rightEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
if (_leftEyeBlink == FULLY_CLOSED) {
_leftEyeBlinkVelocity = -BLINK_SPEED;
} else if (_leftEyeBlink == FULLY_OPEN) {
_leftEyeBlinkVelocity = 0.0f;
}
if (_rightEyeBlink == FULLY_CLOSED) {
_rightEyeBlinkVelocity = -BLINK_SPEED;
} else if (_rightEyeBlink == FULLY_OPEN) {
_rightEyeBlinkVelocity = 0.0f;
}
}
applyEyelidOffset(getOrientation());
}
void Head::computeFaceMovement(float deltaTime) {
// Update audio attack data for facial animation (eyebrows and mouth)
const float BROW_LIFT_THRESHOLD = 100.0f;
if (_audioAttack > BROW_LIFT_THRESHOLD) {
_browAudioLift += sqrtf(_audioAttack) * 0.01f;
}
_browAudioLift = glm::clamp(_browAudioLift *= 0.7f, 0.0f, 1.0f);
// use data to update fake Faceshift blendshape coefficients
calculateMouthShapes(deltaTime);
FaceTracker::updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_transientBlendshapeCoefficients);
}
void Head::computeEyePosition() {
_leftEyePosition = _rightEyePosition = getPosition();
_eyePosition = getPosition();
if (_owningAvatar) {
auto skeletonModel = static_cast<Avatar*>(_owningAvatar)->getSkeletonModel();
if (skeletonModel) {
skeletonModel->getEyePositions(_leftEyePosition, _rightEyePosition);
}
}
_eyePosition = 0.5f * (_leftEyePosition + _rightEyePosition);
}
_eyePosition = calculateAverageEyePosition();
void Head::simulate(float deltaTime) {
computeAudioLoudness(deltaTime);
computeFaceMovement(deltaTime);
computeEyeMovement(deltaTime);
computeEyePosition();
}
void Head::calculateMouthShapes(float deltaTime) {
@ -203,6 +195,13 @@ void Head::calculateMouthShapes(float deltaTime) {
float trailingAudioJawOpenRatio = (100.0f - deltaTime * NORMAL_HZ) / 100.0f; // --> 0.99 at 60 Hz
_trailingAudioJawOpen = glm::mix(_trailingAudioJawOpen, _audioJawOpen, trailingAudioJawOpenRatio);
// truncate _mouthTime when mouth goes quiet to prevent floating point error on increment
const float SILENT_TRAILING_JAW_OPEN = 0.0002f;
const float MAX_SILENT_MOUTH_TIME = 10.0f;
if (_trailingAudioJawOpen < SILENT_TRAILING_JAW_OPEN && _mouthTime > MAX_SILENT_MOUTH_TIME) {
_mouthTime = 0.0f;
}
// Advance time at a rate proportional to loudness, and move the mouth shapes through
// a cycle at differing speeds to create a continuous random blend of shapes.
_mouthTime += sqrtf(_averageLoudness) * TIMESTEP_CONSTANT * deltaTimeRatio;
@ -228,15 +227,15 @@ void Head::applyEyelidOffset(glm::quat headOrientation) {
for (int i = 0; i < 2; i++) {
const int LEFT_EYE = 8;
float eyeCoefficient = _blendshapeCoefficients[i] - _blendshapeCoefficients[LEFT_EYE + i]; // Raw value
float eyeCoefficient = _transientBlendshapeCoefficients[i] - _transientBlendshapeCoefficients[LEFT_EYE + i];
eyeCoefficient = glm::clamp(eyelidOffset + eyeCoefficient * (1.0f - eyelidOffset), -1.0f, 1.0f);
if (eyeCoefficient > 0.0f) {
_blendshapeCoefficients[i] = eyeCoefficient;
_blendshapeCoefficients[LEFT_EYE + i] = 0.0f;
_transientBlendshapeCoefficients[i] = eyeCoefficient;
_transientBlendshapeCoefficients[LEFT_EYE + i] = 0.0f;
} else {
_blendshapeCoefficients[i] = 0.0f;
_blendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient;
_transientBlendshapeCoefficients[i] = 0.0f;
_transientBlendshapeCoefficients[LEFT_EYE + i] = -eyeCoefficient;
}
}
}

View file

@ -83,7 +83,10 @@ public:
float getTimeWithoutTalking() const { return _timeWithoutTalking; }
protected:
glm::vec3 calculateAverageEyePosition() const { return _leftEyePosition + (_rightEyePosition - _leftEyePosition ) * 0.5f; }
void computeAudioLoudness(float deltaTime);
void computeEyeMovement(float deltaTime);
void computeFaceMovement(float deltaTime);
void computeEyePosition();
// disallow copies of the Head, copy of owning Avatar is disallowed too
Head(const Head&);

View file

@ -14,7 +14,7 @@
class OtherAvatar : public Avatar {
public:
explicit OtherAvatar(QThread* thread, RigPointer rig = nullptr);
void instantiableAvatar() {};
virtual void instantiableAvatar() override {};
};
#endif // hifi_OtherAvatar_h

View file

@ -73,12 +73,13 @@ void SkeletonModel::initJointStates() {
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
assert(!_owningAvatar->isMyAvatar());
const FBXGeometry& geometry = getFBXGeometry();
Head* head = _owningAvatar->getHead();
// make sure lookAt is not too close to face (avoid crosseyes)
glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition();
glm::vec3 lookAt = head->getCorrectedLookAtPosition();
glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();
float focusDistance = glm::length(focusOffset);
const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f;
@ -86,41 +87,36 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
}
if (!_owningAvatar->isMyAvatar()) {
// no need to call Model::updateRig() because otherAvatars get their joint state
// copied directly from AvatarData::_jointData (there are no Rig animations to blend)
_needsUpdateClusterMatrices = true;
// no need to call Model::updateRig() because otherAvatars get their joint state
// copied directly from AvatarData::_jointData (there are no Rig animations to blend)
_needsUpdateClusterMatrices = true;
// This is a little more work than we really want.
//
// Other avatars' joints, including their eyes, should already be set just like any other joints
// from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
//
// Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
// However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
// We will revisit that as priorities allow, and particularly after the new rig/animation/joints.
// This is a little more work than we really want.
//
// Other avatars' joints, including their eyes, should already be set just like any other joints
// from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
//
// Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
// However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
// We will revisit that as priorities allow, and particularly after the new rig/animation/joints.
// If the head is not positioned, updateEyeJoints won't get the math right
glm::quat headOrientation;
_rig->getJointRotation(geometry.headJointIndex, headOrientation);
glm::vec3 eulers = safeEulerAngles(headOrientation);
head->setBasePitch(glm::degrees(-eulers.x));
head->setBaseYaw(glm::degrees(eulers.y));
head->setBaseRoll(glm::degrees(-eulers.z));
// If the head is not positioned, updateEyeJoints won't get the math right
glm::quat headOrientation;
_rig->getJointRotation(geometry.headJointIndex, headOrientation);
glm::vec3 eulers = safeEulerAngles(headOrientation);
head->setBasePitch(glm::degrees(-eulers.x));
head->setBaseYaw(glm::degrees(eulers.y));
head->setBaseRoll(glm::degrees(-eulers.z));
Rig::EyeParameters eyeParams;
eyeParams.eyeLookAt = lookAt;
eyeParams.eyeSaccade = glm::vec3(0.0f);
eyeParams.modelRotation = getRotation();
eyeParams.modelTranslation = getTranslation();
eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;
Rig::EyeParameters eyeParams;
eyeParams.eyeLookAt = lookAt;
eyeParams.eyeSaccade = glm::vec3(0.0f);
eyeParams.modelRotation = getRotation();
eyeParams.modelTranslation = getTranslation();
eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;
_rig->updateFromEyeParameters(eyeParams);
}
// evaluate AnimGraph animation and update jointStates.
Parent::updateRig(deltaTime, parentTransform);
_rig->updateFromEyeParameters(eyeParams);
}
void SkeletonModel::updateAttitude() {

View file

@ -52,6 +52,7 @@ const QString AvatarData::FRAME_NAME = "com.highfidelity.recording.AvatarData";
static const int TRANSLATION_COMPRESSION_RADIX = 12;
static const int SENSOR_TO_WORLD_SCALE_RADIX = 10;
static const float AUDIO_LOUDNESS_SCALE = 1024.0f;
static const float DEFAULT_AVATAR_DENSITY = 1000.0f; // density of water
#define ASSERT(COND) do { if (!(COND)) { abort(); } } while(0)
@ -65,7 +66,8 @@ AvatarData::AvatarData() :
_headData(NULL),
_errorLogExpiry(0),
_owningAvatarMixer(),
_targetVelocity(0.0f)
_targetVelocity(0.0f),
_density(DEFAULT_AVATAR_DENSITY)
{
setBodyPitch(0.0f);
setBodyYaw(-90.0f);
@ -170,13 +172,13 @@ QByteArray AvatarData::toByteArrayStateful(AvatarDataDetail dataDetail) {
AvatarDataPacket::HasFlags hasFlagsOut;
auto lastSentTime = _lastToByteArray;
_lastToByteArray = usecTimestampNow();
return AvatarData::toByteArray(dataDetail, lastSentTime, getLastSentJointData(),
return AvatarData::toByteArray(dataDetail, lastSentTime, getLastSentJointData(),
hasFlagsOut, false, false, glm::vec3(0), nullptr,
&_outboundDataRate);
}
QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime, const QVector<JointData>& lastSentJointData,
AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust,
AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust,
glm::vec3 viewerPosition, QVector<JointData>* sentJointDataOut, AvatarDataRate* outboundDataRateOut) const {
bool cullSmallChanges = (dataDetail == CullSmallData);
@ -199,7 +201,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
// FIXME -
//
// BUG -- if you enter a space bubble, and then back away, the avatar has wrong orientation until "send all" happens...
// BUG -- if you enter a space bubble, and then back away, the avatar has wrong orientation until "send all" happens...
// this is an iFrame issue... what to do about that?
//
// BUG -- Resizing avatar seems to "take too long"... the avatar doesn't redraw at smaller size right away
@ -310,7 +312,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
if (hasAvatarOrientation) {
auto startSection = destinationBuffer;
auto localOrientation = getLocalOrientation();
auto localOrientation = getOrientationOutbound();
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, localOrientation);
int numBytes = destinationBuffer - startSection;
@ -445,17 +447,17 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
if (hasFaceTrackerInfo) {
auto startSection = destinationBuffer;
auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
const auto& blendshapeCoefficients = _headData->getSummedBlendshapeCoefficients();
faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
faceTrackerInfo->rightEyeBlink = _headData->_rightEyeBlink;
faceTrackerInfo->averageLoudness = _headData->_averageLoudness;
faceTrackerInfo->browAudioLift = _headData->_browAudioLift;
faceTrackerInfo->numBlendshapeCoefficients = _headData->_blendshapeCoefficients.size();
faceTrackerInfo->numBlendshapeCoefficients = blendshapeCoefficients.size();
destinationBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo);
// followed by a variable number of float coefficients
memcpy(destinationBuffer, _headData->_blendshapeCoefficients.data(), _headData->_blendshapeCoefficients.size() * sizeof(float));
destinationBuffer += _headData->_blendshapeCoefficients.size() * sizeof(float);
memcpy(destinationBuffer, blendshapeCoefficients.data(), blendshapeCoefficients.size() * sizeof(float));
destinationBuffer += blendshapeCoefficients.size() * sizeof(float);
int numBytes = destinationBuffer - startSection;
if (outboundDataRateOut) {
@ -965,7 +967,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
const int coefficientsSize = sizeof(float) * numCoefficients;
PACKET_READ_CHECK(FaceTrackerCoefficients, coefficientsSize);
_headData->_blendshapeCoefficients.resize(numCoefficients); // make sure there's room for the copy!
_headData->_baseBlendshapeCoefficients.resize(numCoefficients);
_headData->_transientBlendshapeCoefficients.resize(numCoefficients);
memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer, coefficientsSize);
sourceBuffer += coefficientsSize;
int numBytesRead = sourceBuffer - startSection;
@ -1471,13 +1473,13 @@ QStringList AvatarData::getJointNames() const {
void AvatarData::parseAvatarIdentityPacket(const QByteArray& data, Identity& identityOut) {
QDataStream packetStream(data);
packetStream >> identityOut.uuid
>> identityOut.skeletonModelURL
packetStream >> identityOut.uuid
>> identityOut.skeletonModelURL
>> identityOut.attachmentData
>> identityOut.displayName
>> identityOut.sessionDisplayName
>> identityOut.avatarEntityData
>> identityOut.updatedAt;
>> identityOut.sequenceId;
#ifdef WANT_DEBUG
qCDebug(avatars) << __FUNCTION__
@ -1489,6 +1491,10 @@ void AvatarData::parseAvatarIdentityPacket(const QByteArray& data, Identity& ide
}
glm::quat AvatarData::getOrientationOutbound() const {
return (getLocalOrientation());
}
static const QUrl emptyURL("");
QUrl AvatarData::cannonicalSkeletonModelURL(const QUrl& emptyURL) const {
// We don't put file urls on the wire, but instead convert to empty.
@ -1497,11 +1503,13 @@ QUrl AvatarData::cannonicalSkeletonModelURL(const QUrl& emptyURL) const {
void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityChanged, bool& displayNameChanged) {
if (identity.updatedAt < _identityUpdatedAt) {
qCDebug(avatars) << "Ignoring late identity packet for avatar " << getSessionUUID()
<< "identity.updatedAt:" << identity.updatedAt << "_identityUpdatedAt:" << _identityUpdatedAt;
if (identity.sequenceId < _identitySequenceId) {
qCDebug(avatars) << "Ignoring older identity packet for avatar" << getSessionUUID()
<< "_identitySequenceId (" << _identitySequenceId << ") is greater than" << identity.sequenceId;
return;
}
// otherwise, set the identitySequenceId to match the incoming identity
_identitySequenceId = identity.sequenceId;
if (_firstSkeletonCheck || (identity.skeletonModelURL != cannonicalSkeletonModelURL(emptyURL))) {
setSkeletonModelURL(identity.skeletonModelURL);
@ -1533,9 +1541,6 @@ void AvatarData::processAvatarIdentity(const Identity& identity, bool& identityC
identityChanged = true;
}
// use the timestamp from this identity, since we want to honor the updated times in "server clock"
// this will overwrite any changes we made locally to this AvatarData's _identityUpdatedAt
_identityUpdatedAt = identity.updatedAt;
}
QByteArray AvatarData::identityByteArray() const {
@ -1544,13 +1549,13 @@ QByteArray AvatarData::identityByteArray() const {
const QUrl& urlToSend = cannonicalSkeletonModelURL(emptyURL); // depends on _skeletonModelURL
_avatarEntitiesLock.withReadLock([&] {
identityStream << getSessionUUID()
<< urlToSend
identityStream << getSessionUUID()
<< urlToSend
<< _attachmentData
<< _displayName
<< getSessionDisplayNameForTransport() // depends on _sessionDisplayName
<< _avatarEntityData
<< _identityUpdatedAt;
<< _identitySequenceId;
});
return identityData;
@ -1576,8 +1581,6 @@ void AvatarData::setDisplayName(const QString& displayName) {
_displayName = displayName;
_sessionDisplayName = "";
sendIdentityPacket();
qCDebug(avatars) << "Changing display name for avatar to" << displayName;
markIdentityDataChanged();
}
@ -2044,11 +2047,13 @@ void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
setSkeletonModelURL(bodyModelURL);
}
}
QString newDisplayName = "";
if (json.contains(JSON_AVATAR_DISPLAY_NAME)) {
auto newDisplayName = json[JSON_AVATAR_DISPLAY_NAME].toString();
if (newDisplayName != getDisplayName()) {
setDisplayName(newDisplayName);
}
newDisplayName = json[JSON_AVATAR_DISPLAY_NAME].toString();
}
if (newDisplayName != getDisplayName()) {
setDisplayName(newDisplayName);
}
auto currentBasis = getRecordingBasis();
@ -2078,14 +2083,16 @@ void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
setTargetScale((float)json[JSON_AVATAR_SCALE].toDouble());
}
QVector<AttachmentData> attachments;
if (json.contains(JSON_AVATAR_ATTACHMENTS) && json[JSON_AVATAR_ATTACHMENTS].isArray()) {
QJsonArray attachmentsJson = json[JSON_AVATAR_ATTACHMENTS].toArray();
QVector<AttachmentData> attachments;
for (auto attachmentJson : attachmentsJson) {
AttachmentData attachment;
attachment.fromJson(attachmentJson.toObject());
attachments.push_back(attachment);
}
}
if (attachments != getAttachmentData()) {
setAttachmentData(attachments);
}
@ -2461,11 +2468,11 @@ QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEnt
if (!jsonEntityProperties.isObject()) {
qCDebug(avatars) << "bad AvatarEntityData in AvatarEntityMap" << QString(entityProperties.toHex());
}
QVariant variantEntityProperties = jsonEntityProperties.toVariant();
QVariantMap entityPropertiesMap = variantEntityProperties.toMap();
QScriptValue scriptEntityProperties = variantMapToScriptValue(entityPropertiesMap, *engine);
QString key = entityID.toString();
obj.setProperty(key, scriptEntityProperties);
}
@ -2477,12 +2484,12 @@ void AvatarEntityMapFromScriptValue(const QScriptValue& object, AvatarEntityMap&
while (itr.hasNext()) {
itr.next();
QUuid EntityID = QUuid(itr.name());
QScriptValue scriptEntityProperties = itr.value();
QVariant variantEntityProperties = scriptEntityProperties.toVariant();
QJsonDocument jsonEntityProperties = QJsonDocument::fromVariant(variantEntityProperties);
QByteArray binaryEntityProperties = jsonEntityProperties.toBinaryData();
value[EntityID] = binaryEntityProperties;
}
}

View file

@ -165,15 +165,15 @@ namespace AvatarDataPacket {
const size_t AVATAR_ORIENTATION_SIZE = 6;
PACKED_BEGIN struct AvatarScale {
SmallFloat scale; // avatar's scale, compressed by packFloatRatioToTwoByte()
SmallFloat scale; // avatar's scale, compressed by packFloatRatioToTwoByte()
} PACKED_END;
const size_t AVATAR_SCALE_SIZE = 2;
PACKED_BEGIN struct LookAtPosition {
float lookAtPosition[3]; // world space position that eyes are focusing on.
// FIXME - unless the person has an eye tracker, this is simulated...
// FIXME - unless the person has an eye tracker, this is simulated...
// a) maybe we can just have the client calculate this
// b) at distance this will be hard to discern and can likely be
// b) at distance this will be hard to discern and can likely be
// decimated or dropped completely
//
// POTENTIAL SAVINGS - 12 bytes
@ -376,10 +376,10 @@ public:
glm::vec3 getHandPosition() const;
void setHandPosition(const glm::vec3& handPosition);
typedef enum {
typedef enum {
NoData,
PALMinimum,
MinimumData,
MinimumData,
CullSmallData,
IncludeSmallData,
SendAllData
@ -388,7 +388,7 @@ public:
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail);
virtual QByteArray toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime, const QVector<JointData>& lastSentJointData,
AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
QVector<JointData>* sentJointDataOut, AvatarDataRate* outboundDataRateOut = nullptr) const;
virtual void doneEncoding(bool cullSmallChanges);
@ -417,23 +417,23 @@ public:
void nextAttitude(glm::vec3 position, glm::quat orientation); // Can be safely called at any time.
virtual void updateAttitude() {} // Tell skeleton mesh about changes
glm::quat getHeadOrientation() const {
glm::quat getHeadOrientation() const {
lazyInitHeadData();
return _headData->getOrientation();
return _headData->getOrientation();
}
void setHeadOrientation(const glm::quat& orientation) {
void setHeadOrientation(const glm::quat& orientation) {
if (_headData) {
_headData->setOrientation(orientation);
}
}
void setLookAtPosition(const glm::vec3& lookAtPosition) {
void setLookAtPosition(const glm::vec3& lookAtPosition) {
if (_headData) {
_headData->setLookAtPosition(lookAtPosition);
_headData->setLookAtPosition(lookAtPosition);
}
}
void setBlendshapeCoefficients(const QVector<float>& blendshapeCoefficients) {
void setBlendshapeCoefficients(const QVector<float>& blendshapeCoefficients) {
if (_headData) {
_headData->setBlendshapeCoefficients(blendshapeCoefficients);
}
@ -470,7 +470,7 @@ public:
void setDomainMinimumScale(float domainMinimumScale)
{ _domainMinimumScale = glm::clamp(domainMinimumScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE); _scaleChanged = usecTimestampNow(); }
void setDomainMaximumScale(float domainMaximumScale)
void setDomainMaximumScale(float domainMaximumScale)
{ _domainMaximumScale = glm::clamp(domainMaximumScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE); _scaleChanged = usecTimestampNow(); }
// Hand State
@ -531,7 +531,7 @@ public:
QString displayName;
QString sessionDisplayName;
AvatarEntityMap avatarEntityData;
quint64 updatedAt;
quint64 sequenceId;
};
static void parseAvatarIdentityPacket(const QByteArray& data, Identity& identityOut);
@ -548,8 +548,8 @@ public:
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
virtual void setDisplayName(const QString& displayName);
virtual void setSessionDisplayName(const QString& sessionDisplayName) {
_sessionDisplayName = sessionDisplayName;
virtual void setSessionDisplayName(const QString& sessionDisplayName) {
_sessionDisplayName = sessionDisplayName;
markIdentityDataChanged();
}
@ -604,6 +604,9 @@ public:
return _lastSentJointData;
}
// A method intended to be overridden by MyAvatar for polling orientation for network transmission.
virtual glm::quat getOrientationOutbound() const;
static const float OUT_OF_VIEW_PENALTY;
static void sortAvatars(
@ -623,9 +626,11 @@ public:
bool getIdentityDataChanged() const { return _identityDataChanged; } // has the identity data changed since the last time sendIdentityPacket() was called
void markIdentityDataChanged() {
_identityDataChanged = true;
_identityUpdatedAt = usecTimestampNow();
_identitySequenceId++;
}
float getDensity() const { return _density; }
signals:
void displayNameChanged();
@ -781,7 +786,8 @@ protected:
float _audioAverageLoudness { 0.0f };
bool _identityDataChanged { false };
quint64 _identityUpdatedAt { 0 };
quint64 _identitySequenceId { 0 };
float _density;
private:
friend void avatarStateFromFrame(const QByteArray& frameData, AvatarData* _avatar);

View file

@ -148,6 +148,7 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage>
auto avatar = newOrExistingAvatar(identity.uuid, sendingNode);
bool identityChanged = false;
bool displayNameChanged = false;
// In this case, the "sendingNode" is the Avatar Mixer.
avatar->processAvatarIdentity(identity, identityChanged, displayNameChanged);
}
}

View file

@ -28,14 +28,9 @@ HeadData::HeadData(AvatarData* owningAvatar) :
_basePitch(0.0f),
_baseRoll(0.0f),
_lookAtPosition(0.0f, 0.0f, 0.0f),
_isFaceTrackerConnected(false),
_isEyeTrackerConnected(false),
_leftEyeBlink(0.0f),
_rightEyeBlink(0.0f),
_averageLoudness(0.0f),
_browAudioLift(0.0f),
_baseBlendshapeCoefficients(QVector<float>(0, 0.0f)),
_currBlendShapeCoefficients(QVector<float>(0, 0.0f)),
_blendshapeCoefficients(QVector<float>(0, 0.0f)),
_transientBlendshapeCoefficients(QVector<float>(0, 0.0f)),
_summedBlendshapeCoefficients(QVector<float>(0, 0.0f)),
_owningAvatar(owningAvatar)
{
@ -85,22 +80,22 @@ static const QMap<QString, int>& getBlendshapesLookupMap() {
}
const QVector<float>& HeadData::getSummedBlendshapeCoefficients() {
int maxSize = std::max(_baseBlendshapeCoefficients.size(), _blendshapeCoefficients.size());
if (_currBlendShapeCoefficients.size() != maxSize) {
_currBlendShapeCoefficients.resize(maxSize);
int maxSize = std::max(_blendshapeCoefficients.size(), _transientBlendshapeCoefficients.size());
if (_summedBlendshapeCoefficients.size() != maxSize) {
_summedBlendshapeCoefficients.resize(maxSize);
}
for (int i = 0; i < maxSize; i++) {
if (i >= _baseBlendshapeCoefficients.size()) {
_currBlendShapeCoefficients[i] = _blendshapeCoefficients[i];
} else if (i >= _blendshapeCoefficients.size()) {
_currBlendShapeCoefficients[i] = _baseBlendshapeCoefficients[i];
if (i >= _blendshapeCoefficients.size()) {
_summedBlendshapeCoefficients[i] = _transientBlendshapeCoefficients[i];
} else if (i >= _transientBlendshapeCoefficients.size()) {
_summedBlendshapeCoefficients[i] = _blendshapeCoefficients[i];
} else {
_currBlendShapeCoefficients[i] = _baseBlendshapeCoefficients[i] + _blendshapeCoefficients[i];
_summedBlendshapeCoefficients[i] = _blendshapeCoefficients[i] + _transientBlendshapeCoefficients[i];
}
}
return _currBlendShapeCoefficients;
return _summedBlendshapeCoefficients;
}
void HeadData::setBlendshape(QString name, float val) {
@ -112,10 +107,10 @@ void HeadData::setBlendshape(QString name, float val) {
if (_blendshapeCoefficients.size() <= it.value()) {
_blendshapeCoefficients.resize(it.value() + 1);
}
if (_baseBlendshapeCoefficients.size() <= it.value()) {
_baseBlendshapeCoefficients.resize(it.value() + 1);
if (_transientBlendshapeCoefficients.size() <= it.value()) {
_transientBlendshapeCoefficients.resize(it.value() + 1);
}
_baseBlendshapeCoefficients[it.value()] = val;
_blendshapeCoefficients[it.value()] = val;
}
}
@ -131,14 +126,16 @@ QJsonObject HeadData::toJson() const {
QJsonObject blendshapesJson;
for (auto name : blendshapeLookupMap.keys()) {
auto index = blendshapeLookupMap[name];
if (index >= _blendshapeCoefficients.size()) {
continue;
float value = 0.0f;
if (index < _blendshapeCoefficients.size()) {
value += _blendshapeCoefficients[index];
}
auto value = _blendshapeCoefficients[index];
if (value == 0.0f) {
continue;
if (index < _transientBlendshapeCoefficients.size()) {
value += _transientBlendshapeCoefficients[index];
}
if (value != 0.0f) {
blendshapesJson[name] = value;
}
blendshapesJson[name] = value;
}
if (!blendshapesJson.isEmpty()) {
headJson[JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS] = blendshapesJson;
@ -163,8 +160,8 @@ void HeadData::fromJson(const QJsonObject& json) {
QJsonArray blendshapeCoefficientsJson = jsonValue.toArray();
for (const auto& blendshapeCoefficient : blendshapeCoefficientsJson) {
blendshapeCoefficients.push_back((float)blendshapeCoefficient.toDouble());
setBlendshapeCoefficients(blendshapeCoefficients);
}
setBlendshapeCoefficients(blendshapeCoefficients);
} else if (jsonValue.isObject()) {
QJsonObject blendshapeCoefficientsJson = jsonValue.toObject();
for (const QString& name : blendshapeCoefficientsJson.keys()) {

View file

@ -63,7 +63,7 @@ public:
void setBlendshapeCoefficients(const QVector<float>& blendshapeCoefficients) { _blendshapeCoefficients = blendshapeCoefficients; }
const glm::vec3& getLookAtPosition() const { return _lookAtPosition; }
void setLookAtPosition(const glm::vec3& lookAtPosition) {
void setLookAtPosition(const glm::vec3& lookAtPosition) {
if (_lookAtPosition != lookAtPosition) {
_lookAtPositionChanged = usecTimestampNow();
}
@ -85,16 +85,16 @@ protected:
glm::vec3 _lookAtPosition;
quint64 _lookAtPositionChanged { 0 };
bool _isFaceTrackerConnected;
bool _isEyeTrackerConnected;
float _leftEyeBlink;
float _rightEyeBlink;
float _averageLoudness;
float _browAudioLift;
bool _isFaceTrackerConnected { false };
bool _isEyeTrackerConnected { false };
float _leftEyeBlink { 0.0f };
float _rightEyeBlink { 0.0f };
float _averageLoudness { 0.0f };
float _browAudioLift { 0.0f };
QVector<float> _blendshapeCoefficients;
QVector<float> _baseBlendshapeCoefficients;
QVector<float> _currBlendShapeCoefficients;
QVector<float> _transientBlendshapeCoefficients;
QVector<float> _summedBlendshapeCoefficients;
AvatarData* _owningAvatar;
private:

View file

@ -22,20 +22,20 @@
#include <BuildInfo.h>
#include <GLMHelpers.h>
QString SAVE_DIRECTORY = QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation) + "/" + BuildInfo::MODIFIED_ORGANIZATION + "/" + BuildInfo::INTERFACE_NAME + "/hifi-input-recordings/";
QString FILE_PREFIX_NAME = "input-recording-";
QString COMPRESS_EXTENSION = ".tar.gz";
QString COMPRESS_EXTENSION = "json.gz";
namespace controller {
QJsonObject poseToJsonObject(const Pose pose) {
QJsonObject newPose;
QJsonArray translation;
translation.append(pose.translation.x);
translation.append(pose.translation.y);
translation.append(pose.translation.z);
QJsonArray rotation;
rotation.append(pose.rotation.x);
rotation.append(pose.rotation.y);
@ -69,7 +69,7 @@ namespace controller {
QJsonArray angularVelocity = object["angularVelocity"].toArray();
pose.valid = object["valid"].toBool();
pose.translation.x = translation[0].toDouble();
pose.translation.y = translation[1].toDouble();
pose.translation.z = translation[2].toDouble();
@ -89,13 +89,13 @@ namespace controller {
return pose;
}
void exportToFile(QJsonObject& object) {
if (!QDir(SAVE_DIRECTORY).exists()) {
QDir().mkdir(SAVE_DIRECTORY);
}
QString timeStamp = QDateTime::currentDateTime().toString(Qt::ISODate);
timeStamp.replace(":", "-");
QString fileName = SAVE_DIRECTORY + FILE_PREFIX_NAME + timeStamp + COMPRESS_EXTENSION;
@ -124,7 +124,7 @@ namespace controller {
status = true;
return object;
}
InputRecorder::InputRecorder() {}
InputRecorder::~InputRecorder() {}
@ -185,41 +185,42 @@ namespace controller {
filePath.remove(0,8);
QFileInfo info(filePath);
QString extension = info.suffix();
if (extension != "gz") {
qWarning() << "can not load file with exentsion of " << extension;
return;
}
bool success = false;
QJsonObject data = openFile(info.absoluteFilePath(), success);
if (success) {
_framesRecorded = data["frameCount"].toInt();
QJsonArray actionArrayList = data["actionList"].toArray();
QJsonArray poseArrayList = data["poseList"].toArray();
for (int actionIndex = 0; actionIndex < actionArrayList.size(); actionIndex++) {
QJsonArray actionState = actionArrayList[actionIndex].toArray();
for (int index = 0; index < actionState.size(); index++) {
_currentFrameActions[index] = actionState[index].toInt();
}
_actionStateList.push_back(_currentFrameActions);
_currentFrameActions = ActionStates(toInt(Action::NUM_ACTIONS));
}
for (int poseIndex = 0; poseIndex < poseArrayList.size(); poseIndex++) {
QJsonArray poseState = poseArrayList[poseIndex].toArray();
for (int index = 0; index < poseState.size(); index++) {
_currentFramePoses[index] = jsonObjectToPose(poseState[index].toObject());
}
_poseStateList.push_back(_currentFramePoses);
_currentFramePoses = PoseStates(toInt(Action::NUM_ACTIONS));
}
}
if (extension != "gz") {
qWarning() << "can not load file with exentsion of " << extension;
return;
}
bool success = false;
QJsonObject data = openFile(info.absoluteFilePath(), success);
if (success) {
_framesRecorded = data["frameCount"].toInt();
QJsonArray actionArrayList = data["actionList"].toArray();
QJsonArray poseArrayList = data["poseList"].toArray();
_loading = false;
for (int actionIndex = 0; actionIndex < actionArrayList.size(); actionIndex++) {
QJsonArray actionState = actionArrayList[actionIndex].toArray();
for (int index = 0; index < actionState.size(); index++) {
_currentFrameActions[index] = actionState[index].toDouble();
}
_actionStateList.push_back(_currentFrameActions);
_currentFrameActions = ActionStates(toInt(Action::NUM_ACTIONS));
}
for (int poseIndex = 0; poseIndex < poseArrayList.size(); poseIndex++) {
QJsonArray poseState = poseArrayList[poseIndex].toArray();
for (int index = 0; index < poseState.size(); index++) {
_currentFramePoses[index] = jsonObjectToPose(poseState[index].toObject());
}
_poseStateList.push_back(_currentFramePoses);
_currentFramePoses = PoseStates(toInt(Action::NUM_ACTIONS));
}
}
_loading = false;
}
void InputRecorder::stopRecording() {
_recording = false;
_framesRecorded = (int)_actionStateList.size();
}
void InputRecorder::startPlayback() {
@ -250,13 +251,13 @@ namespace controller {
for(auto& channel : _currentFramePoses) {
channel = Pose();
}
for(auto& channel : _currentFrameActions) {
channel = 0.0f;
}
}
}
float InputRecorder::getActionState(controller::Action action) {
if (_actionStateList.size() > 0 ) {
return _actionStateList[_playCount][toInt(action)];
@ -282,7 +283,7 @@ namespace controller {
if (_playback) {
_playCount++;
if (_playCount == _framesRecorded) {
if (_playCount == (_framesRecorded - 1)) {
_playCount = 0;
}
}

View file

@ -43,14 +43,14 @@
| |
+--------------------+ +-----------------------+
| | | |
| ObjectActionSpring | | ObjectConstraintHinge |
| ObjectActionTractor| | ObjectConstraintHinge |
| (physics) | | (physics) |
+--------------------+ +-----------------------+
A dynamic is a callback which is registered with bullet. A dynamic is called-back every physics
simulation step and can do whatever it wants with the various datastructures it has available. An
simulation step and can do whatever it wants with the various datastructures it has available. A
dynamic, for example, can pull an EntityItem toward a point as if that EntityItem were connected to that
point by a spring.
@ -60,7 +60,7 @@ script or when receiving information via an EntityTree data-stream (either over
svo file).
In the interface, if an EntityItem has dynamics, this EntityItem will have pointers to ObjectDynamic
subclass (like ObjectDynamicSpring) instantiations. Code in the entities library affects a dynamic-object
subclass (like ObjectDynamicTractor) instantiations. Code in the entities library affects a dynamic-object
via the EntityDynamicInterface (which knows nothing about bullet). When the ObjectDynamic subclass
instance is created, it is registered as a dynamic with bullet. Bullet will call into code in this
instance with the btDynamicInterface every physics-simulation step.
@ -75,11 +75,11 @@ right now the AssignmentDynamic class is a place-holder.
The dynamic-objects are instantiated by singleton (dependency) subclasses of EntityDynamicFactoryInterface.
In the interface, the subclass is an InterfaceDynamicFactory and it will produce things like
ObjectDynamicSpring. In an entity-server the subclass is an AssignmentDynamicFactory and it always
ObjectDynamicTractor. In an entity-server the subclass is an AssignmentDynamicFactory and it always
produces AssignmentDynamics.
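For illustration only, a self-contained sketch of the per-dynamic serialization contract described in the next paragraph: each subclass writes its type code, its ID, and its own arguments into a QByteArray, and that blob is what lands in the owning EntityItem's _dynamicData. The function and argument names here are hypothetical; only the Qt and glm types are real.

    #include <QByteArray>
    #include <QDataStream>
    #include <QIODevice>
    #include <QUuid>
    #include <glm/glm.hpp>

    // Hypothetical sketch only: a tractor-like dynamic serializing itself.
    QByteArray serializeTractorSketch(quint32 typeCode, const QUuid& dynamicID,
                                      const glm::vec3& targetPosition, float linearTimeScale) {
        QByteArray blob;
        QDataStream stream(&blob, QIODevice::WriteOnly);
        stream << typeCode;         // e.g. the numeric value of DYNAMIC_TYPE_TRACTOR
        stream << dynamicID;        // lets the factory and remapIDs() find this dynamic again
        stream << targetPosition.x << targetPosition.y << targetPosition.z;
        stream << linearTimeScale;  // the argument-holding members differ per subclass
        return blob;                // stored in _dynamicData, sent over the wire or saved in an svo file
    }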
Depending on the dynamic's type, it will have various arguments. When a script changes an argument of a
dynamic, the argument-holding member-variables of ObjectDynamicSpring (in this example) are updated and
dynamic, the argument-holding member-variables of ObjectDynamicTractor (in this example) are updated and
also serialized into _dynamicData in the EntityItem. Each subclass of ObjectDynamic knows how to serialize
and deserialize its own arguments. _dynamicData is what gets sent over the wire or saved in an svo file.
When a packet-reader receives data for _dynamicData, it will save it in the EntityItem; this causes the
@ -103,7 +103,10 @@ EntityDynamicType EntityDynamicInterface::dynamicTypeFromString(QString dynamicT
return DYNAMIC_TYPE_OFFSET;
}
if (normalizedDynamicTypeString == "spring") {
return DYNAMIC_TYPE_SPRING;
return DYNAMIC_TYPE_TRACTOR;
}
if (normalizedDynamicTypeString == "tractor") {
return DYNAMIC_TYPE_TRACTOR;
}
if (normalizedDynamicTypeString == "hold") {
return DYNAMIC_TYPE_HOLD;
@ -139,7 +142,8 @@ QString EntityDynamicInterface::dynamicTypeToString(EntityDynamicType dynamicTyp
case DYNAMIC_TYPE_OFFSET:
return "offset";
case DYNAMIC_TYPE_SPRING:
return "spring";
case DYNAMIC_TYPE_TRACTOR:
return "tractor";
case DYNAMIC_TYPE_HOLD:
return "hold";
case DYNAMIC_TYPE_TRAVEL_ORIENTED:

View file

@ -17,6 +17,7 @@
#include <glm/glm.hpp>
class EntityItem;
class EntityItemID;
class EntitySimulation;
using EntityItemPointer = std::shared_ptr<EntityItem>;
using EntityItemWeakPointer = std::weak_ptr<EntityItem>;
@ -28,6 +29,7 @@ enum EntityDynamicType {
DYNAMIC_TYPE_NONE = 0,
DYNAMIC_TYPE_OFFSET = 1000,
DYNAMIC_TYPE_SPRING = 2000,
DYNAMIC_TYPE_TRACTOR = 2100,
DYNAMIC_TYPE_HOLD = 3000,
DYNAMIC_TYPE_TRAVEL_ORIENTED = 4000,
DYNAMIC_TYPE_HINGE = 5000,
@ -44,6 +46,9 @@ public:
virtual ~EntityDynamicInterface() { }
const QUuid& getID() const { return _id; }
EntityDynamicType getType() const { return _type; }
virtual void remapIDs(QHash<EntityItemID, EntityItemID>& map) = 0;
virtual bool isAction() const { return false; }
virtual bool isConstraint() const { return false; }
virtual bool isReadyForAdd() const { return true; }
@ -89,15 +94,6 @@ public:
QString argumentName, bool& ok, bool required = true);
protected:
virtual glm::vec3 getPosition() = 0;
// virtual void setPosition(glm::vec3 position) = 0;
virtual glm::quat getRotation() = 0;
// virtual void setRotation(glm::quat rotation) = 0;
virtual glm::vec3 getLinearVelocity() = 0;
virtual void setLinearVelocity(glm::vec3 linearVelocity) = 0;
virtual glm::vec3 getAngularVelocity() = 0;
virtual void setAngularVelocity(glm::vec3 angularVelocity) = 0;
QUuid _id;
EntityDynamicType _type;
bool _active { false };

View file

@ -1691,14 +1691,20 @@ void EntityItem::updateVelocity(const glm::vec3& value) {
setLocalVelocity(Vectors::ZERO);
}
} else {
const float MIN_LINEAR_SPEED = 0.001f;
if (glm::length(value) < MIN_LINEAR_SPEED) {
velocity = ENTITY_ITEM_ZERO_VEC3;
} else {
velocity = value;
float speed = glm::length(value);
if (!glm::isnan(speed)) {
const float MIN_LINEAR_SPEED = 0.001f;
const float MAX_LINEAR_SPEED = 270.0f; // 3m per step at 90Hz
if (speed < MIN_LINEAR_SPEED) {
velocity = ENTITY_ITEM_ZERO_VEC3;
} else if (speed > MAX_LINEAR_SPEED) {
velocity = (MAX_LINEAR_SPEED / speed) * value;
} else {
velocity = value;
}
setLocalVelocity(velocity);
_dirtyFlags |= Simulation::DIRTY_LINEAR_VELOCITY;
}
setLocalVelocity(velocity);
_dirtyFlags |= Simulation::DIRTY_LINEAR_VELOCITY;
}
}
}
@ -1723,8 +1729,16 @@ void EntityItem::updateGravity(const glm::vec3& value) {
if (getShapeType() == SHAPE_TYPE_STATIC_MESH) {
_gravity = Vectors::ZERO;
} else {
_gravity = value;
_dirtyFlags |= Simulation::DIRTY_LINEAR_VELOCITY;
float magnitude = glm::length(value);
if (!glm::isnan(magnitude)) {
const float MAX_ACCELERATION_OF_GRAVITY = 10.0f * 9.8f; // 10g
if (magnitude > MAX_ACCELERATION_OF_GRAVITY) {
_gravity = (MAX_ACCELERATION_OF_GRAVITY / magnitude) * value;
} else {
_gravity = value;
}
_dirtyFlags |= Simulation::DIRTY_LINEAR_VELOCITY;
}
}
}
}
@ -1735,14 +1749,20 @@ void EntityItem::updateAngularVelocity(const glm::vec3& value) {
if (getShapeType() == SHAPE_TYPE_STATIC_MESH) {
setLocalAngularVelocity(Vectors::ZERO);
} else {
const float MIN_ANGULAR_SPEED = 0.0002f;
if (glm::length(value) < MIN_ANGULAR_SPEED) {
angularVelocity = ENTITY_ITEM_ZERO_VEC3;
} else {
angularVelocity = value;
float speed = glm::length(value);
if (!glm::isnan(speed)) {
const float MIN_ANGULAR_SPEED = 0.0002f;
const float MAX_ANGULAR_SPEED = 9.0f * TWO_PI; // 1/10 rotation per step at 90Hz
if (speed < MIN_ANGULAR_SPEED) {
angularVelocity = ENTITY_ITEM_ZERO_VEC3;
} else if (speed > MAX_ANGULAR_SPEED) {
angularVelocity = (MAX_ANGULAR_SPEED / speed) * value;
} else {
angularVelocity = value;
}
setLocalAngularVelocity(angularVelocity);
_dirtyFlags |= Simulation::DIRTY_ANGULAR_VELOCITY;
}
setLocalAngularVelocity(angularVelocity);
_dirtyFlags |= Simulation::DIRTY_ANGULAR_VELOCITY;
}
}
}

View file

@ -662,6 +662,25 @@ QVector<QUuid> EntityScriptingInterface::findEntitiesInFrustum(QVariantMap frust
return result;
}
QVector<QUuid> EntityScriptingInterface::findEntitiesByType(const QString entityType, const glm::vec3& center, float radius) const {
EntityTypes::EntityType type = EntityTypes::getEntityTypeFromName(entityType);
QVector<QUuid> result;
if (_entityTree) {
QVector<EntityItemPointer> entities;
_entityTree->withReadLock([&] {
_entityTree->findEntities(center, radius, entities);
});
foreach(EntityItemPointer entity, entities) {
if (entity->getType() == type) {
result << entity->getEntityItemID();
}
}
}
return result;
}
RayToEntityIntersectionResult EntityScriptingInterface::findRayIntersection(const PickRay& ray, bool precisionPicking,
const QScriptValue& entityIdsToInclude, const QScriptValue& entityIdsToDiscard, bool visibleOnly, bool collidableOnly) {
PROFILE_RANGE(script_entities, __FUNCTION__);

View file

@ -212,9 +212,16 @@ public slots:
/// - orientation
/// - projection
/// - centerRadius
/// this function will not find any models in script engine contexts which don't have access to models
/// this function will not find any models in script engine contexts which don't have access to entities
Q_INVOKABLE QVector<QUuid> findEntitiesInFrustum(QVariantMap frustum) const;
/// finds entities of the indicated type within a sphere given by the center point and radius
/// @param {QString} entityType - string representation of the entity type
/// @param {vec3} center - the center point of the search sphere
/// @param {float} radius - the radius of the search sphere, in meters
/// this function will not find any entities in script engine contexts which don't have access to entities
Q_INVOKABLE QVector<QUuid> findEntitiesByType(const QString entityType, const glm::vec3& center, float radius) const;
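As a rough caller-side illustration of the new query (the function and pointer names below are hypothetical; in the real codebase the interface is typically obtained through DependencyManager or invoked from scripts via Q_INVOKABLE):

```cpp
#include <QDebug>
#include <QUuid>
#include <QVector>
#include <glm/glm.hpp>

// Hypothetical sketch, not part of this diff: list all Sphere entities
// within 10 meters of the origin.
void listNearbySpheres(EntityScriptingInterface* entities) {
    QVector<QUuid> ids = entities->findEntitiesByType("Sphere", glm::vec3(0.0f), 10.0f);
    for (const QUuid& id : ids) {
        qDebug() << "found Sphere entity" << id;
    }
}
```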
/// If the scripting context has visible entities, this will determine a ray intersection; the results
/// may be inaccurate if the engine is unable to access the visible entities, in which case result.accurate
/// will be false.

View file

@ -25,6 +25,8 @@
#include "RecurseOctreeToMapOperator.h"
#include "LogHandler.h"
#include "EntityEditFilters.h"
#include "EntityDynamicFactoryInterface.h"
static const quint64 DELETED_ENTITIES_EXTRA_USECS_TO_CONSIDER = USECS_PER_MSEC * 50;
const float EntityTree::DEFAULT_MAX_TMP_ENTITY_LIFETIME = 60 * 60; // 1 hour
@ -450,6 +452,13 @@ void EntityTree::deleteEntity(const EntityItemID& entityID, bool force, bool ign
// NOTE: callers must lock the tree before using this method
DeleteEntityOperator theOperator(getThisPointer(), entityID);
existingEntity->forEachDescendant([&](SpatiallyNestablePointer descendant) {
auto descendantID = descendant->getID();
theOperator.addEntityIDToDeleteList(descendantID);
emit deletingEntity(descendantID);
});
recurseTreeWithOperator(&theOperator);
processRemovedEntities(theOperator);
_isDirty = true;
@ -1527,6 +1536,48 @@ void EntityTree::pruneTree() {
recurseTreeWithOperator(&theOperator);
}
QByteArray EntityTree::remapActionDataIDs(QByteArray actionData, QHash<EntityItemID, EntityItemID>& map) {
if (actionData.isEmpty()) {
return actionData;
}
QDataStream serializedActionsStream(actionData);
QVector<QByteArray> serializedActions;
serializedActionsStream >> serializedActions;
auto actionFactory = DependencyManager::get<EntityDynamicFactoryInterface>();
QHash<QUuid, EntityDynamicPointer> remappedActions;
foreach(QByteArray serializedAction, serializedActions) {
QDataStream serializedActionStream(serializedAction);
EntityDynamicType actionType;
QUuid oldActionID;
serializedActionStream >> actionType;
serializedActionStream >> oldActionID;
EntityDynamicPointer action = actionFactory->factoryBA(nullptr, serializedAction);
if (action) {
action->remapIDs(map);
remappedActions[action->getID()] = action;
}
}
QVector<QByteArray> remappedSerializedActions;
QHash<QUuid, EntityDynamicPointer>::const_iterator i = remappedActions.begin();
while (i != remappedActions.end()) {
EntityDynamicPointer action = i.value();
QByteArray bytesForAction = action->serialize();
remappedSerializedActions << bytesForAction;
i++;
}
QByteArray result;
QDataStream remappedSerializedActionsStream(&result, QIODevice::WriteOnly);
remappedSerializedActionsStream << remappedSerializedActions;
return result;
}
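The action data walked by remapActionDataIDs is a nested serialization: the outer QByteArray deserializes to a QVector<QByteArray>, and each inner blob begins with the action type followed by the action's QUuid. A minimal sketch of packing and unpacking that outer layer (helper names are illustrative):

```cpp
#include <QByteArray>
#include <QDataStream>
#include <QIODevice>
#include <QVector>

// Illustrative helpers for the outer wire format used by EntityTree action data.
QByteArray packActions(const QVector<QByteArray>& serializedActions) {
    QByteArray blob;
    QDataStream out(&blob, QIODevice::WriteOnly);
    out << serializedActions;              // nested serialization, as in remapActionDataIDs
    return blob;
}

QVector<QByteArray> unpackActions(const QByteArray& blob) {
    QVector<QByteArray> serializedActions;
    QDataStream in(blob);
    in >> serializedActions;
    return serializedActions;
}
```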
QVector<EntityItemID> EntityTree::sendEntities(EntityEditPacketSender* packetSender, EntityTreePointer localTree,
float x, float y, float z) {
SendEntitiesOperationArgs args;
@ -1543,71 +1594,67 @@ QVector<EntityItemID> EntityTree::sendEntities(EntityEditPacketSender* packetSen
});
packetSender->releaseQueuedMessages();
// the values from map are used as the list of successfully "sent" entities. If some didn't actually make it,
// pull them out. Bogus entries could happen if part of the imported data makes some reference to an entity
// that isn't in the data being imported.
QHash<EntityItemID, EntityItemID>::iterator i = map.begin();
while (i != map.end()) {
EntityItemID newID = i.value();
if (localTree->findEntityByEntityItemID(newID)) {
i++;
} else {
i = map.erase(i);
}
}
return map.values().toVector();
}
bool EntityTree::sendEntitiesOperation(OctreeElementPointer element, void* extraData) {
SendEntitiesOperationArgs* args = static_cast<SendEntitiesOperationArgs*>(extraData);
EntityTreeElementPointer entityTreeElement = std::static_pointer_cast<EntityTreeElement>(element);
std::function<const EntityItemID(EntityItemPointer&)> getMapped = [&](EntityItemPointer& item) -> const EntityItemID {
EntityItemID oldID = item->getEntityItemID();
if (args->map->contains(oldID)) { // Already been handled (e.g., as a parent of somebody that we've processed).
return args->map->value(oldID);
}
EntityItemID newID = QUuid::createUuid();
args->map->insert(oldID, newID);
auto getMapped = [&args](EntityItemID oldID) {
if (oldID.isNull()) {
return EntityItemID();
}
QHash<EntityItemID, EntityItemID>::iterator iter = args->map->find(oldID);
if (iter == args->map->end()) {
EntityItemID newID = QUuid::createUuid();
args->map->insert(oldID, newID);
return newID;
}
return iter.value();
};
entityTreeElement->forEachEntity([&args, &getMapped, &element](EntityItemPointer item) {
EntityItemID oldID = item->getEntityItemID();
EntityItemID newID = getMapped(oldID);
EntityItemProperties properties = item->getProperties();
EntityItemID oldParentID = properties.getParentID();
if (oldParentID.isInvalidID()) { // no parent
properties.setPosition(properties.getPosition() + args->root);
} else {
EntityItemPointer parentEntity = args->ourTree->findEntityByEntityItemID(oldParentID);
if (parentEntity) { // map the parent
// Warning: (non-tail) recursion of getMapped could blow the call stack if the parent hierarchy is VERY deep.
properties.setParentID(getMapped(parentEntity));
properties.setParentID(getMapped(parentEntity->getID()));
// But do not add root offset in this case.
} else { // Should not happen, but let's try to be helpful...
item->globalizeProperties(properties, "Cannot find %3 parent of %2 %1", args->root);
}
}
if (!properties.getXNNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getXNNeighborID());
if (neighborEntity) {
properties.setXNNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getXPNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getXPNeighborID());
if (neighborEntity) {
properties.setXPNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getYNNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getYNNeighborID());
if (neighborEntity) {
properties.setYNNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getYPNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getYPNeighborID());
if (neighborEntity) {
properties.setYPNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getZNNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getZNNeighborID());
if (neighborEntity) {
properties.setZNNeighborID(getMapped(neighborEntity));
}
}
if (!properties.getZPNeighborID().isInvalidID()) {
auto neighborEntity = args->ourTree->findEntityByEntityItemID(properties.getZPNeighborID());
if (neighborEntity) {
properties.setZPNeighborID(getMapped(neighborEntity));
}
}
properties.setXNNeighborID(getMapped(properties.getXNNeighborID()));
properties.setXPNeighborID(getMapped(properties.getXPNeighborID()));
properties.setYNNeighborID(getMapped(properties.getYNNeighborID()));
properties.setYPNeighborID(getMapped(properties.getYPNeighborID()));
properties.setZNNeighborID(getMapped(properties.getZNNeighborID()));
properties.setZPNeighborID(getMapped(properties.getZPNeighborID()));
QByteArray actionData = properties.getActionData();
properties.setActionData(remapActionDataIDs(actionData, *args->map));
// set creation time to "now" for imported entities
properties.setCreated(usecTimestampNow());
@ -1623,13 +1670,13 @@ bool EntityTree::sendEntitiesOperation(OctreeElementPointer element, void* extra
// also update the local tree instantly (note: this is not our tree, but an alternate tree)
if (args->otherTree) {
args->otherTree->withWriteLock([&] {
args->otherTree->addEntity(newID, properties);
EntityItemPointer entity = args->otherTree->addEntity(newID, properties);
entity->deserializeActions();
});
}
return newID;
};
});
entityTreeElement->forEachEntity(getMapped);
return true;
}
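The getMapped lambda above implements a lazy ID remap: the first time an old ID is seen, a fresh UUID is minted and recorded, so parents and neighbor references all resolve to the same new ID. A simplified generic version of that pattern (using plain QUuid; EntityItemID derives from QUuid in the real codebase):

```cpp
#include <QHash>
#include <QUuid>

// Illustrative sketch of the lazy remapping used by sendEntitiesOperation's getMapped.
QUuid getOrCreateMapping(QHash<QUuid, QUuid>& map, const QUuid& oldID) {
    if (oldID.isNull()) {
        return QUuid();                    // null stays null (no parent / no neighbor)
    }
    auto iter = map.find(oldID);
    if (iter == map.end()) {
        iter = map.insert(oldID, QUuid::createUuid());
    }
    return iter.value();
}
```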

View file

@ -205,6 +205,8 @@ public:
virtual void dumpTree() override;
virtual void pruneTree() override;
static QByteArray remapActionDataIDs(QByteArray actionData, QHash<EntityItemID, EntityItemID>& map);
QVector<EntityItemID> sendEntities(EntityEditPacketSender* packetSender, EntityTreePointer localTree,
float x, float y, float z);

View file

@ -273,10 +273,9 @@ std::tuple<bool, QByteArray> requestData(QUrl& url) {
return std::make_tuple(false, QByteArray());
}
QEventLoop loop;
QObject::connect(request, &ResourceRequest::finished, &loop, &QEventLoop::quit);
request->send();
loop.exec();
if (request->getResult() == ResourceRequest::Success) {

View file

@ -1,5 +1,5 @@
set(TARGET_NAME gpu-gl)
setup_hifi_library()
setup_hifi_library(Concurrent)
link_hifi_libraries(shared gl gpu)
if (UNIX)
target_link_libraries(${TARGET_NAME} pthread)

View file

@ -149,6 +149,10 @@ void GLBackend::resetUniformStage() {
void GLBackend::do_setUniformBuffer(const Batch& batch, size_t paramOffset) {
GLuint slot = batch._params[paramOffset + 3]._uint;
if (slot > (GLuint)MAX_NUM_UNIFORM_BUFFERS) {
qCDebug(gpugllogging) << "GLBackend::do_setUniformBuffer: Trying to set a uniform Buffer at slot #" << slot << " which doesn't exist. MaxNumUniformBuffers = " << getMaxNumUniformBuffers();
return;
}
BufferPointer uniformBuffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
GLintptr rangeStart = batch._params[paramOffset + 1]._uint;
GLsizeiptr rangeSize = batch._params[paramOffset + 0]._uint;
@ -203,7 +207,7 @@ void GLBackend::resetResourceStage() {
void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
GLuint slot = batch._params[paramOffset + 1]._uint;
if (slot >= (GLuint)MAX_NUM_RESOURCE_BUFFERS) {
// "GLBackend::do_setResourceBuffer: Trying to set a resource Buffer at slot #" + slot + " which doesn't exist. MaxNumResourceBuffers = " + getMaxNumResourceBuffers());
qCDebug(gpugllogging) << "GLBackend::do_setResourceBuffer: Trying to set a resource Buffer at slot #" << slot << " which doesn't exist. MaxNumResourceBuffers = " << getMaxNumResourceBuffers();
return;
}
@ -233,7 +237,7 @@ void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
void GLBackend::do_setResourceTexture(const Batch& batch, size_t paramOffset) {
GLuint slot = batch._params[paramOffset + 1]._uint;
if (slot >= (GLuint) MAX_NUM_RESOURCE_TEXTURES) {
// "GLBackend::do_setResourceTexture: Trying to set a resource Texture at slot #" + slot + " which doesn't exist. MaxNumResourceTextures = " + getMaxNumResourceTextures());
qCDebug(gpugllogging) << "GLBackend::do_setResourceTexture: Trying to set a resource Texture at slot #" << slot << " which doesn't exist. MaxNumResourceTextures = " << getMaxNumResourceTextures();
return;
}

View file

@ -160,8 +160,6 @@ const uvec3 GLVariableAllocationSupport::INITIAL_MIP_TRANSFER_DIMENSIONS { 64, 6
WorkQueue GLVariableAllocationSupport::_transferQueue;
WorkQueue GLVariableAllocationSupport::_promoteQueue;
WorkQueue GLVariableAllocationSupport::_demoteQueue;
TexturePointer GLVariableAllocationSupport::_currentTransferTexture;
TransferJobPointer GLVariableAllocationSupport::_currentTransferJob;
size_t GLVariableAllocationSupport::_frameTexturesCreated { 0 };
#define OVERSUBSCRIBED_PRESSURE_VALUE 0.95f
@ -176,30 +174,19 @@ const uvec3 GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS { 1024, 1024, 1
const size_t GLVariableAllocationSupport::MAX_TRANSFER_SIZE = GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS.x * GLVariableAllocationSupport::MAX_TRANSFER_DIMENSIONS.y * 4;
#if THREADED_TEXTURE_BUFFERING
std::shared_ptr<std::thread> TransferJob::_bufferThread { nullptr };
std::atomic<bool> TransferJob::_shutdownBufferingThread { false };
Mutex TransferJob::_mutex;
TransferJob::VoidLambdaQueue TransferJob::_bufferLambdaQueue;
void TransferJob::startTransferLoop() {
if (_bufferThread) {
return;
}
_shutdownBufferingThread = false;
_bufferThread = std::make_shared<std::thread>([] {
TransferJob::bufferLoop();
TexturePointer GLVariableAllocationSupport::_currentTransferTexture;
TransferJobPointer GLVariableAllocationSupport::_currentTransferJob;
QThreadPool* TransferJob::_bufferThreadPool { nullptr };
void TransferJob::startBufferingThread() {
static std::once_flag once;
std::call_once(once, [&] {
_bufferThreadPool = new QThreadPool(qApp);
_bufferThreadPool->setMaxThreadCount(1);
});
}
void TransferJob::stopTransferLoop() {
if (!_bufferThread) {
return;
}
_shutdownBufferingThread = true;
_bufferThread->join();
_bufferThread.reset();
_shutdownBufferingThread = false;
}
#endif
TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t targetMip, uint8_t face, uint32_t lines, uint32_t lineOffset)
@ -233,7 +220,6 @@ TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t t
// Buffering can invoke disk IO, so it should be off of the main and render threads
_bufferingLambda = [=] {
_mipData = _parent._gpuObject.accessStoredMipFace(sourceMip, face)->createView(_transferSize, _transferOffset);
_bufferingCompleted = true;
};
_transferLambda = [=] {
@ -243,65 +229,66 @@ TransferJob::TransferJob(const GLTexture& parent, uint16_t sourceMip, uint16_t t
}
TransferJob::TransferJob(const GLTexture& parent, std::function<void()> transferLambda)
: _parent(parent), _bufferingCompleted(true), _transferLambda(transferLambda) {
: _parent(parent), _bufferingRequired(false), _transferLambda(transferLambda) {
}
TransferJob::~TransferJob() {
Backend::updateTextureTransferPendingSize(_transferSize, 0);
}
bool TransferJob::tryTransfer() {
// Disable threaded texture transfer for now
#if THREADED_TEXTURE_BUFFERING
// Are we ready to transfer
if (_bufferingCompleted) {
_transferLambda();
if (!bufferingCompleted()) {
startBuffering();
return false;
}
#else
if (_bufferingRequired) {
_bufferingLambda();
}
#endif
_transferLambda();
return true;
}
#if THREADED_TEXTURE_BUFFERING
bool TransferJob::bufferingRequired() const {
if (!_bufferingRequired) {
return false;
}
// A default-constructed QFuture reports status Canceled | Started | Finished,
// so we have to check isCanceled() before we check the actual state
if (_bufferingStatus.isCanceled()) {
return true;
}
startBuffering();
return false;
#else
if (!_bufferingCompleted) {
_bufferingLambda();
_bufferingCompleted = true;
}
_transferLambda();
return true;
#endif
return !_bufferingStatus.isStarted();
}
#if THREADED_TEXTURE_BUFFERING
bool TransferJob::bufferingCompleted() const {
if (!_bufferingRequired) {
return true;
}
// A default-constructed QFuture reports status Canceled | Started | Finished,
// so we have to check isCanceled() before we check the actual state
if (_bufferingStatus.isCanceled()) {
return false;
}
return _bufferingStatus.isFinished();
}
void TransferJob::startBuffering() {
if (_bufferingStarted) {
return;
}
_bufferingStarted = true;
{
Lock lock(_mutex);
_bufferLambdaQueue.push(_bufferingLambda);
}
}
void TransferJob::bufferLoop() {
while (!_shutdownBufferingThread) {
VoidLambdaQueue workingQueue;
{
Lock lock(_mutex);
_bufferLambdaQueue.swap(workingQueue);
}
if (workingQueue.empty()) {
QThread::msleep(5);
continue;
}
while (!workingQueue.empty()) {
workingQueue.front()();
workingQueue.pop();
}
if (bufferingRequired()) {
assert(_bufferingStatus.isCanceled());
_bufferingStatus = QtConcurrent::run(_bufferThreadPool, [=] {
_bufferingLambda();
});
assert(!_bufferingStatus.isCanceled());
assert(_bufferingStatus.isStarted());
}
}
#endif
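The buffering path above leans on a subtle QFuture property noted in the comments: a default-constructed future already reports Canceled | Started | Finished. A self-contained sketch of that behaviour, assuming Qt 5.4+ with the Concurrent module (which the CMake change above links in):

```cpp
#include <QtConcurrent/QtConcurrent>
#include <QCoreApplication>
#include <QFuture>
#include <QThreadPool>
#include <cassert>

// Standalone sketch, not hifi code: isCanceled() must be checked first to tell
// "never scheduled" apart from "actually ran".
int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QFuture<void> status;                              // default state
    assert(status.isCanceled() && status.isStarted() && status.isFinished());

    QThreadPool pool;
    pool.setMaxThreadCount(1);                         // single buffering thread, as above
    status = QtConcurrent::run(&pool, [] { /* buffering work would go here */ });
    assert(!status.isCanceled());                      // now the flags reflect a real job
    status.waitForFinished();
    return 0;
}
```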
@ -316,7 +303,9 @@ GLVariableAllocationSupport::~GLVariableAllocationSupport() {
void GLVariableAllocationSupport::addMemoryManagedTexture(const TexturePointer& texturePointer) {
_memoryManagedTextures.push_back(texturePointer);
addToWorkQueue(texturePointer);
if (MemoryPressureState::Idle != _memoryPressureState) {
addToWorkQueue(texturePointer);
}
}
void GLVariableAllocationSupport::addToWorkQueue(const TexturePointer& texturePointer) {
@ -345,10 +334,8 @@ void GLVariableAllocationSupport::addToWorkQueue(const TexturePointer& texturePo
break;
case MemoryPressureState::Idle:
break;
default:
Q_UNREACHABLE();
break;
}
}
@ -364,10 +351,10 @@ WorkQueue& GLVariableAllocationSupport::getActiveWorkQueue() {
case MemoryPressureState::Transfer:
return _transferQueue;
default:
case MemoryPressureState::Idle:
Q_UNREACHABLE();
break;
}
Q_UNREACHABLE();
return empty;
}
@ -460,16 +447,11 @@ void GLVariableAllocationSupport::updateMemoryPressure() {
}
if (newState != _memoryPressureState) {
_memoryPressureState = newState;
#if THREADED_TEXTURE_BUFFERING
if (MemoryPressureState::Transfer == _memoryPressureState) {
TransferJob::stopTransferLoop();
TransferJob::startBufferingThread();
}
_memoryPressureState = newState;
if (MemoryPressureState::Transfer == _memoryPressureState) {
TransferJob::startTransferLoop();
}
#else
_memoryPressureState = newState;
#endif
// Clear the existing queue
_transferQueue = WorkQueue();
@ -487,49 +469,111 @@ void GLVariableAllocationSupport::updateMemoryPressure() {
}
}
TexturePointer GLVariableAllocationSupport::getNextWorkQueueItem(WorkQueue& workQueue) {
while (!workQueue.empty()) {
auto workTarget = workQueue.top();
auto texture = workTarget.first.lock();
if (!texture) {
workQueue.pop();
continue;
}
// Check whether the resulting texture can actually have work performed
GLTexture* gltexture = Backend::getGPUObject<GLTexture>(*texture);
GLVariableAllocationSupport* vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
switch (_memoryPressureState) {
case MemoryPressureState::Oversubscribed:
if (vartexture->canDemote()) {
return texture;
}
break;
case MemoryPressureState::Undersubscribed:
if (vartexture->canPromote()) {
return texture;
}
break;
case MemoryPressureState::Transfer:
if (vartexture->hasPendingTransfers()) {
return texture;
}
break;
case MemoryPressureState::Idle:
Q_UNREACHABLE();
break;
}
// If we got here, then the texture has no work to do in the current state,
// so pop it off the queue and continue
workQueue.pop();
}
return TexturePointer();
}
void GLVariableAllocationSupport::processWorkQueue(WorkQueue& workQueue) {
if (workQueue.empty()) {
return;
}
// Get the front of the work queue to perform work
auto texture = getNextWorkQueueItem(workQueue);
if (!texture) {
return;
}
// Grab the first item off the demote queue
PROFILE_RANGE(render_gpu_gl, __FUNCTION__);
GLTexture* gltexture = Backend::getGPUObject<GLTexture>(*texture);
GLVariableAllocationSupport* vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
switch (_memoryPressureState) {
case MemoryPressureState::Oversubscribed:
vartexture->demote();
workQueue.pop();
addToWorkQueue(texture);
break;
case MemoryPressureState::Undersubscribed:
vartexture->promote();
workQueue.pop();
addToWorkQueue(texture);
break;
case MemoryPressureState::Transfer:
if (vartexture->executeNextTransfer(texture)) {
workQueue.pop();
addToWorkQueue(texture);
#if THREADED_TEXTURE_BUFFERING
// Eagerly start the next buffering job if possible
texture = getNextWorkQueueItem(workQueue);
if (texture) {
gltexture = Backend::getGPUObject<GLTexture>(*texture);
vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
vartexture->executeNextBuffer(texture);
}
#endif
}
break;
case MemoryPressureState::Idle:
Q_UNREACHABLE();
break;
}
}
void GLVariableAllocationSupport::processWorkQueues() {
if (MemoryPressureState::Idle == _memoryPressureState) {
return;
}
auto& workQueue = getActiveWorkQueue();
PROFILE_RANGE(render_gpu_gl, __FUNCTION__);
while (!workQueue.empty()) {
auto workTarget = workQueue.top();
workQueue.pop();
auto texture = workTarget.first.lock();
if (!texture) {
continue;
}
// Grab the first item off the demote queue
GLTexture* gltexture = Backend::getGPUObject<GLTexture>(*texture);
GLVariableAllocationSupport* vartexture = dynamic_cast<GLVariableAllocationSupport*>(gltexture);
if (MemoryPressureState::Oversubscribed == _memoryPressureState) {
if (!vartexture->canDemote()) {
continue;
}
vartexture->demote();
_memoryPressureStateStale = true;
} else if (MemoryPressureState::Undersubscribed == _memoryPressureState) {
if (!vartexture->canPromote()) {
continue;
}
vartexture->promote();
_memoryPressureStateStale = true;
} else if (MemoryPressureState::Transfer == _memoryPressureState) {
if (!vartexture->hasPendingTransfers()) {
continue;
}
vartexture->executeNextTransfer(texture);
} else {
Q_UNREACHABLE();
}
// Reinject into the queue if more work to be done
addToWorkQueue(texture);
break;
}
// Do work on the front of the queue
processWorkQueue(workQueue);
if (workQueue.empty()) {
_memoryPressureState = MemoryPressureState::Idle;
@ -543,28 +587,83 @@ void GLVariableAllocationSupport::manageMemory() {
processWorkQueues();
}
bool GLVariableAllocationSupport::executeNextTransfer(const TexturePointer& currentTexture) {
#if THREADED_TEXTURE_BUFFERING
// If a transfer job is active on the buffering thread, but has not completed its buffering lambda,
// then we need to exit early, since we don't want to have the transfer job leave scope while it's
// being used in another thread -- See https://highfidelity.fogbugz.com/f/cases/4626
if (_currentTransferJob && !_currentTransferJob->bufferingCompleted()) {
return false;
}
#endif
void GLVariableAllocationSupport::executeNextTransfer(const TexturePointer& currentTexture) {
if (_populatedMip <= _allocatedMip) {
#if THREADED_TEXTURE_BUFFERING
_currentTransferJob.reset();
_currentTransferTexture.reset();
#endif
return true;
}
// If the transfer queue is empty, rebuild it
if (_pendingTransfers.empty()) {
populateTransferQueue();
}
bool result = false;
if (!_pendingTransfers.empty()) {
#if THREADED_TEXTURE_BUFFERING
// If there is a current transfer, but it's not the top of the pending transfer queue, then it's an orphan, so we want to abandon it.
if (_currentTransferJob && _currentTransferJob != _pendingTransfers.front()) {
_currentTransferJob.reset();
}
if (!_currentTransferJob) {
// Keeping hold of a strong pointer to the transfer job ensures that if the pending transfer queue is rebuilt, the transfer job
// doesn't leave scope, causing a crash in the buffering thread
_currentTransferJob = _pendingTransfers.front();
// Keeping hold of a strong pointer during the transfer ensures that the transfer thread cannot try to access a destroyed texture
_currentTransferTexture = currentTexture;
}
// transfer jobs use asynchronous buffering of the texture data because it may involve disk IO, so we attempt the transfer
// here to determine whether the buffering is complete
if (_currentTransferJob->tryTransfer()) {
_pendingTransfers.pop();
// Once a given job is finished, release the shared pointers keeping them alive
_currentTransferTexture.reset();
_currentTransferJob.reset();
result = true;
}
#else
if (_pendingTransfers.front()->tryTransfer()) {
_pendingTransfers.pop();
result = true;
}
#endif
}
return result;
}
#if THREADED_TEXTURE_BUFFERING
void GLVariableAllocationSupport::executeNextBuffer(const TexturePointer& currentTexture) {
if (_currentTransferJob && !_currentTransferJob->bufferingCompleted()) {
return;
}
// If the transfer queue is empty, rebuild it
if (_pendingTransfers.empty()) {
populateTransferQueue();
}
if (!_pendingTransfers.empty()) {
// Keeping hold of a strong pointer during the transfer ensures that the transfer thread cannot try to access a destroyed texture
_currentTransferTexture = currentTexture;
// Keeping hold of a strong pointer to the transfer job ensures that if the pending transfer queue is rebuilt, the transfer job
// doesn't leave scope, causing a crash in the buffering thread
_currentTransferJob = _pendingTransfers.front();
// transfer jobs use asynchronous buffering of the texture data because it may involve disk IO, so we attempt the transfer
// here to determine whether the buffering is complete
if (_currentTransferJob->tryTransfer()) {
_pendingTransfers.pop();
_currentTransferTexture.reset();
_currentTransferJob.reset();
if (!_currentTransferJob) {
_currentTransferJob = _pendingTransfers.front();
_currentTransferTexture = currentTexture;
}
_currentTransferJob->startBuffering();
}
}
#endif
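The strong references held in _currentTransferTexture and _currentTransferJob above follow a common rule for handing work to another thread: the owner pins a shared_ptr for the whole lifetime of the asynchronous job so the worker can never observe a destroyed object (see the fogbugz case referenced in the comments). A generic sketch of the same idea, with illustrative names:

```cpp
#include <memory>
#include <thread>

// Illustrative only: the lambda captures the shared_ptr by value, so the Job stays
// alive until the worker finishes, even if the owner drops its own reference.
struct Job {
    void run() { /* buffering / transfer work */ }
};

void launch(std::shared_ptr<Job> job) {
    std::thread worker([job] { job->run(); });  // the captured copy pins the Job
    worker.join();                              // real code would use a pool or future instead
}
```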

View file

@ -8,6 +8,9 @@
#ifndef hifi_gpu_gl_GLTexture_h
#define hifi_gpu_gl_GLTexture_h
#include <QtCore/QThreadPool>
#include <QtConcurrent>
#include "GLShared.h"
#include "GLBackend.h"
#include "GLTexelFormat.h"
@ -47,24 +50,19 @@ public:
class TransferJob {
using VoidLambda = std::function<void()>;
using VoidLambdaQueue = std::queue<VoidLambda>;
using ThreadPointer = std::shared_ptr<std::thread>;
const GLTexture& _parent;
Texture::PixelsPointer _mipData;
size_t _transferOffset { 0 };
size_t _transferSize { 0 };
// Indicates whether a transfer from backing storage to internal storage has started
bool _bufferingStarted { false };
bool _bufferingCompleted { false };
bool _bufferingRequired { true };
VoidLambda _transferLambda;
VoidLambda _bufferingLambda;
#if THREADED_TEXTURE_BUFFERING
static Mutex _mutex;
static VoidLambdaQueue _bufferLambdaQueue;
static ThreadPointer _bufferThread;
static std::atomic<bool> _shutdownBufferingThread;
static void bufferLoop();
// Indicates whether a transfer from backing storage to internal storage has started
QFuture<void> _bufferingStatus;
static QThreadPool* _bufferThreadPool;
#endif
public:
@ -75,14 +73,13 @@ public:
bool tryTransfer();
#if THREADED_TEXTURE_BUFFERING
static void startTransferLoop();
static void stopTransferLoop();
void startBuffering();
bool bufferingRequired() const;
bool bufferingCompleted() const;
static void startBufferingThread();
#endif
private:
#if THREADED_TEXTURE_BUFFERING
void startBuffering();
#endif
void transfer();
};
@ -100,8 +97,10 @@ protected:
static WorkQueue _transferQueue;
static WorkQueue _promoteQueue;
static WorkQueue _demoteQueue;
#if THREADED_TEXTURE_BUFFERING
static TexturePointer _currentTransferTexture;
static TransferJobPointer _currentTransferJob;
#endif
static const uvec3 INITIAL_MIP_TRANSFER_DIMENSIONS;
static const uvec3 MAX_TRANSFER_DIMENSIONS;
static const size_t MAX_TRANSFER_SIZE;
@ -109,6 +108,8 @@ protected:
static void updateMemoryPressure();
static void processWorkQueues();
static void processWorkQueue(WorkQueue& workQueue);
static TexturePointer getNextWorkQueueItem(WorkQueue& workQueue);
static void addToWorkQueue(const TexturePointer& texture);
static WorkQueue& getActiveWorkQueue();
@ -118,7 +119,10 @@ protected:
bool canPromote() const { return _allocatedMip > _minAllocatedMip; }
bool canDemote() const { return _allocatedMip < _maxAllocatedMip; }
bool hasPendingTransfers() const { return _populatedMip > _allocatedMip; }
void executeNextTransfer(const TexturePointer& currentTexture);
#if THREADED_TEXTURE_BUFFERING
void executeNextBuffer(const TexturePointer& currentTexture);
#endif
bool executeNextTransfer(const TexturePointer& currentTexture);
virtual void populateTransferQueue() = 0;
virtual void promote() = 0;
virtual void demote() = 0;

View file

@ -17,7 +17,6 @@
#include <thread>
#define INCREMENTAL_TRANSFER 0
#define THREADED_TEXTURE_BUFFERING 1
#define GPU_SSBO_TRANSFORM_OBJECT 1
namespace gpu { namespace gl45 {

Some files were not shown because too many files have changed in this diff.