Mirror of https://github.com/overte-org/overte.git (synced 2025-08-08 19:16:56 +02:00)

Merge remote-tracking branch 'highfidelity/master'
Commit a49c4756dd: 13 changed files with 298 additions and 84 deletions

examples/example/misc/collectHifiStats.js (new file, 93 lines)
@@ -0,0 +1,93 @@
//
//  collectHifiStats.js
//
//  Created by Thijs Wenker on 24 Sept 2015
//  Additions by James B. Pollack @imgntn on 25 Sept 2015
//  Copyright 2015 High Fidelity, Inc.
//
//  Collects stats and posts them to a REST endpoint for analysis. Defaults to batching stats, but can be customized.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// The url where the data will be posted.
var ENDPOINT_URL = "";

var BATCH_STATS = true;
var BATCH_SIZE = 5;

var batch = [];

if (BATCH_STATS) {

    var RECORD_EVERY = 1000; // 1 second
    var batchCount = 0;

    Script.setInterval(function() {
        if (batchCount === BATCH_SIZE) {
            sendBatchToEndpoint(batch);
            batchCount = 0;
        }
        Stats.forceUpdateStats();
        batch.push(getBatchStats());
        batchCount++;
    }, RECORD_EVERY);

} else {
    // Send the data every:
    var SEND_EVERY = 30000; // 30 seconds

    Script.setInterval(function() {
        Stats.forceUpdateStats();
        var req = new XMLHttpRequest();
        req.open("POST", ENDPOINT_URL, false);
        req.send(getStats());
    }, SEND_EVERY);
}

function getStats() {
    return JSON.stringify({
        username: GlobalServices.username,
        location: Window.location.hostname,
        framerate: Stats.framerate,
        simrate: Stats.simrate,
        ping: {
            audio: Stats.audioPing,
            avatar: Stats.avatarPing,
            entities: Stats.entitiesPing,
            asset: Stats.assetPing
        },
        position: Camera.position,
        yaw: Stats.yaw,
        rotation: Camera.orientation.x + ',' + Camera.orientation.y + ',' + Camera.orientation.z + ',' + Camera.orientation.w
    });
}

function getBatchStats() {
    // print('GET BATCH STATS');
    return {
        username: GlobalServices.username,
        location: Window.location.hostname,
        framerate: Stats.framerate,
        simrate: Stats.simrate,
        ping: {
            audio: Stats.audioPing,
            avatar: Stats.avatarPing,
            entities: Stats.entitiesPing,
            asset: Stats.assetPing
        },
        position: Camera.position,
        yaw: Stats.yaw,
        rotation: Camera.orientation.x + ',' + Camera.orientation.y + ',' + Camera.orientation.z + ',' + Camera.orientation.w
    };
}

function sendBatchToEndpoint(batch) {
    // print('SEND BATCH TO ENDPOINT');
    var req = new XMLHttpRequest();
    req.open("POST", ENDPOINT_URL, false);
    req.send(JSON.stringify(batch));
}
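
The file header above notes that the script "can be customized". A minimal sketch of one such customization, not part of the commit: posting the batch asynchronously and stamping each sample with a capture time. The helper name, the Content-Type header, and the timestamp field are assumptions; only the XMLHttpRequest, Script, and Stats interfaces already used above are relied on.

// Hypothetical variation (not in this commit): post the batch asynchronously and tag each sample.
// Assumes the endpoint accepts the extra "timestamp" field and that the scripted XMLHttpRequest
// supports setRequestHeader() like the browser API.
function sendBatchToEndpointAsync(batch) {
    var stamped = batch.map(function(sample) {
        sample.timestamp = Date.now(); // milliseconds since epoch, added per sample
        return sample;
    });
    var req = new XMLHttpRequest();
    req.open("POST", ENDPOINT_URL, true); // true = asynchronous, so the script thread is not blocked
    req.setRequestHeader("Content-Type", "application/json");
    req.send(JSON.stringify(stamped));
}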

examples/example/misc/statsExample.js (new file, 67 lines)
@@ -0,0 +1,67 @@
//
//  statsExample.js
//  examples/example/misc
//
//  Created by Thijs Wenker on 24 Sept 2015
//  Copyright 2015 High Fidelity, Inc.
//
//  Prints the stats to the console.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// The stats to be displayed
var stats = [
    'serverCount',
    'framerate', // a.k.a. FPS
    'simrate',
    'avatarSimrate',
    'avatarCount',
    'packetInCount',
    'packetOutCount',
    'mbpsIn',
    'mbpsOut',
    'audioPing',
    'avatarPing',
    'entitiesPing',
    'assetPing',
    'velocity',
    'yaw',
    'avatarMixerKbps',
    'avatarMixerPps',
    'audioMixerKbps',
    'audioMixerPps',
    'downloads',
    'downloadsPending',
    'triangles',
    'quads',
    'materialSwitches',
    'meshOpaque',
    'meshTranslucent',
    'opaqueConsidered',
    'opaqueOutOfView',
    'opaqueTooSmall',
    'translucentConsidered',
    'translucentOutOfView',
    'translucentTooSmall',
    'sendingMode',
    'packetStats',
    'lodStatus',
    'timingStats',
    'serverElements',
    'serverInternal',
    'serverLeaves',
    'localElements',
    'localInternal',
    'localLeaves'
];

// Force update the stats, in case the stats panel is invisible
Stats.forceUpdateStats();

// Loop through the stats and display them
for (var i in stats) {
    var stat = stats[i];
    print(stat + " = " + Stats[stat]);
}
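
A small sketch, not part of the commit, of how the same list could be polled on a timer instead of printed once. It uses only the Script and Stats interfaces shown above; the interval value is an arbitrary assumption.

// Hypothetical variation: re-print the list every few seconds instead of once.
var POLL_INTERVAL_MS = 5000; // assumed interval, pick whatever suits your logging
Script.setInterval(function() {
    Stats.forceUpdateStats(); // refresh values even while the stats overlay is hidden
    for (var i = 0; i < stats.length; i++) {
        print(stats[i] + " = " + Stats[stats[i]]);
    }
}, POLL_INTERVAL_MS);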

@@ -560,7 +560,7 @@
             "id": "strafeLeft",
             "type": "clip",
             "data": {
-                "url": "https://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/strafe_left.fbx",
+                "url": "http://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/side_step_left.fbx",
                 "startFrame": 0.0,
                 "endFrame": 31.0,
                 "timeScale": 1.0,
@@ -572,7 +572,7 @@
             "id": "strafeRight",
             "type": "clip",
             "data": {
-                "url": "https://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/strafe_right.fbx",
+                "url": "http://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/side_step_right.fbx",
                 "startFrame": 0.0,
                 "endFrame": 31.0,
                 "timeScale": 1.0,

@@ -708,7 +708,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
 
     // Now that menu is initalized we can sync myAvatar with it's state.
     _myAvatar->updateMotionBehaviorFromMenu();
-    _myAvatar->updateStandingHMDModeFromMenu();
 
     // the 3Dconnexion device wants to be initiliazed after a window is displayed.
     ConnexionClient::getInstance().init();
@@ -1055,8 +1054,6 @@ void Application::paintGL() {
     auto displayPlugin = getActiveDisplayPlugin();
     displayPlugin->preRender();
     _offscreenContext->makeCurrent();
-    // update the avatar with a fresh HMD pose
-    _myAvatar->updateFromHMDSensorMatrix(getHMDSensorPose());
 
     auto lodManager = DependencyManager::get<LODManager>();
 
@@ -2898,6 +2895,9 @@ void Application::update(float deltaTime) {
             userInputMapper->getActionState(UserInputMapper::SHIFT), RIGHT_HAND_INDEX);
     }
 
+    // update the avatar with a fresh HMD pose
+    _myAvatar->updateFromHMDSensorMatrix(getHMDSensorPose(), deltaTime);
+
     updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
 
     updateCamera(deltaTime); // handle various camera tweaks like off axis projection
@@ -4075,6 +4075,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
     scriptEngine->registerFunction("WebWindow", WebWindowClass::constructor, 1);
 
     scriptEngine->registerGlobalObject("Menu", MenuScriptingInterface::getInstance());
+    scriptEngine->registerGlobalObject("Stats", Stats::getInstance());
     scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
     scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
     scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCache>().data());
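
The registerGlobalObject("Stats", ...) line added here is what exposes the interface the two new example scripts rely on. A minimal, hedged sketch of what any Interface script can do once that registration is in place (the fields read are the same ones the examples use):

// With "Stats" registered on the script engine, scripts can pull renderer/network stats directly.
Stats.forceUpdateStats(); // public slot added in this commit; refreshes values on demand
print("framerate: " + Stats.framerate + ", simrate: " + Stats.simrate + ", audio ping: " + Stats.audioPing);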

@@ -294,9 +294,6 @@ Menu::Menu() {
 
     addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
 
-    addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::StandingHMDSensorMode, 0, false,
-            avatar, SLOT(updateStandingHMDModeFromMenu()));
-
     addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::WorldAxes);
 
     addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats);

@@ -124,14 +124,12 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
             _isEyeTrackerConnected = eyeTracker->isTracking();
         }
 
-        if (!myAvatar->getStandingHMDSensorMode()) {
-            // Twist the upper body to follow the rotation of the head, but only do this with my avatar,
-            // since everyone else will see the full joint rotations for other people.
-            const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
-            const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
-            float currentTwist = getTorsoTwist();
-            setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
-        }
+        // Twist the upper body to follow the rotation of the head, but only do this with my avatar,
+        // since everyone else will see the full joint rotations for other people.
+        const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
+        const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
+        float currentTwist = getTorsoTwist();
+        setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
     }
 
     if (!(_isFaceTrackerConnected || billboard)) {
@@ -392,7 +390,7 @@ glm::quat Head::getCameraOrientation() const {
     // always the same.
     if (qApp->getAvatarUpdater()->isHMDMode()) {
         MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
-        if (myAvatar && myAvatar->getStandingHMDSensorMode()) {
+        if (myAvatar) {
             return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
         } else {
             return getOrientation();

@@ -104,7 +104,6 @@ MyAvatar::MyAvatar(RigPointer rig) :
     _hmdSensorPosition(),
     _bodySensorMatrix(),
     _sensorToWorldMatrix(),
-    _standingHMDSensorMode(false),
     _goToPending(false),
     _goToPosition(),
     _goToOrientation(),
@@ -268,29 +267,59 @@ void MyAvatar::simulate(float deltaTime) {
 }
 
 glm::mat4 MyAvatar::getSensorToWorldMatrix() const {
-    if (getStandingHMDSensorMode()) {
-        return _sensorToWorldMatrix;
-    } else {
-        return createMatFromQuatAndPos(getWorldAlignedOrientation(), getDefaultEyePosition());
-    }
+    return _sensorToWorldMatrix;
 }
 
 // best called at start of main loop just after we have a fresh hmd pose.
 // update internal body position from new hmd pose.
-void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
+void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix, float deltaTime) {
     // update the sensorMatrices based on the new hmd pose
     _hmdSensorMatrix = hmdSensorMatrix;
     _hmdSensorPosition = extractTranslation(hmdSensorMatrix);
     _hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
-    _bodySensorMatrix = deriveBodyFromHMDSensor();
 
-    if (getStandingHMDSensorMode()) {
-        // set the body position/orientation to reflect motion due to the head.
-        auto worldMat = _sensorToWorldMatrix * _bodySensorMatrix;
-        nextAttitude(extractTranslation(worldMat), glm::quat_cast(worldMat));
-    }
+    const float STRAIGHTING_LEAN_DURATION = 0.5f; // seconds
+    const float STRAIGHTING_LEAN_THRESHOLD = 0.2f; // meters
+
+    auto newBodySensorMatrix = deriveBodyFromHMDSensor();
+    glm::vec3 diff = extractTranslation(newBodySensorMatrix) - extractTranslation(_bodySensorMatrix);
+    if (!_straightingLean && glm::length(diff) > STRAIGHTING_LEAN_THRESHOLD) {
+
+        // begin homing toward derived body position.
+        _straightingLean = true;
+        _straightingLeanAlpha = 0.0f;
+
+    } else if (_straightingLean) {
+
+        auto newBodySensorMatrix = deriveBodyFromHMDSensor();
+        auto worldBodyMatrix = _sensorToWorldMatrix * newBodySensorMatrix;
+        glm::vec3 worldBodyPos = extractTranslation(worldBodyMatrix);
+        glm::quat worldBodyRot = glm::normalize(glm::quat_cast(worldBodyMatrix));
+
+        _straightingLeanAlpha += (1.0f / STRAIGHTING_LEAN_DURATION) * deltaTime;
+
+        if (_straightingLeanAlpha >= 1.0f) {
+            _straightingLean = false;
+            nextAttitude(worldBodyPos, worldBodyRot);
+            _bodySensorMatrix = newBodySensorMatrix;
+        } else {
+            // interp position toward the desired pos
+            glm::vec3 pos = lerp(getPosition(), worldBodyPos, _straightingLeanAlpha);
+            glm::quat rot = glm::normalize(safeMix(getOrientation(), worldBodyRot, _straightingLeanAlpha));
+            nextAttitude(pos, rot);
+
+            // interp sensor matrix toward desired
+            glm::vec3 nextBodyPos = extractTranslation(newBodySensorMatrix);
+            glm::quat nextBodyRot = glm::normalize(glm::quat_cast(newBodySensorMatrix));
+            glm::vec3 prevBodyPos = extractTranslation(_bodySensorMatrix);
+            glm::quat prevBodyRot = glm::normalize(glm::quat_cast(_bodySensorMatrix));
+            pos = lerp(prevBodyPos, nextBodyPos, _straightingLeanAlpha);
+            rot = glm::normalize(safeMix(prevBodyRot, nextBodyRot, _straightingLeanAlpha));
+            _bodySensorMatrix = createMatFromQuatAndPos(rot, pos);
+        }
+    }
 }
 
+//
 // best called at end of main loop, just before rendering.
 // update sensor to world matrix from current body position and hmd sensor.
 // This is so the correct camera can be used for rendering.
@@ -359,11 +388,9 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
 
     Head* head = getHead();
     if (inHmd || isPlaying()) {
-        if (!getStandingHMDSensorMode()) {
-            head->setDeltaPitch(estimatedRotation.x);
-            head->setDeltaYaw(estimatedRotation.y);
-            head->setDeltaRoll(estimatedRotation.z);
-        }
+        head->setDeltaPitch(estimatedRotation.x);
+        head->setDeltaYaw(estimatedRotation.y);
+        head->setDeltaRoll(estimatedRotation.z);
     } else {
         float magnifyFieldOfView = qApp->getFieldOfView() /
                                    _realWorldFieldOfView.get();
@@ -385,12 +412,10 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
         relativePosition.x = -relativePosition.x;
     }
 
-    if (!(inHmd && getStandingHMDSensorMode())) {
-        head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-                                         -MAX_LEAN, MAX_LEAN));
-        head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-                                        -MAX_LEAN, MAX_LEAN));
-    }
+    head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
+                                     -MAX_LEAN, MAX_LEAN));
+    head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
+                                    -MAX_LEAN, MAX_LEAN));
 }
 
 
@@ -1744,11 +1769,6 @@ void MyAvatar::updateMotionBehaviorFromMenu() {
     _characterController.setEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
 }
 
-void MyAvatar::updateStandingHMDModeFromMenu() {
-    Menu* menu = Menu::getInstance();
-    _standingHMDSensorMode = menu->isOptionChecked(MenuOption::StandingHMDSensorMode);
-}
-
 //Renders sixense laser pointers for UI selection with controllers
 void MyAvatar::renderLaserPointers(gpu::Batch& batch) {
     const float PALM_TIP_ROD_RADIUS = 0.002f;
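
For readers skimming the MyAvatar change: updateFromHMDSensorMatrix() now ramps _straightingLeanAlpha from 0 to 1 over STRAIGHTING_LEAN_DURATION and blends the avatar toward the body pose derived from the HMD, instead of snapping to it every frame. The following is an illustrative scalar stand-in for that blend (the engine interpolates positions with lerp and rotations with safeMix); none of the names below are engine API.

// Illustrative only: once the derived target drifts past a threshold, ramp alpha over a
// fixed duration and ease the current value toward the target, as the C++ change does.
var STRAIGHTING_LEAN_DURATION = 0.5; // seconds, same constant as in the C++ change
var STRAIGHTING_LEAN_THRESHOLD = 0.2; // meters in the C++ change
var straighting = false;
var alpha = 0.0;

function lerp(a, b, t) {
    return a + (b - a) * t;
}

// call once per frame with the frame time, the current value, and the freshly derived target
function updateHoming(deltaTime, current, target) {
    if (!straighting && Math.abs(target - current) > STRAIGHTING_LEAN_THRESHOLD) {
        straighting = true; // begin homing toward the derived value
        alpha = 0.0;
    } else if (straighting) {
        alpha += deltaTime / STRAIGHTING_LEAN_DURATION;
        if (alpha >= 1.0) {
            straighting = false;
            return target; // ramp finished: snap to the target
        }
        return lerp(current, target, alpha); // otherwise move only part of the way there
    }
    return current;
}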

@@ -68,7 +68,7 @@ public:
 
     // best called at start of main loop just after we have a fresh hmd pose.
     // update internal body position from new hmd pose.
-    void updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix);
+    void updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix, float deltaTime);
 
     // best called at end of main loop, just before rendering.
     // update sensor to world matrix from current body position and hmd sensor.
@@ -168,7 +168,6 @@ public:
     static const float ZOOM_MAX;
     static const float ZOOM_DEFAULT;
 
-    bool getStandingHMDSensorMode() const { return _standingHMDSensorMode; }
     void doUpdateBillboard();
     void destroyAnimGraph();
 
@@ -194,7 +193,6 @@ public slots:
     void setThrust(glm::vec3 newThrust) { _thrust = newThrust; }
 
     void updateMotionBehaviorFromMenu();
-    void updateStandingHMDModeFromMenu();
 
     glm::vec3 getLeftPalmPosition();
     glm::vec3 getLeftPalmVelocity();
@@ -345,8 +343,6 @@ private:
     // used to transform any sensor into world space, including the _hmdSensorMat, or hand controllers.
    glm::mat4 _sensorToWorldMatrix;
 
-    bool _standingHMDSensorMode;
-
     bool _goToPending;
     glm::vec3 _goToPosition;
     glm::quat _goToOrientation;
@@ -362,6 +358,9 @@ private:
     AudioListenerMode _audioListenerMode;
     glm::vec3 _customListenPosition;
     glm::quat _customListenOrientation;
+
+    bool _straightingLean = false;
+    float _straightingLeanAlpha = 0.0f;
 };
 
 QScriptValue audioListenModeToScriptValue(QScriptEngine* engine, const AudioListenerMode& audioListenerMode);

@@ -123,7 +123,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
         Rig::HeadParameters headParams;
         headParams.modelRotation = getRotation();
         headParams.modelTranslation = getTranslation();
-        headParams.enableLean = qApp->getAvatarUpdater()->isHMDMode() && !myAvatar->getStandingHMDSensorMode();
+        headParams.enableLean = qApp->getAvatarUpdater()->isHMDMode();
         headParams.leanSideways = head->getFinalLeanSideways();
         headParams.leanForward = head->getFinalLeanForward();
         headParams.torsoTwist = head->getTorsoTwist();

@@ -68,12 +68,7 @@ void OverlayConductor::updateMode() {
 
     Mode newMode;
     if (qApp->isHMDMode()) {
-        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
-        if (myAvatar->getStandingHMDSensorMode()) {
-            newMode = STANDING;
-        } else {
-            newMode = SITTING;
-        }
+        newMode = SITTING;
     } else {
         newMode = FLAT;
     }

@@ -89,14 +89,14 @@ bool Stats::includeTimingRecord(const QString& name) {
 }
 
 
-void Stats::updateStats() {
-    if (!Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
-        if (isVisible()) {
-            setVisible(false);
-        }
-        return;
-    } else {
-        if (!isVisible()) {
+void Stats::updateStats(bool force) {
+    if (!force) {
+        if (!Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
+            if (isVisible()) {
+                setVisible(false);
+            }
+            return;
+        } else if (!isVisible()) {
             setVisible(true);
         }
     }
@@ -161,7 +161,7 @@ void Stats::updateStats() {
     STAT_UPDATE(position, QVector3D(avatarPos.x, avatarPos.y, avatarPos.z));
     STAT_UPDATE_FLOAT(velocity, glm::length(myAvatar->getVelocity()), 0.1f);
     STAT_UPDATE_FLOAT(yaw, myAvatar->getBodyYaw(), 0.1f);
-    if (_expanded) {
+    if (_expanded || force) {
         SharedNodePointer avatarMixer = nodeList->soloNodeOfType(NodeType::AvatarMixer);
         if (avatarMixer) {
             STAT_UPDATE(avatarMixerKbps, roundf(
@@ -175,7 +175,7 @@ void Stats::updateStats() {
         STAT_UPDATE(avatarMixerPps, -1);
     }
     SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
-    if (audioMixerNode) {
+    if (audioMixerNode || force) {
         STAT_UPDATE(audioMixerKbps, roundf(
             bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer) +
             bandwidthRecorder->getAverageOutputKilobitsPerSecond(NodeType::AudioMixer)));
@@ -230,7 +230,7 @@ void Stats::updateStats() {
             totalLeaves += stats.getTotalLeaves();
         }
     }
-    if (_expanded) {
+    if (_expanded || force) {
         if (serverCount == 0) {
             sendingModeStream << "---";
         }
@@ -272,7 +272,7 @@ void Stats::updateStats() {
     STAT_UPDATE(serverElements, (int)totalNodes);
     STAT_UPDATE(localElements, (int)OctreeElement::getNodeCount());
 
-    if (_expanded) {
+    if (_expanded || force) {
         STAT_UPDATE(serverInternal, (int)totalInternal);
         STAT_UPDATE(serverLeaves, (int)totalLeaves);
         // Local Voxels

@@ -81,7 +81,7 @@ public:
     const QString& monospaceFont() {
         return _monospaceFont;
     }
-    void updateStats();
+    void updateStats(bool force = false);
 
     bool isExpanded() { return _expanded; }
     bool isTimingExpanded() { return _timingExpanded; }
@@ -93,6 +93,9 @@ public:
         }
     }
 
+public slots:
+    void forceUpdateStats() { updateStats(true); }
+
 signals:
     void expandedChanged();
     void timingExpandedChanged();
@@ -149,3 +152,4 @@ private:
 };
 
 #endif // hifi_Stats_h
+
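
The force parameter added above defaults to false, so existing callers keep the old behavior; forceUpdateStats() passes true, which skips the menu/visibility early-out in Stats::updateStats() and also refreshes the blocks normally gated on _expanded. A hedged sketch of why a collector script wants that (the helper below is hypothetical):

// Without forcing, values such as avatarMixerKbps only refresh while the overlay is visible
// and expanded, so a headless collector would read stale numbers.
function sampleStat(name) {
    Stats.forceUpdateStats(); // runs updateStats(true) on the C++ side
    return Stats[name];
}
print("avatarMixerKbps = " + sampleStat("avatarMixerKbps"));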

@@ -9,6 +9,7 @@
 
 #include "AnimInverseKinematics.h"
 
+#include <GeometryUtil.h>
 #include <GLMHelpers.h>
 #include <NumericalConstants.h>
 #include <SharedUtil.h>
@@ -159,24 +160,34 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
             continue;
         }
         AnimPose targetPose = target.pose;
-        glm::vec3 tip = absolutePoses[tipIndex].trans;
+
+        // cache tip absolute transform
+        glm::vec3 tipPosition = absolutePoses[tipIndex].trans;
+        glm::quat tipRotation = absolutePoses[tipIndex].rot;
+
+        // cache tip's parent's absolute rotation so we can recompute the tip's parent-relative
+        // as we proceed walking down the joint chain
+        int pivotIndex = _skeleton->getParentIndex(tipIndex);
+        glm::quat tipParentRotation;
+        if (pivotIndex != -1) {
+            tipParentRotation = absolutePoses[pivotIndex].rot;
+        }
 
         // descend toward root, pivoting each joint to get tip closer to target
-        int pivotIndex = _skeleton->getParentIndex(tipIndex);
-        float fractionDenominator = 1.0f;
+        int ancestorCount = 1;
         while (pivotIndex != -1) {
             // compute the two lines that should be aligned
             glm::vec3 jointPosition = absolutePoses[pivotIndex].trans;
-            glm::vec3 leverArm = tip - jointPosition;
+            glm::vec3 leverArm = tipPosition - jointPosition;
             glm::vec3 targetLine = targetPose.trans - jointPosition;
 
-            // compute the axis of the rotation that would align them
+            // compute the swing that would get get tip closer
             glm::vec3 axis = glm::cross(leverArm, targetLine);
             float axisLength = glm::length(axis);
             glm::quat deltaRotation;
             const float MIN_AXIS_LENGTH = 1.0e-4f;
             if (axisLength > MIN_AXIS_LENGTH) {
-                // compute deltaRotation for alignment (brings tip closer to target)
+                // compute deltaRotation for alignment (swings tip closer to target)
                 axis /= axisLength;
                 float angle = acosf(glm::dot(leverArm, targetLine) / (glm::length(leverArm) * glm::length(targetLine)));
 
@@ -184,24 +195,50 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
                 // still possible for the angle to be zero so we also check that to avoid unnecessary calculations.
                 const float MIN_ADJUSTMENT_ANGLE = 1.0e-4f;
                 if (angle > MIN_ADJUSTMENT_ANGLE) {
-                    // reduce angle by half: slows convergence but adds stability to IK solution
-                    angle /= fractionDenominator;
+                    // reduce angle by a fraction (reduces IK swing contribution of this joint)
+                    angle /= (float)ancestorCount;
                     deltaRotation = glm::angleAxis(angle, axis);
                 }
 
+                // The swing will re-orient the tip but there will tend to be be a non-zero delta between the tip's
+                // new rotation and its target.  We compute that delta here and rotate the tipJoint accordingly.
+                glm::quat tipRelativeRotation = glm::inverse(deltaRotation * tipParentRotation) * targetPose.rot;
+
+                // enforce tip's constraint
+                RotationConstraint* constraint = getConstraint(tipIndex);
+                if (constraint) {
+                    bool constrained = constraint->apply(tipRelativeRotation);
+                    if (constrained) {
+                        // The tip's final parent-relative rotation violates its constraint
+                        // so we try to twist this pivot to compensate.
+                        glm::quat constrainedTipRotation = deltaRotation * tipParentRotation * tipRelativeRotation;
+                        glm::quat missingRotation = targetPose.rot * glm::inverse(constrainedTipRotation);
+                        glm::quat swingPart;
+                        glm::quat twistPart;
+                        glm::vec3 axis = glm::normalize(deltaRotation * leverArm);
+                        swingTwistDecomposition(missingRotation, axis, swingPart, twistPart);
+                        deltaRotation = twistPart * deltaRotation;
+                    }
+                    // we update the tip rotation here to rotate it as close to its target orientation as possible
+                    // before moving on to next pivot
+                    tipRotation = tipParentRotation * tipRelativeRotation;
+                }
             }
-            fractionDenominator++;
+            ++ancestorCount;
 
             int parentIndex = _skeleton->getParentIndex(pivotIndex);
             if (parentIndex == -1) {
                 // TODO? apply constraints to root?
                 // TODO? harvest the root's transform as movement of entire skeleton?
             } else {
-                // compute joint's new parent-relative rotation
+                // compute joint's new parent-relative rotation after swing
                 // Q' = dQ * Q and Q = Qp * q --> q' = Qp^ * dQ * Q
                 glm::quat newRot = glm::normalize(glm::inverse(
                     absolutePoses[parentIndex].rot) *
                     deltaRotation *
                     absolutePoses[pivotIndex].rot);
 
+                // enforce pivot's constraint
                 RotationConstraint* constraint = getConstraint(pivotIndex);
                 if (constraint) {
                     bool constrained = constraint->apply(newRot);
@@ -214,6 +251,7 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
                         glm::inverse(absolutePoses[pivotIndex].rot);
                     }
                 }
+
                 // store the rotation change in the accumulator
                 _accumulators[pivotIndex].add(newRot);
             }
@@ -222,8 +260,10 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
                 lowestMovedIndex = pivotIndex;
             }
 
-            // keep track of tip's new position as we descend towards root
-            tip = jointPosition + deltaRotation * leverArm;
+            // keep track of tip's new transform as we descend towards root
+            tipPosition = jointPosition + deltaRotation * leverArm;
+            tipRotation = glm::normalize(deltaRotation * tipRotation);
+            tipParentRotation = glm::normalize(deltaRotation * tipParentRotation);
 
             pivotIndex = _skeleton->getParentIndex(pivotIndex);
         }
@@ -464,7 +504,7 @@ void AnimInverseKinematics::initConstraints() {
         } else if (0 == baseName.compare("Hand", Qt::CaseInsensitive)) {
             SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
             stConstraint->setReferenceRotation(_defaultRelativePoses[i].rot);
-            const float MAX_HAND_TWIST = PI;
+            const float MAX_HAND_TWIST = 3.0f * PI / 5.0f;
             const float MIN_HAND_TWIST = -PI / 2.0f;
             if (isLeft) {
                 stConstraint->setTwistLimits(-MAX_HAND_TWIST, -MIN_HAND_TWIST);
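
swingTwistDecomposition() comes from the newly included GeometryUtil.h and does the work when the tip's constraint kicks in: it splits the leftover rotation into the part about the lever-arm axis (twist), which this pivot can absorb, and the remainder (swing). Below is a self-contained sketch of that split, illustrative only and not the engine's implementation; quaternions are plain {x, y, z, w} objects and the axis is assumed to be normalized.

// Standard Hamilton product of two quaternions a * b.
function quatMultiply(a, b) {
    return {
        x: a.w * b.x + a.x * b.w + a.y * b.z - a.z * b.y,
        y: a.w * b.y - a.x * b.z + a.y * b.w + a.z * b.x,
        z: a.w * b.z + a.x * b.y - a.y * b.x + a.z * b.w,
        w: a.w * b.w - a.x * b.x - a.y * b.y - a.z * b.z
    };
}

// Split 'rotation' into swing and twist about 'axis' so that rotation == swing * twist.
function swingTwistDecomposition(rotation, axis) {
    // project the rotation's vector part onto the twist axis
    var d = rotation.x * axis.x + rotation.y * axis.y + rotation.z * axis.z;
    var twist = { x: axis.x * d, y: axis.y * d, z: axis.z * d, w: rotation.w };
    var len = Math.sqrt(twist.x * twist.x + twist.y * twist.y + twist.z * twist.z + twist.w * twist.w);
    if (len < 1.0e-6) {
        // rotation is (nearly) 180 degrees about an axis perpendicular to 'axis': no twist component
        twist = { x: 0, y: 0, z: 0, w: 1 };
    } else {
        twist = { x: twist.x / len, y: twist.y / len, z: twist.z / len, w: twist.w / len };
    }
    // swing carries whatever the twist does not; for a unit quaternion the inverse is the conjugate
    var twistConjugate = { x: -twist.x, y: -twist.y, z: -twist.z, w: twist.w };
    var swing = quatMultiply(rotation, twistConjugate);
    return { swing: swing, twist: twist };
}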