Migrate core avatar rendering functionality to library

commit 2d0bbf70ae (parent a7eecdfb26)
48 changed files with 530 additions and 467 deletions
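At a glance, the commit keeps the generic Avatar, Head, and SkeletonModel classes in the avatars-renderer library and adds interface-side subclasses (MyHead, MySkeletonModel, OtherAvatar) that carry the local-player, tracker, and menu-driven logic. The following is a compilable toy sketch of that split, not part of the commit itself: the base types are stubs, and only the class names and override relationships are taken from the diff below.

// --- sketch only, not part of the commit ---
#include <memory>

struct HeadData {};    // stand-ins for the real data base classes
struct AvatarData {};

// avatars-renderer library: generic classes, no interface-only dependencies.
class Head : public HeadData {
public:
    virtual void simulate(float /*deltaTime*/) {}   // the old "bool isMine" parameter is gone
    virtual ~Head() = default;
};

class Avatar : public AvatarData {
public:
    virtual void instantiableAvatar() = 0;          // Avatar itself is no longer instantiated directly
    virtual ~Avatar() = default;
};

// interface: local-player and remote-peer specializations.
class MyHead : public Head {
    using Parent = Head;
public:
    void simulate(float deltaTime) override {       // adds face/eye-tracker input, then defers
        Parent::simulate(deltaTime);
    }
};

class OtherAvatar : public Avatar {
public:
    void instantiableAvatar() override {}           // plain remote avatar, built by AvatarManager
};

int main() {
    std::unique_ptr<Avatar> remote = std::make_unique<OtherAvatar>();  // as newSharedAvatar() now does with OtherAvatar
    remote->instantiableAvatar();
    return 0;
}
// --- end sketch ---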
@@ -191,7 +191,7 @@ endif()
# link required hifi libraries
link_hifi_libraries(
    shared octree ktx gpu gl gpu-gl procedural model render
    recording fbx networking model-networking entities avatars
    recording fbx networking model-networking entities avatars trackers
    audio audio-client animation script-engine physics
    render-utils entities-renderer avatars-renderer ui auto-updater
    controllers plugins image trackers

@@ -129,12 +129,12 @@
#include <Preferences.h>
#include <display-plugins/CompositorHelper.h>
#include <trackers/EyeTracker.h>

#include <avatars-renderer/ScriptAvatar.h>

#include "AudioClient.h"
#include "audio/AudioScope.h"
#include "avatar/AvatarManager.h"
#include "avatar/ScriptAvatar.h"
#include "avatar/MyHead.h"
#include "CrashHandler.h"
#include "devices/DdeFaceTracker.h"
#include "devices/Leapmotion.h"

@@ -1586,6 +1586,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
    connect(&domainHandler, &DomainHandler::hostnameChanged, this, &Application::addAssetToWorldMessageClose);

    updateSystemTabletMode();

    connect(&_myCamera, &Camera::modeUpdated, this, &Application::cameraModeChanged);
}

void Application::domainConnectionRefused(const QString& reasonMessage, int reasonCodeInt, const QString& extraInfo) {

@@ -2191,7 +2193,7 @@ void Application::paintGL() {
            _myCamera.setOrientation(glm::quat_cast(camMat));
        } else {
            _myCamera.setPosition(myAvatar->getDefaultEyePosition());
            _myCamera.setOrientation(myAvatar->getHead()->getCameraOrientation());
            _myCamera.setOrientation(myAvatar->getMyHead()->getCameraOrientation());
        }
    } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
        if (isHMDMode()) {

@@ -4082,6 +4084,30 @@ void Application::cycleCamera() {
    cameraMenuChanged(); // handle the menu change
}

void Application::cameraModeChanged() {
    switch (_myCamera.getMode()) {
        case CAMERA_MODE_FIRST_PERSON:
            Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, true);
            break;
        case CAMERA_MODE_THIRD_PERSON:
            Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, true);
            break;
        case CAMERA_MODE_MIRROR:
            Menu::getInstance()->setIsOptionChecked(MenuOption::FullscreenMirror, true);
            break;
        case CAMERA_MODE_INDEPENDENT:
            Menu::getInstance()->setIsOptionChecked(MenuOption::IndependentMode, true);
            break;
        case CAMERA_MODE_ENTITY:
            Menu::getInstance()->setIsOptionChecked(MenuOption::CameraEntityMode, true);
            break;
        default:
            break;
    }
    cameraMenuChanged();
}


void Application::cameraMenuChanged() {
    if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
        if (_myCamera.getMode() != CAMERA_MODE_MIRROR) {

@@ -372,6 +372,7 @@ public slots:
    static void showHelp();

    void cycleCamera();
    void cameraModeChanged();
    void cameraMenuChanged();
    void toggleOverlays();
    void setOverlaysVisible(bool visible);
@@ -12,6 +12,9 @@

#include "Application.h"

PickRay FancyCamera::computePickRay(float x, float y) const {
    return qApp->computePickRay(x, y);
}

QUuid FancyCamera::getCameraEntity() const {
    if (_cameraEntity != nullptr) {

@@ -11,7 +11,7 @@
#ifndef hifi_FancyCamera_h
#define hifi_FancyCamera_h

#include "Camera.h"
#include <shared/Camera.h>

#include <EntityTypes.h>

@@ -30,6 +30,8 @@ public:
    FancyCamera() : Camera() {}

    EntityItemPointer getCameraEntityPointer() const { return _cameraEntity; }
    PickRay computePickRay(float x, float y) const override;


public slots:
    QUuid getCameraEntity() const;
@@ -142,11 +142,6 @@ void renderWorldBox(gpu::Batch& batch) {
    geometryCache->renderSolidSphereInstance(batch, GREY);
}

// Return a random vector of average length 1
const glm::vec3 randVector() {
    return glm::vec3(randFloat() - 0.5f, randFloat() - 0.5f, randFloat() - 0.5f) * 2.0f;
}

// Do some basic timing tests and report the results
void runTimingTests() {
    // How long does it take to make a call to get the time?

@@ -17,9 +17,6 @@

#include <gpu/Batch.h>

float randFloat();
const glm::vec3 randVector();

void renderWorldBox(gpu::Batch& batch);

void runTimingTests();
@@ -32,9 +32,9 @@
#include <SettingHandle.h>
#include <UsersScriptingInterface.h>
#include <UUID.h>
#include <avatars-renderer/OtherAvatar.h>

#include "Application.h"
#include "Avatar.h"
#include "AvatarManager.h"
#include "InterfaceLogging.h"
#include "Menu.h"

@@ -299,7 +299,7 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
}

AvatarSharedPointer AvatarManager::newSharedAvatar() {
    return std::make_shared<Avatar>(qApp->thread(), std::make_shared<Rig>());
    return std::make_shared<OtherAvatar>(qApp->thread(), std::make_shared<Rig>());
}

void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar, KillAvatarReason removalReason) {

@@ -21,13 +21,11 @@
#include <PIDController.h>
#include <SimpleMovingAverage.h>
#include <shared/RateCounter.h>
#include <avatars-renderer/AvatarMotionState.h>
#include <avatars-renderer/ScriptAvatar.h>

#include "Avatar.h"
#include "MyAvatar.h"
#include "AvatarMotionState.h"
#include "ScriptAvatar.h"

class MyAvatar;
class AudioInjector;

class AvatarManager : public AvatarHashMap {
@@ -9,6 +9,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "MyAvatar.h"

#include <algorithm>
#include <vector>


@@ -43,11 +45,12 @@
#include <RecordingScriptingInterface.h>
#include <trackers/FaceTracker.h>

#include "MyHead.h"
#include "MySkeletonModel.h"
#include "Application.h"
#include "AvatarManager.h"
#include "AvatarActionHold.h"
#include "Menu.h"
#include "MyAvatar.h"
#include "Util.h"
#include "InterfaceLogging.h"
#include "DebugDraw.h"

@@ -96,23 +99,12 @@ static const glm::quat DEFAULT_AVATAR_RIGHTFOOT_ROT { -0.4016716778278351f, 0.91

MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
    Avatar(thread, rig),
    _wasPushing(false),
    _isPushing(false),
    _isBeingPushed(false),
    _isBraking(false),
    _isAway(false),
    _boomLength(ZOOM_DEFAULT),
    _yawSpeed(YAW_SPEED_DEFAULT),
    _pitchSpeed(PITCH_SPEED_DEFAULT),
    _thrust(0.0f),
    _actionMotorVelocity(0.0f),
    _scriptedMotorVelocity(0.0f),
    _scriptedMotorTimescale(DEFAULT_SCRIPTED_MOTOR_TIMESCALE),
    _scriptedMotorFrame(SCRIPTED_MOTOR_CAMERA_FRAME),
    _motionBehaviors(AVATAR_MOTION_DEFAULTS),
    _characterController(this),
    _lookAtTargetAvatar(),
    _shouldRender(true),
    _eyeContactTarget(LEFT_EYE),
    _realWorldFieldOfView("realWorldFieldOfView",
        DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),

@@ -129,6 +121,14 @@ MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
    _audioListenerMode(FROM_HEAD),
    _hmdAtRestDetector(glm::vec3(0), glm::quat())
{

    // give the pointer to our head to inherited _headData variable from AvatarData
    _headData = new MyHead(this);

    _skeletonModel = std::make_shared<MySkeletonModel>(this, nullptr, rig);
    connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);


    using namespace recording;
    _skeletonModel->flagAsCauterized();


@@ -536,7 +536,7 @@ void MyAvatar::simulate(float deltaTime) {
        }
        head->setPosition(headPosition);
        head->setScale(getUniformScale());
        head->simulate(deltaTime, true);
        head->simulate(deltaTime);
    }

    // Record avatars movements.

@@ -1450,12 +1450,12 @@ void MyAvatar::updateMotors() {
    glm::quat motorRotation;
    if (_motionBehaviors & AVATAR_MOTION_ACTION_MOTOR_ENABLED) {
        if (_characterController.getState() == CharacterController::State::Hover) {
            motorRotation = getHead()->getCameraOrientation();
            motorRotation = getMyHead()->getCameraOrientation();
        } else {
            // non-hovering = walking: follow camera twist about vertical but not lift
            // so we decompose camera's rotation and store the twist part in motorRotation
            glm::quat liftRotation;
            swingTwistDecomposition(getHead()->getCameraOrientation(), _worldUpDirection, liftRotation, motorRotation);
            swingTwistDecomposition(getMyHead()->getCameraOrientation(), _worldUpDirection, liftRotation, motorRotation);
        }
        const float DEFAULT_MOTOR_TIMESCALE = 0.2f;
        const float INVALID_MOTOR_TIMESCALE = 1.0e6f;

@@ -1469,7 +1469,7 @@ void MyAvatar::updateMotors() {
    }
    if (_motionBehaviors & AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED) {
        if (_scriptedMotorFrame == SCRIPTED_MOTOR_CAMERA_FRAME) {
            motorRotation = getHead()->getCameraOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
            motorRotation = getMyHead()->getCameraOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
        } else if (_scriptedMotorFrame == SCRIPTED_MOTOR_AVATAR_FRAME) {
            motorRotation = getOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
        } else {

@@ -1814,7 +1814,7 @@ void MyAvatar::updateOrientation(float deltaTime) {
    if (getCharacterController()->getState() == CharacterController::State::Hover) {

        // This is the direction the user desires to fly in.
        glm::vec3 desiredFacing = getHead()->getCameraOrientation() * Vectors::UNIT_Z;
        glm::vec3 desiredFacing = getMyHead()->getCameraOrientation() * Vectors::UNIT_Z;
        desiredFacing.y = 0.0f;

        // This is our reference frame, it is captured when the user begins to move.

@@ -1958,7 +1958,7 @@ void MyAvatar::updatePosition(float deltaTime) {
    if (!_hoverReferenceCameraFacingIsCaptured && (fabs(getDriveKey(TRANSLATE_Z)) > 0.1f || fabs(getDriveKey(TRANSLATE_X)) > 0.1f)) {
        _hoverReferenceCameraFacingIsCaptured = true;
        // transform the camera facing vector into sensor space.
        _hoverReferenceCameraFacing = transformVectorFast(glm::inverse(_sensorToWorldMatrix), getHead()->getCameraOrientation() * Vectors::UNIT_Z);
        _hoverReferenceCameraFacing = transformVectorFast(glm::inverse(_sensorToWorldMatrix), getMyHead()->getCameraOrientation() * Vectors::UNIT_Z);
    } else if (_hoverReferenceCameraFacingIsCaptured && (fabs(getDriveKey(TRANSLATE_Z)) <= 0.1f && fabs(getDriveKey(TRANSLATE_X)) <= 0.1f)) {
        _hoverReferenceCameraFacingIsCaptured = false;
    }

@@ -2804,3 +2804,7 @@ void MyAvatar::updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose&
    });
}
}

const MyHead* MyAvatar::getMyHead() const {
    return static_cast<const MyHead*>(getHead());
}
@@ -22,14 +22,15 @@

#include <controllers/Pose.h>
#include <controllers/Actions.h>
#include <avatars-renderer/Avatar.h>

#include "Avatar.h"
#include "AtRestDetector.h"
#include "MyCharacterController.h"
#include <ThreadSafeValueCache.h>

class AvatarActionHold;
class ModelItemID;
class MyHead;

enum eyeContactTarget {
    LEFT_EYE,

@@ -149,6 +150,7 @@ public:
    explicit MyAvatar(QThread* thread, RigPointer rig);
    ~MyAvatar();

    void instantiableAvatar() override {};
    void registerMetaTypes(QScriptEngine* engine);

    virtual void simulateAttachments(float deltaTime) override;

@@ -353,6 +355,7 @@ public:

    eyeContactTarget getEyeContactTarget();

    const MyHead* getMyHead() const;
    Q_INVOKABLE glm::vec3 getHeadPosition() const { return getHead()->getPosition(); }
    Q_INVOKABLE float getHeadFinalYaw() const { return getHead()->getFinalYaw(); }
    Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); }

@@ -589,17 +592,17 @@ private:
    std::array<float, MAX_DRIVE_KEYS> _driveKeys;
    std::bitset<MAX_DRIVE_KEYS> _disabledDriveKeys;

    bool _wasPushing;
    bool _isPushing;
    bool _isBeingPushed;
    bool _isBraking;
    bool _isAway;
    bool _wasPushing { false };
    bool _isPushing { false };
    bool _isBeingPushed { false };
    bool _isBraking { false };
    bool _isAway { false };

    float _boomLength;
    float _boomLength { ZOOM_DEFAULT };
    float _yawSpeed; // degrees/sec
    float _pitchSpeed; // degrees/sec

    glm::vec3 _thrust; // impulse accumulator for outside sources
    glm::vec3 _thrust { 0.0f }; // impulse accumulator for outside sources

    glm::vec3 _actionMotorVelocity; // target local-frame velocity of avatar (default controller actions)
    glm::vec3 _scriptedMotorVelocity; // target local-frame velocity of avatar (analog script)

@@ -615,7 +618,7 @@ private:

    AvatarWeakPointer _lookAtTargetAvatar;
    glm::vec3 _targetAvatarPosition;
    bool _shouldRender;
    bool _shouldRender { true };
    float _oculusYawOffset;

    eyeContactTarget _eyeContactTarget;
interface/src/avatar/MyHead.cpp (new file, 76 lines)

@@ -0,0 +1,76 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "MyHead.h"

#include <glm/gtx/quaternion.hpp>
#include <gpu/Batch.h>

#include <NodeList.h>
#include <recording/Deck.h>
#include <Rig.h>
#include <trackers/FaceTracker.h>
#include <trackers/EyeTracker.h>

#include "devices/DdeFaceTracker.h"
#include "Application.h"
#include "MyAvatar.h"

using namespace std;

MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
}

glm::quat MyHead::getCameraOrientation() const {
    // NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
    // you may wonder why this code is here. This method will be called while in Oculus mode to determine how
    // to change the driving direction while in Oculus mode. It is used to support driving toward where you're
    // head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
    // always the same.
    if (qApp->isHMDMode()) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
        return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
    } else {
        Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
        return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
    }
}

void MyHead::simulate(float deltaTime) {
    auto player = DependencyManager::get<recording::Deck>();
    // Only use face trackers when not playing back a recording.
    if (!player->isPlaying()) {
        FaceTracker* faceTracker = qApp->getActiveFaceTracker();
        _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
        if (_isFaceTrackerConnected) {
            _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();

            if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {

                if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
                    calculateMouthShapes(deltaTime);

                    const int JAW_OPEN_BLENDSHAPE = 21;
                    const int MMMM_BLENDSHAPE = 34;
                    const int FUNNEL_BLENDSHAPE = 40;
                    const int SMILE_LEFT_BLENDSHAPE = 28;
                    const int SMILE_RIGHT_BLENDSHAPE = 29;
                    _blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
                    _blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
                    _blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
                    _blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
                    _blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
                }
                applyEyelidOffset(getFinalOrientationInWorldFrame());
            }
        }
        auto eyeTracker = DependencyManager::get<EyeTracker>();
        _isEyeTrackerConnected = eyeTracker->isTracking();
    }
    Parent::simulate(deltaTime);
}
interface/src/avatar/MyHead.h (new file, 30 lines)

@@ -0,0 +1,30 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_MyHead_h
#define hifi_MyHead_h

#include <avatars-renderer/Head.h>

class MyAvatar;
class MyHead : public Head {
    using Parent = Head;
public:
    explicit MyHead(MyAvatar* owningAvatar);

    /// \return orientationBody * orientationBasePitch
    glm::quat getCameraOrientation() const;
    void simulate(float deltaTime) override;

private:
    // disallow copies of the Head, copy of owning Avatar is disallowed too
    MyHead(const Head&);
    MyHead& operator= (const MyHead&);
};

#endif // hifi_MyHead_h
interface/src/avatar/MySkeletonModel.cpp (new file, 158 lines)

@@ -0,0 +1,158 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "MySkeletonModel.h"

#include <avatars-renderer/Avatar.h>

#include "Application.h"
#include "InterfaceLogging.h"

MySkeletonModel::MySkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer rig) : SkeletonModel(owningAvatar, parent, rig) {
}

Rig::CharacterControllerState convertCharacterControllerState(CharacterController::State state) {
    switch (state) {
        default:
        case CharacterController::State::Ground:
            return Rig::CharacterControllerState::Ground;
        case CharacterController::State::Takeoff:
            return Rig::CharacterControllerState::Takeoff;
        case CharacterController::State::InAir:
            return Rig::CharacterControllerState::InAir;
        case CharacterController::State::Hover:
            return Rig::CharacterControllerState::Hover;
    };
}

// Called within Model::simulate call, below.
void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    const FBXGeometry& geometry = getFBXGeometry();

    Head* head = _owningAvatar->getHead();

    // make sure lookAt is not too close to face (avoid crosseyes)
    glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition();
    MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

    Rig::HeadParameters headParams;

    // input action is the highest priority source for head orientation.
    auto avatarHeadPose = myAvatar->getHeadControllerPoseInAvatarFrame();
    if (avatarHeadPose.isValid()) {
        glm::mat4 rigHeadMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
        headParams.rigHeadPosition = extractTranslation(rigHeadMat);
        headParams.rigHeadOrientation = glmExtractRotation(rigHeadMat);
        headParams.headEnabled = true;
    } else {
        if (qApp->isHMDMode()) {
            // get HMD position from sensor space into world space, and back into rig space
            glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
            glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
            glm::mat4 worldToRig = glm::inverse(rigToWorld);
            glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
            _rig->computeHeadFromHMD(AnimPose(rigHMDMat), headParams.rigHeadPosition, headParams.rigHeadOrientation);
            headParams.headEnabled = true;
        } else {
            // even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and down in desktop mode.
            // preMult 180 is necessary to convert from avatar to rig coordinates.
            // postMult 180 is necessary to convert head from -z forward to z forward.
            headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
            headParams.headEnabled = false;
        }
    }

    auto avatarHipsPose = myAvatar->getHipsControllerPoseInAvatarFrame();
    if (avatarHipsPose.isValid()) {
        glm::mat4 rigHipsMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHipsPose.getRotation(), avatarHipsPose.getTranslation());
        headParams.hipsMatrix = rigHipsMat;
        headParams.hipsEnabled = true;
    } else {
        headParams.hipsEnabled = false;
    }

    auto avatarSpine2Pose = myAvatar->getSpine2ControllerPoseInAvatarFrame();
    if (avatarSpine2Pose.isValid()) {
        glm::mat4 rigSpine2Mat = Matrices::Y_180 * createMatFromQuatAndPos(avatarSpine2Pose.getRotation(), avatarSpine2Pose.getTranslation());
        headParams.spine2Matrix = rigSpine2Mat;
        headParams.spine2Enabled = true;
    } else {
        headParams.spine2Enabled = false;
    }

    headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;

    _rig->updateFromHeadParameters(headParams, deltaTime);

    Rig::HandAndFeetParameters handAndFeetParams;

    auto leftPose = myAvatar->getLeftHandControllerPoseInAvatarFrame();
    if (leftPose.isValid()) {
        handAndFeetParams.isLeftEnabled = true;
        handAndFeetParams.leftPosition = Quaternions::Y_180 * leftPose.getTranslation();
        handAndFeetParams.leftOrientation = Quaternions::Y_180 * leftPose.getRotation();
    } else {
        handAndFeetParams.isLeftEnabled = false;
    }

    auto rightPose = myAvatar->getRightHandControllerPoseInAvatarFrame();
    if (rightPose.isValid()) {
        handAndFeetParams.isRightEnabled = true;
        handAndFeetParams.rightPosition = Quaternions::Y_180 * rightPose.getTranslation();
        handAndFeetParams.rightOrientation = Quaternions::Y_180 * rightPose.getRotation();
    } else {
        handAndFeetParams.isRightEnabled = false;
    }

    auto leftFootPose = myAvatar->getLeftFootControllerPoseInAvatarFrame();
    if (leftFootPose.isValid()) {
        handAndFeetParams.isLeftFootEnabled = true;
        handAndFeetParams.leftFootPosition = Quaternions::Y_180 * leftFootPose.getTranslation();
        handAndFeetParams.leftFootOrientation = Quaternions::Y_180 * leftFootPose.getRotation();
    } else {
        handAndFeetParams.isLeftFootEnabled = false;
    }

    auto rightFootPose = myAvatar->getRightFootControllerPoseInAvatarFrame();
    if (rightFootPose.isValid()) {
        handAndFeetParams.isRightFootEnabled = true;
        handAndFeetParams.rightFootPosition = Quaternions::Y_180 * rightFootPose.getTranslation();
        handAndFeetParams.rightFootOrientation = Quaternions::Y_180 * rightFootPose.getRotation();
    } else {
        handAndFeetParams.isRightFootEnabled = false;
    }

    handAndFeetParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();
    handAndFeetParams.bodyCapsuleHalfHeight = myAvatar->getCharacterController()->getCapsuleHalfHeight();
    handAndFeetParams.bodyCapsuleLocalOffset = myAvatar->getCharacterController()->getCapsuleLocalOffset();

    _rig->updateFromHandAndFeetParameters(handAndFeetParams, deltaTime);

    Rig::CharacterControllerState ccState = convertCharacterControllerState(myAvatar->getCharacterController()->getState());

    auto velocity = myAvatar->getLocalVelocity();
    auto position = myAvatar->getLocalPosition();
    auto orientation = myAvatar->getLocalOrientation();
    _rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);

    // evaluate AnimGraph animation and update jointStates.
    Model::updateRig(deltaTime, parentTransform);

    Rig::EyeParameters eyeParams;
    eyeParams.eyeLookAt = lookAt;
    eyeParams.eyeSaccade = head->getSaccade();
    eyeParams.modelRotation = getRotation();
    eyeParams.modelTranslation = getTranslation();
    eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
    eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

    _rig->updateFromEyeParameters(eyeParams);

    Parent::updateRig(deltaTime, parentTransform);
}
interface/src/avatar/MySkeletonModel.h (new file, 26 lines)

@@ -0,0 +1,26 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_MySkeletonModel_h
#define hifi_MySkeletonModel_h

#include <avatars-renderer/SkeletonModel.h>

/// A skeleton loaded from a model.
class MySkeletonModel : public SkeletonModel {
    Q_OBJECT

private:
    using Parent = SkeletonModel;

public:
    MySkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr, RigPointer rig = nullptr);
    void updateRig(float deltaTime, glm::mat4 parentTransform) override;
};

#endif // hifi_MySkeletonModel_h
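Both new interface classes chain back to the library implementation through a private Parent alias (Parent::simulate(...) in MyHead, Parent::updateRig(...) in MySkeletonModel), so the base behaviour still runs after the interface-specific work. A small self-contained illustration of that idiom, with stub types rather than the real classes:

// --- sketch only, not part of the commit ---
#include <iostream>

struct Head {
    virtual void simulate(float dt) { std::cout << "library Head::simulate(" << dt << ")\n"; }
    virtual ~Head() = default;
};

struct MyHead : Head {
    using Parent = Head;                 // same idiom as MyHead / MySkeletonModel above
    void simulate(float dt) override {
        // interface-specific work (face/eye trackers) would go here ...
        Parent::simulate(dt);            // ... then defer to the library implementation
    }
};

int main() {
    MyHead head;
    head.simulate(1.0f / 60.0f);
    return 0;
}
// --- end sketch ---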
@@ -9,20 +9,21 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "DdeFaceTracker.h"

#include <SharedUtil.h>

#include <QCoreApplication>
#include <QJsonDocument>
#include <QJsonArray>
#include <QJsonObject>
#include <QTimer>
#include <QtCore/QCoreApplication>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonArray>
#include <QtCore/QJsonObject>
#include <QtCore/QTimer>

#include <GLMHelpers.h>
#include <NumericalConstants.h>
#include <FaceshiftConstants.h>

#include "Application.h"
#include "DdeFaceTracker.h"
#include "FaceshiftConstants.h"
#include "InterfaceLogging.h"
#include "Menu.h"


@@ -12,6 +12,8 @@
#ifndef hifi_DdeFaceTracker_h
#define hifi_DdeFaceTracker_h

#include <QtCore/QtGlobal>

#if defined(Q_OS_WIN) || defined(Q_OS_OSX)
#define HAVE_DDE
#endif
@@ -1,7 +1,4 @@
//
// Leapmotion.cpp
// interface/src/devices
//
// Created by Sam Cake on 6/2/2014
// Copyright 2014 High Fidelity, Inc.
//

@@ -10,10 +7,11 @@
//

#include "Leapmotion.h"
#include "Menu.h"

#include <NumericalConstants.h>

#include "Menu.h"

const int PALMROOT_NUM_JOINTS = 3;
const int FINGER_NUM_JOINTS = 4;
const int HAND_NUM_JOINTS = FINGER_NUM_JOINTS*5+PALMROOT_NUM_JOINTS;

@@ -1,7 +1,4 @@
//
// Leapmotion.h
// interface/src/devices
//
// Created by Sam Cake on 6/2/2014
// Copyright 2014 High Fidelity, Inc.
//
@@ -1,6 +1,6 @@
set(TARGET_NAME avatars-renderer)
AUTOSCRIBE_SHADER_LIB(gpu model render render-utils)
setup_hifi_library(Widgets Network Script)
link_hifi_libraries(shared gpu model animation physics model-networking script-engine render render-utils)
link_hifi_libraries(shared gpu model animation physics model-networking script-engine render image render-utils)

target_bullet()
@@ -27,16 +27,13 @@
#include <TextRenderer3D.h>
#include <VariantMapToScriptValue.h>
#include <DebugDraw.h>
#include <shared/Camera.h>
#include <SoftAttachmentModel.h>

#include "AvatarMotionState.h"
#include "Camera.h"
#include "InterfaceLogging.h"
#include "SceneScriptingInterface.h"
#include "SoftAttachmentModel.h"
#include "Logging.h"

using namespace std;

const glm::vec3 DEFAULT_UP_DIRECTION(0.0f, 1.0f, 0.0f);
const int NUM_BODY_CONE_SIDES = 9;
const float CHAT_MESSAGE_SCALE = 0.0015f;
const float CHAT_MESSAGE_HEIGHT = 0.1f;

@@ -71,6 +68,11 @@ namespace render {
    }
}

bool showAvatars { true };
void Avatar::setShowAvatars(bool render) {
    showAvatars = render;
}

static bool showReceiveStats = false;
void Avatar::setShowReceiveStats(bool receiveStats) {
    showReceiveStats = receiveStats;

@@ -97,25 +99,6 @@ void Avatar::setShowNamesAboveHeads(bool show) {
}

Avatar::Avatar(QThread* thread, RigPointer rig) :
    AvatarData(),
    _skeletonOffset(0.0f),
    _bodyYawDelta(0.0f),
    _positionDeltaAccumulator(0.0f),
    _lastVelocity(0.0f),
    _acceleration(0.0f),
    _lastAngularVelocity(0.0f),
    _lastOrientation(),
    _worldUpDirection(DEFAULT_UP_DIRECTION),
    _moving(false),
    _smoothPositionTime(SMOOTH_TIME_POSITION),
    _smoothPositionTimer(std::numeric_limits<float>::max()),
    _smoothOrientationTime(SMOOTH_TIME_ORIENTATION),
    _smoothOrientationTimer(std::numeric_limits<float>::max()),
    _smoothPositionInitial(),
    _smoothPositionTarget(),
    _smoothOrientationInitial(),
    _smoothOrientationTarget(),
    _initialized(false),
    _voiceSphereID(GeometryCache::UNKNOWN_ID)
{
    // we may have been created in the network thread, but we live in the main thread

@@ -123,12 +106,6 @@ Avatar::Avatar(QThread* thread, RigPointer rig) :

    setScale(glm::vec3(1.0f)); // avatar scale is uniform

    // give the pointer to our head to inherited _headData variable from AvatarData
    _headData = static_cast<HeadData*>(new Head(this));

    _skeletonModel = std::make_shared<SkeletonModel>(this, nullptr, rig);
    connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);

    auto geometryCache = DependencyManager::get<GeometryCache>();
    _nameRectGeometryID = geometryCache->allocateID();
    _leftPointerGeometryID = geometryCache->allocateID();

@@ -283,7 +260,7 @@ void Avatar::updateAvatarEntities() {
        // and either add or update the entity.
        QJsonDocument jsonProperties = QJsonDocument::fromBinaryData(data);
        if (!jsonProperties.isObject()) {
            qCDebug(interfaceapp) << "got bad avatarEntity json" << QString(data.toHex());
            qCDebug(avatars_renderer) << "got bad avatarEntity json" << QString(data.toHex());
            continue;
        }


@@ -306,7 +283,7 @@ void Avatar::updateAvatarEntities() {
        // NOTE: if this avatar entity is not attached to us, strip its entity script completely...
        auto attachedScript = properties.getScript();
        if (!isMyAvatar() && !attachedScript.isEmpty()) {
            qCDebug(interfaceapp) << "removing entity script from avatar attached entity:" << entityID << "old script:" << attachedScript;
            qCDebug(avatars_renderer) << "removing entity script from avatar attached entity:" << entityID << "old script:" << attachedScript;
            QString noScript;
            properties.setScript(noScript);
        }
@@ -410,7 +387,7 @@ void Avatar::simulate(float deltaTime, bool inView) {
            Head* head = getHead();
            head->setPosition(headPosition);
            head->setScale(getUniformScale());
            head->simulate(deltaTime, false);
            head->simulate(deltaTime);
        } else {
            // a non-full update is still required so that the position, rotation, scale and bounds of the skeletonModel are updated.
            _skeletonModel->simulate(deltaTime, false);

@@ -748,12 +725,12 @@ float Avatar::getBoundingRadius() const {
#ifdef DEBUG
void debugValue(const QString& str, const glm::vec3& value) {
    if (glm::any(glm::isnan(value)) || glm::any(glm::isinf(value))) {
        qCWarning(interfaceapp) << "debugValue() " << str << value;
        qCWarning(avatars_renderer) << "debugValue() " << str << value;
    }
};
void debugValue(const QString& str, const float& value) {
    if (glm::isnan(value) || glm::isinf(value)) {
        qCWarning(interfaceapp) << "debugValue() " << str << value;
        qCWarning(avatars_renderer) << "debugValue() " << str << value;
    }
};
#define DEBUG_VALUE(str, value) debugValue(str, value)

@@ -783,7 +760,7 @@ glm::vec3 Avatar::getDisplayNamePosition() const {
    }

    if (glm::any(glm::isnan(namePosition)) || glm::any(glm::isinf(namePosition))) {
        qCWarning(interfaceapp) << "Invalid display name position" << namePosition
        qCWarning(avatars_renderer) << "Invalid display name position" << namePosition
            << ", setting is to (0.0f, 0.5f, 0.0f)";
        namePosition = glm::vec3(0.0f, 0.5f, 0.0f);
    }

@@ -1115,14 +1092,14 @@ void Avatar::setModelURLFinished(bool success) {
        const int MAX_SKELETON_DOWNLOAD_ATTEMPTS = 4; // NOTE: we don't want to be as generous as ResourceCache is, we only want 4 attempts
        if (_skeletonModel->getResourceDownloadAttemptsRemaining() <= 0 ||
            _skeletonModel->getResourceDownloadAttempts() > MAX_SKELETON_DOWNLOAD_ATTEMPTS) {
            qCWarning(interfaceapp) << "Using default after failing to load Avatar model: " << _skeletonModelURL
            qCWarning(avatars_renderer) << "Using default after failing to load Avatar model: " << _skeletonModelURL
                << "after" << _skeletonModel->getResourceDownloadAttempts() << "attempts.";
            // call _skeletonModel.setURL, but leave our copy of _skeletonModelURL alone. This is so that
            // we don't redo this every time we receive an identity packet from the avatar with the bad url.
            QMetaObject::invokeMethod(_skeletonModel.get(), "setURL",
                Qt::QueuedConnection, Q_ARG(QUrl, AvatarData::defaultFullAvatarModelUrl()));
        } else {
            qCWarning(interfaceapp) << "Avatar model: " << _skeletonModelURL
            qCWarning(avatars_renderer) << "Avatar model: " << _skeletonModelURL
                << "failed to load... attempts:" << _skeletonModel->getResourceDownloadAttempts()
                << "out of:" << MAX_SKELETON_DOWNLOAD_ATTEMPTS;
        }

@@ -1438,7 +1415,7 @@ void Avatar::setParentID(const QUuid& parentID) {
    if (success) {
        setTransform(beforeChangeTransform, success);
        if (!success) {
            qCDebug(interfaceapp) << "Avatar::setParentID failed to reset avatar's location.";
            qCDebug(avatars_renderer) << "Avatar::setParentID failed to reset avatar's location.";
        }
        if (initialParentID != parentID) {
            _parentChanged = usecTimestampNow();

@@ -1456,7 +1433,7 @@ void Avatar::setParentJointIndex(quint16 parentJointIndex) {
    if (success) {
        setTransform(beforeChangeTransform, success);
        if (!success) {
            qCDebug(interfaceapp) << "Avatar::setParentJointIndex failed to reset avatar's location.";
            qCDebug(avatars_renderer) << "Avatar::setParentJointIndex failed to reset avatar's location.";
        }
    }
}

@@ -1488,7 +1465,7 @@ QList<QVariant> Avatar::getSkeleton() {
void Avatar::addToScene(AvatarSharedPointer myHandle, const render::ScenePointer& scene) {
    if (scene) {
        auto nodelist = DependencyManager::get<NodeList>();
        if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()
        if (showAvatars
            && !nodelist->isIgnoringNode(getSessionUUID())
            && !nodelist->isRadiusIgnoringNode(getSessionUUID())) {
            render::Transaction transaction;

@@ -1496,7 +1473,7 @@ void Avatar::addToScene(AvatarSharedPointer myHandle, const render::ScenePointer
            scene->enqueueTransaction(transaction);
        }
    } else {
        qCWarning(interfaceapp) << "Avatar::addAvatar() : Unexpected null scene, possibly during application shutdown";
        qCWarning(avatars_renderer) << "Avatar::addAvatar() : Unexpected null scene, possibly during application shutdown";
    }
}
@@ -20,6 +20,7 @@
#include <AvatarData.h>
#include <ShapeInfo.h>
#include <render/Scene.h>
#include <GLMHelpers.h>


#include "Head.h"

@@ -68,6 +69,7 @@ class Avatar : public AvatarData {
    Q_PROPERTY(glm::vec3 skeletonOffset READ getSkeletonOffset WRITE setSkeletonOffset)

public:
    static void setShowAvatars(bool render);
    static void setShowReceiveStats(bool receiveStats);
    static void setShowMyLookAtVectors(bool showMine);
    static void setShowOtherLookAtVectors(bool showOthers);

@@ -77,6 +79,8 @@ public:
    explicit Avatar(QThread* thread, RigPointer rig = nullptr);
    ~Avatar();

    virtual void instantiableAvatar() = 0;

    typedef render::Payload<AvatarData> Payload;
    typedef std::shared_ptr<render::Item::PayloadInterface> PayloadPointer;


@@ -251,7 +255,6 @@ public:
    bool isInScene() const { return render::Item::isValidID(_renderItemID); }
    bool isMoving() const { return _moving; }

    //void setMotionState(AvatarMotionState* motionState);
    void setPhysicsCallback(AvatarPhysicsCallback cb);
    void addPhysicsFlags(uint32_t flags);
    bool isInPhysicsSimulation() const { return _physicsCallback != nullptr; }

@@ -268,7 +271,6 @@ public slots:
    void setModelURLFinished(bool success);

protected:

    const float SMOOTH_TIME_POSITION = 0.125f;
    const float SMOOTH_TIME_ORIENTATION = 0.075f;


@@ -282,7 +284,7 @@ protected:
    std::vector<std::shared_ptr<Model>> _attachmentsToRemove;
    std::vector<std::shared_ptr<Model>> _attachmentsToDelete;

    float _bodyYawDelta; // degrees/sec
    float _bodyYawDelta { 0.0f }; // degrees/sec

    // These position histories and derivatives are in the world-frame.
    // The derivatives are the MEASURED results of all external and internal forces

@@ -298,9 +300,8 @@ protected:
    glm::vec3 _angularAcceleration;
    glm::quat _lastOrientation;

    glm::vec3 _worldUpDirection;
    float _stringLength;
    bool _moving; ///< set when position is changing
    glm::vec3 _worldUpDirection { Vectors::UP };
    bool _moving { false }; ///< set when position is changing

    // protected methods...
    bool isLookingAtMe(AvatarSharedPointer avatar) const;

@@ -336,10 +337,10 @@ protected:
    RateCounter<> _jointDataSimulationRate;

    // Smoothing data for blending from one position/orientation to another on remote agents.
    float _smoothPositionTime;
    float _smoothPositionTimer;
    float _smoothOrientationTime;
    float _smoothOrientationTimer;
    float _smoothPositionTime { SMOOTH_TIME_POSITION };
    float _smoothPositionTimer { std::numeric_limits<float>::max() };
    float _smoothOrientationTime { SMOOTH_TIME_ORIENTATION };
    float _smoothOrientationTimer { std::numeric_limits<float>::max() };
    glm::vec3 _smoothPositionInitial;
    glm::vec3 _smoothPositionTarget;
    glm::quat _smoothOrientationInitial;

@@ -360,7 +361,7 @@ private:
    int _leftPointerGeometryID { 0 };
    int _rightPointerGeometryID { 0 };
    int _nameRectGeometryID { 0 };
    bool _initialized;
    bool _initialized { false };
    bool _isLookAtTarget { false };
    bool _isAnimatingScale { false };
@@ -9,13 +9,12 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "AvatarMotionState.h"

#include <PhysicsCollisionGroups.h>
#include <PhysicsEngine.h>
#include <PhysicsHelpers.h>

#include "Avatar.h"
#include "AvatarMotionState.h"
#include "BulletUtil.h"

AvatarMotionState::AvatarMotionState(AvatarSharedPointer avatar, const btCollisionShape* shape) : ObjectMotionState(shape), _avatar(avatar) {
    assert(_avatar);

@@ -15,8 +15,9 @@
#include <QSet>

#include <ObjectMotionState.h>
#include <BulletUtil.h>

class Avatar;
#include "Avatar.h"

class AvatarMotionState : public ObjectMotionState {
public:
@@ -8,55 +8,28 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "Head.h"

#include <glm/gtx/quaternion.hpp>
#include <gpu/Batch.h>

#include <NodeList.h>
#include <recording/Deck.h>
#include <DependencyManager.h>
#include <GeometryUtil.h>
#include <trackers/FaceTracker.h>
#include <trackers/EyeTracker.h>

#include "Application.h"
#include "Avatar.h"
#include "DependencyManager.h"
#include "GeometryUtil.h"
#include "Head.h"
#include "Menu.h"
#include "Util.h"
#include "devices/DdeFaceTracker.h"
#include <Rig.h>

#include "Avatar.h"

using namespace std;

static bool fixGaze { false };
static bool disableEyelidAdjustment { false };

Head::Head(Avatar* owningAvatar) :
    HeadData((AvatarData*)owningAvatar),
    _returnHeadToCenter(false),
    _position(0.0f, 0.0f, 0.0f),
    _rotation(0.0f, 0.0f, 0.0f),
    _leftEyePosition(0.0f, 0.0f, 0.0f),
    _rightEyePosition(0.0f, 0.0f, 0.0f),
    _eyePosition(0.0f, 0.0f, 0.0f),
    _scale(1.0f),
    _lastLoudness(0.0f),
    _longTermAverageLoudness(-1.0f),
    _audioAttack(0.0f),
    _audioJawOpen(0.0f),
    _trailingAudioJawOpen(0.0f),
    _mouth2(0.0f),
    _mouth3(0.0f),
    _mouth4(0.0f),
    _mouthTime(0.0f),
    _saccade(0.0f, 0.0f, 0.0f),
    _saccadeTarget(0.0f, 0.0f, 0.0f),
    _leftEyeBlinkVelocity(0.0f),
    _rightEyeBlinkVelocity(0.0f),
    _timeWithoutTalking(0.0f),
    _deltaPitch(0.0f),
    _deltaYaw(0.0f),
    _deltaRoll(0.0f),
    _isCameraMoving(false),
    _isLookingAtMe(false),
    _lookingAtMeStarted(0),
    _wasLastLookingAtMe(0),
    HeadData(owningAvatar),
    _leftEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID()),
    _rightEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID())
{

@@ -69,7 +42,7 @@ void Head::reset() {
    _baseYaw = _basePitch = _baseRoll = 0.0f;
}

void Head::simulate(float deltaTime, bool isMine) {
void Head::simulate(float deltaTime) {
    const float NORMAL_HZ = 60.0f; // the update rate the constant values were tuned for

    // grab the audio loudness from the owning avatar, if we have one
@@ -90,43 +63,7 @@ void Head::simulate(float deltaTime, bool isMine) {
        _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
    }

    if (isMine) {
        auto player = DependencyManager::get<recording::Deck>();
        // Only use face trackers when not playing back a recording.
        if (!player->isPlaying()) {
            FaceTracker* faceTracker = qApp->getActiveFaceTracker();
            _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
            if (_isFaceTrackerConnected) {
                _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();

                if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {

                    if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
                        calculateMouthShapes(deltaTime);

                        const int JAW_OPEN_BLENDSHAPE = 21;
                        const int MMMM_BLENDSHAPE = 34;
                        const int FUNNEL_BLENDSHAPE = 40;
                        const int SMILE_LEFT_BLENDSHAPE = 28;
                        const int SMILE_RIGHT_BLENDSHAPE = 29;
                        _blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
                        _blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
                        _blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
                        _blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
                        _blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
                    }

                    applyEyelidOffset(getFinalOrientationInWorldFrame());
                }
            }

            auto eyeTracker = DependencyManager::get<EyeTracker>();
            _isEyeTrackerConnected = eyeTracker->isTracking();
        }
    }

    if (!_isFaceTrackerConnected) {

        if (!_isEyeTrackerConnected) {
            // Update eye saccades
            const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;

@@ -222,7 +159,7 @@ void Head::simulate(float deltaTime, bool isMine) {
    } else {
        _saccade = glm::vec3();
    }
    if (Menu::getInstance()->isOptionChecked(MenuOption::FixGaze)) { // if debug menu turns off, use no saccade
    if (fixGaze) { // if debug menu turns off, use no saccade
        _saccade = glm::vec3();
    }


@@ -277,7 +214,7 @@ void Head::calculateMouthShapes(float deltaTime) {
void Head::applyEyelidOffset(glm::quat headOrientation) {
    // Adjusts the eyelid blendshape coefficients so that the eyelid follows the iris as the head pitches.

    if (Menu::getInstance()->isOptionChecked(MenuOption::DisableEyelidAdjustment)) {
    if (disableEyelidAdjustment) {
        return;
    }


@@ -350,7 +287,7 @@ glm::vec3 Head::getCorrectedLookAtPosition() {
    }
}

void Head::setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition) {
void Head::setCorrectedLookAtPosition(const glm::vec3& correctedLookAtPosition) {
    if (!isLookingAtMe()) {
        _lookingAtMeStarted = usecTimestampNow();
    }

@@ -366,25 +303,6 @@ bool Head::isLookingAtMe() {
    return _isLookingAtMe || (now - _wasLastLookingAtMe) < LOOKING_AT_ME_GAP_ALLOWED;
}

glm::quat Head::getCameraOrientation() const {
    // NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
    // you may wonder why this code is here. This method will be called while in Oculus mode to determine how
    // to change the driving direction while in Oculus mode. It is used to support driving toward where you're
    // head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
    // always the same.
    if (qApp->isHMDMode()) {
        MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
        if (myAvatar) {
            return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
        } else {
            return getOrientation();
        }
    } else {
        Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
        return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
    }
}

glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
    glm::quat orientation = getOrientation();
    glm::vec3 lookAtDelta = _lookAtPosition - eyePosition;
@@ -11,16 +11,10 @@
#ifndef hifi_Head_h
#define hifi_Head_h

#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>

#include <GLMHelpers.h>
#include <SharedUtil.h>

#include <HeadData.h>

#include "world.h"


const float EYE_EAR_GAP = 0.08f;

class Avatar;

@@ -31,9 +25,9 @@ public:

    void init();
    void reset();
    void simulate(float deltaTime, bool isMine);
    virtual void simulate(float deltaTime);
    void setScale(float scale);
    void setPosition(glm::vec3 position) { _position = position; }
    void setPosition(const glm::vec3& position) { _position = position; }
    void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
    void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }


@@ -43,17 +37,14 @@ public:
    /// \return orientationBody * (orientationBase+Delta)
    glm::quat getFinalOrientationInWorldFrame() const;

    /// \return orientationBody * orientationBasePitch
    glm::quat getCameraOrientation () const;

    void setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition);
    void setCorrectedLookAtPosition(const glm::vec3& correctedLookAtPosition);
    glm::vec3 getCorrectedLookAtPosition();
    void clearCorrectedLookAtPosition() { _isLookingAtMe = false; }
    bool isLookingAtMe();
    quint64 getLookingAtMeStarted() { return _lookingAtMeStarted; }

    float getScale() const { return _scale; }
    glm::vec3 getPosition() const { return _position; }
    const glm::vec3& getPosition() const { return _position; }
    const glm::vec3& getEyePosition() const { return _eyePosition; }
    const glm::vec3& getSaccade() const { return _saccade; }
    glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }

@@ -91,46 +82,46 @@ public:

    float getTimeWithoutTalking() const { return _timeWithoutTalking; }

private:
protected:
    glm::vec3 calculateAverageEyePosition() const { return _leftEyePosition + (_rightEyePosition - _leftEyePosition ) * 0.5f; }

    // disallow copies of the Head, copy of owning Avatar is disallowed too
    Head(const Head&);
    Head& operator= (const Head&);

    bool _returnHeadToCenter;
    bool _returnHeadToCenter { false };
    glm::vec3 _position;
    glm::vec3 _rotation;
    glm::vec3 _leftEyePosition;
    glm::vec3 _rightEyePosition;
    glm::vec3 _eyePosition;

    float _scale;
    float _lastLoudness;
    float _longTermAverageLoudness;
    float _audioAttack;
    float _audioJawOpen;
    float _trailingAudioJawOpen;
    float _mouth2;
    float _mouth3;
    float _mouth4;
    float _mouthTime;
    float _scale { 1.0f };
    float _lastLoudness { 0.0f };
    float _longTermAverageLoudness { -1.0f };
    float _audioAttack { 0.0f };
    float _audioJawOpen { 0.0f };
    float _trailingAudioJawOpen { 0.0f };
    float _mouth2 { 0.0f };
    float _mouth3 { 0.0f };
    float _mouth4 { 0.0f };
    float _mouthTime { 0.0f };

    glm::vec3 _saccade;
    glm::vec3 _saccadeTarget;
    float _leftEyeBlinkVelocity;
    float _rightEyeBlinkVelocity;
    float _timeWithoutTalking;
    float _leftEyeBlinkVelocity { 0.0f };
    float _rightEyeBlinkVelocity { 0.0f };
    float _timeWithoutTalking { 0.0f };

    // delta angles for local head rotation (driven by hardware input)
    float _deltaPitch;
    float _deltaYaw;
    float _deltaRoll;
    float _deltaPitch { 0.0f };
    float _deltaYaw { 0.0f };
    float _deltaRoll { 0.0f };

    bool _isCameraMoving;
    bool _isLookingAtMe;
    quint64 _lookingAtMeStarted;
    quint64 _wasLastLookingAtMe;
    bool _isCameraMoving { false };
    bool _isLookingAtMe { false };
    quint64 _lookingAtMeStarted { 0 };
    quint64 _wasLastLookingAtMe { 0 };

    glm::vec3 _correctedLookAtPosition;
@@ -6,6 +6,6 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "AvatarsRendererLogging.h"
#include "Logging.h"

Q_LOGGING_CATEGORY(avatars_renderer, "hifi.avatars.rendering")
@@ -0,0 +1,16 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "OtherAvatar.h"

OtherAvatar::OtherAvatar(QThread* thread, RigPointer rig) : Avatar(thread, rig) {
    // give the pointer to our head to inherited _headData variable from AvatarData
    _headData = new Head(this);
    _skeletonModel = std::make_shared<SkeletonModel>(this, nullptr, rig);
    connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);
}

@@ -0,0 +1,20 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_OtherAvatar_h
#define hifi_OtherAvatar_h

#include "Avatar.h"

class OtherAvatar : public Avatar {
public:
    explicit OtherAvatar(QThread* thread, RigPointer rig = nullptr);
    void instantiableAvatar() {};
};

#endif // hifi_OtherAvatar_h
@ -9,19 +9,18 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "SkeletonModel.h"
|
||||
|
||||
#include <glm/gtx/transform.hpp>
|
||||
#include <QMultiMap>
|
||||
|
||||
#include <recording/Deck.h>
|
||||
#include <DebugDraw.h>
|
||||
#include <AnimDebugDraw.h>
|
||||
#include <CharacterController.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Avatar.h"
|
||||
#include "Menu.h"
|
||||
#include "SkeletonModel.h"
|
||||
#include "Util.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "AnimDebugDraw.h"
|
||||
#include "Logging.h"
|
||||
|
||||
SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer rig) :
|
||||
CauterizedModel(rig, parent),
|
||||
|
@ -47,7 +46,7 @@ void SkeletonModel::initJointStates() {
    // Determine the default eye position for avatar scale = 1.0
    int headJointIndex = geometry.headJointIndex;
    if (0 > headJointIndex || headJointIndex >= _rig->getJointStateCount()) {
        qCWarning(interfaceapp) << "Bad head joint! Got:" << headJointIndex << "jointCount:" << _rig->getJointStateCount();
        qCWarning(avatars_renderer) << "Bad head joint! Got:" << headJointIndex << "jointCount:" << _rig->getJointStateCount();
    }
    glm::vec3 leftEyePosition, rightEyePosition;
    getEyeModelPositions(leftEyePosition, rightEyePosition);
@ -72,21 +71,6 @@ void SkeletonModel::initJointStates() {
    emit skeletonLoaded();
}

Rig::CharacterControllerState convertCharacterControllerState(CharacterController::State state) {
    switch (state) {
        default:
        case CharacterController::State::Ground:
            return Rig::CharacterControllerState::Ground;
        case CharacterController::State::Takeoff:
            return Rig::CharacterControllerState::Takeoff;
        case CharacterController::State::InAir:
            return Rig::CharacterControllerState::InAir;
        case CharacterController::State::Hover:
            return Rig::CharacterControllerState::Hover;
    };
}

// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    const FBXGeometry& geometry = getFBXGeometry();
@ -102,122 +86,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
        lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
    }

    if (_owningAvatar->isMyAvatar()) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

        Rig::HeadParameters headParams;

        // input action is the highest priority source for head orientation.
        auto avatarHeadPose = myAvatar->getHeadControllerPoseInAvatarFrame();
        if (avatarHeadPose.isValid()) {
            glm::mat4 rigHeadMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
            headParams.rigHeadPosition = extractTranslation(rigHeadMat);
            headParams.rigHeadOrientation = glmExtractRotation(rigHeadMat);
            headParams.headEnabled = true;
        } else {
            if (qApp->isHMDMode()) {
                // get HMD position from sensor space into world space, and back into rig space
                glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
                glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
                glm::mat4 worldToRig = glm::inverse(rigToWorld);
                glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
                _rig->computeHeadFromHMD(AnimPose(rigHMDMat), headParams.rigHeadPosition, headParams.rigHeadOrientation);
                headParams.headEnabled = true;
            } else {
                // even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and down in desktop mode.
                // preMult 180 is necessary to convert from avatar to rig coordinates.
                // postMult 180 is necessary to convert head from -z forward to z forward.
                headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
                headParams.headEnabled = false;
            }
        }

        auto avatarHipsPose = myAvatar->getHipsControllerPoseInAvatarFrame();
        if (avatarHipsPose.isValid()) {
            glm::mat4 rigHipsMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHipsPose.getRotation(), avatarHipsPose.getTranslation());
            headParams.hipsMatrix = rigHipsMat;
            headParams.hipsEnabled = true;
        } else {
            headParams.hipsEnabled = false;
        }

        auto avatarSpine2Pose = myAvatar->getSpine2ControllerPoseInAvatarFrame();
        if (avatarSpine2Pose.isValid()) {
            glm::mat4 rigSpine2Mat = Matrices::Y_180 * createMatFromQuatAndPos(avatarSpine2Pose.getRotation(), avatarSpine2Pose.getTranslation());
            headParams.spine2Matrix = rigSpine2Mat;
            headParams.spine2Enabled = true;
        } else {
            headParams.spine2Enabled = false;
        }

        headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;

        _rig->updateFromHeadParameters(headParams, deltaTime);

        Rig::HandAndFeetParameters handAndFeetParams;

        auto leftPose = myAvatar->getLeftHandControllerPoseInAvatarFrame();
        if (leftPose.isValid()) {
            handAndFeetParams.isLeftEnabled = true;
            handAndFeetParams.leftPosition = Quaternions::Y_180 * leftPose.getTranslation();
            handAndFeetParams.leftOrientation = Quaternions::Y_180 * leftPose.getRotation();
        } else {
            handAndFeetParams.isLeftEnabled = false;
        }

        auto rightPose = myAvatar->getRightHandControllerPoseInAvatarFrame();
        if (rightPose.isValid()) {
            handAndFeetParams.isRightEnabled = true;
            handAndFeetParams.rightPosition = Quaternions::Y_180 * rightPose.getTranslation();
            handAndFeetParams.rightOrientation = Quaternions::Y_180 * rightPose.getRotation();
        } else {
            handAndFeetParams.isRightEnabled = false;
        }

        auto leftFootPose = myAvatar->getLeftFootControllerPoseInAvatarFrame();
        if (leftFootPose.isValid()) {
            handAndFeetParams.isLeftFootEnabled = true;
            handAndFeetParams.leftFootPosition = Quaternions::Y_180 * leftFootPose.getTranslation();
            handAndFeetParams.leftFootOrientation = Quaternions::Y_180 * leftFootPose.getRotation();
        } else {
            handAndFeetParams.isLeftFootEnabled = false;
        }

        auto rightFootPose = myAvatar->getRightFootControllerPoseInAvatarFrame();
        if (rightFootPose.isValid()) {
            handAndFeetParams.isRightFootEnabled = true;
            handAndFeetParams.rightFootPosition = Quaternions::Y_180 * rightFootPose.getTranslation();
            handAndFeetParams.rightFootOrientation = Quaternions::Y_180 * rightFootPose.getRotation();
        } else {
            handAndFeetParams.isRightFootEnabled = false;
        }

        handAndFeetParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();
        handAndFeetParams.bodyCapsuleHalfHeight = myAvatar->getCharacterController()->getCapsuleHalfHeight();
        handAndFeetParams.bodyCapsuleLocalOffset = myAvatar->getCharacterController()->getCapsuleLocalOffset();

        _rig->updateFromHandAndFeetParameters(handAndFeetParams, deltaTime);

        Rig::CharacterControllerState ccState = convertCharacterControllerState(myAvatar->getCharacterController()->getState());

        auto velocity = myAvatar->getLocalVelocity();
        auto position = myAvatar->getLocalPosition();
        auto orientation = myAvatar->getLocalOrientation();
        _rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);

        // evaluate AnimGraph animation and update jointStates.
        Model::updateRig(deltaTime, parentTransform);

        Rig::EyeParameters eyeParams;
        eyeParams.eyeLookAt = lookAt;
        eyeParams.eyeSaccade = head->getSaccade();
        eyeParams.modelRotation = getRotation();
        eyeParams.modelTranslation = getTranslation();
        eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
        eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

        _rig->updateFromEyeParameters(eyeParams);
    } else {
        if (!_owningAvatar->isMyAvatar()) {
            // no need to call Model::updateRig() because otherAvatars get their joint state
            // copied directly from AvatarData::_jointData (there are no Rig animations to blend)
            _needsUpdateClusterMatrices = true;
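Every controller pose in the hunk above goes through the same conversion: a 180-degree rotation about Y applied on the left takes a pose from the avatar frame into the rig frame, as the in-code comments about preMult/postMult spell out. A self-contained sketch of that step (helper names are illustrative; only the Y_180 idea and the getTranslation()/getRotation() accessors come from the hunk):

#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

// Rig space faces the opposite way along Z from avatar space, hence the fixed Y rotation.
static const glm::quat Y_180_SKETCH = glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));

inline glm::vec3 avatarToRigPosition(const glm::vec3& avatarPos) { return Y_180_SKETCH * avatarPos; }
inline glm::quat avatarToRigOrientation(const glm::quat& avatarRot) { return Y_180_SKETCH * avatarRot; }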
@ -249,6 +118,9 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {

        _rig->updateFromEyeParameters(eyeParams);
    }

    // evaluate AnimGraph animation and update jointStates.
    Model::updateRig(deltaTime, parentTransform);
}

void SkeletonModel::updateAttitude() {
@ -114,7 +114,7 @@ protected:

    void computeBoundingShape();

private:
protected:

    bool getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
@ -94,7 +94,7 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
// +-----+-----+-+-+-+--+
// Key state - K0,K1 is found in the 1st and 2nd bits
// Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
// Faceshift - F is found in the 5th bit
// Face tracker - F is found in the 5th bit
// Eye tracker - E is found in the 6th bit
// Referential Data - R is found in the 7th bit
const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
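For readers following the bit-layout comment above, this is how such a packed flag byte is typically decoded; the struct and function below are hypothetical, only the bit positions (key state in bits 0-1, hand state in bits 2-3 and 7, face tracker 4, eye tracker 5, referential data 6) come from the comment:

#include <cstdint>

struct AvatarFlagBitsSketch {
    uint8_t keyState;    // bits 0-1 (K0, K1)
    uint8_t handState;   // bits 2-3 plus bit 7 (H0, H1, H2)
    bool faceTracker;    // bit 4 (F)
    bool eyeTracker;     // bit 5 (E)
    bool referential;    // bit 6 (R)
};

inline AvatarFlagBitsSketch decodeAvatarFlags(uint8_t bits) {
    AvatarFlagBitsSketch out;
    out.keyState    = bits & 0x03;
    out.handState   = static_cast<uint8_t>(((bits >> 2) & 0x03) | (((bits >> 7) & 0x01) << 2));
    out.faceTracker = (bits & (1 << 4)) != 0;
    out.eyeTracker  = (bits & (1 << 5)) != 0;
    out.referential = (bits & (1 << 6)) != 0;
    return out;
}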
@ -123,7 +123,7 @@ namespace AvatarDataPacket {
    // it might be nice to use a dictionary to compress that

    // Packet State Flags - we store the details about the existence of other records in this bitset:
    // AvatarGlobalPosition, Avatar Faceshift, eye tracking, and existence of
    // AvatarGlobalPosition, Avatar face tracker, eye tracking, and existence of
    using HasFlags = uint16_t;
    const HasFlags PACKET_HAS_AVATAR_GLOBAL_POSITION = 1U << 0;
    const HasFlags PACKET_HAS_AVATAR_BOUNDING_BOX = 1U << 1;
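The HasFlags bitset above is what lets optional records be omitted from the packet; a hedged sketch of the corresponding read path (the record sizes and helper are invented for illustration, only the two flag constants mirror the hunk):

#include <cstdint>

using HasFlagsSketch = uint16_t;
const HasFlagsSketch HAS_GLOBAL_POSITION = 1U << 0;
const HasFlagsSketch HAS_BOUNDING_BOX    = 1U << 1;

// Each optional record is consumed only if its flag bit is set.
inline void skipOptionalRecords(const uint8_t*& cursor, HasFlagsSketch flags) {
    if (flags & HAS_GLOBAL_POSITION) {
        cursor += 3 * sizeof(float);   // e.g. one vec3 for the global position record
    }
    if (flags & HAS_BOUNDING_BOX) {
        cursor += 6 * sizeof(float);   // e.g. two vec3s for the bounding-box record
    }
}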
@ -23,11 +23,6 @@

#include "AvatarData.h"

/// The names of the blendshapes expected by Faceshift, terminated with an empty string.
extern const char* FACESHIFT_BLENDSHAPES[];
/// The size of FACESHIFT_BLENDSHAPES
extern const int NUM_FACESHIFT_BLENDSHAPES;

HeadData::HeadData(AvatarData* owningAvatar) :
    _baseYaw(0.0f),
    _basePitch(0.0f),
@ -13,7 +13,7 @@

#include <PerfStat.h>

#include "SkeletonModel.h"
#include "CauterizedModel.h"

using namespace render;
@ -29,7 +29,7 @@ void CauterizedMeshPartPayload::updateTransformForCauterizedMesh(

void CauterizedMeshPartPayload::bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const {
    // Still relying on the raw data from the model
    SkeletonModel* skeleton = static_cast<SkeletonModel*>(_model);
    CauterizedModel* skeleton = static_cast<CauterizedModel*>(_model);
    bool useCauterizedMesh = (renderMode != RenderArgs::RenderMode::SHADOW_RENDER_MODE) && skeleton->getEnableCauterization();

    if (useCauterizedMesh) {
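The only functional change in this hunk is the cast target: the cauterization switch is queried through CauterizedModel, the class that owns it, so the render payload no longer needs to know about the interface-side SkeletonModel subclass. A simplified sketch of the class relationship assumed here (member names are illustrative):

class ModelSketch { public: virtual ~ModelSketch() = default; };

class CauterizedModelSketch : public ModelSketch {
public:
    bool getEnableCauterization() const { return _enableCauterization; }
    void setEnableCauterization(bool enable) { _enableCauterization = enable; }
private:
    bool _enableCauterization { false };
};

// Avatar-specific behaviour lives one level further down and is irrelevant to the payload.
class SkeletonModelSketch : public CauterizedModelSketch {};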
@ -1,9 +1,6 @@
//
// CauterizedModelMeshPartPayload.h
// interface/src/avatar
//
// Created by AndrewMeadows 2017.01.17
// Copyright 2017 High Fidelity, Inc.
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
@ -12,7 +9,7 @@
#ifndef hifi_CauterizedMeshPartPayload_h
#define hifi_CauterizedMeshPartPayload_h

#include <MeshPartPayload.h>
#include "MeshPartPayload.h"

class CauterizedMeshPartPayload : public ModelMeshPartPayload {
public:
@ -1,7 +1,4 @@
//
// CauterizedModel.cpp
// interface/src/avatar
//
// Created by Andrew Meadows 2017.01.17
// Copyright 2017 High Fidelity, Inc.
//
@ -11,10 +8,10 @@

#include "CauterizedModel.h"

#include <AbstractViewStateInterface.h>
#include <MeshPartPayload.h>
#include <PerfStat.h>

#include "AbstractViewStateInterface.h"
#include "MeshPartPayload.h"
#include "CauterizedMeshPartPayload.h"
#include "RenderUtilsLogging.h"
@ -1,7 +1,4 @@
//
// CauterizeableModel.h
// interface/src/avatar
//
// Created by Andrew Meadows 2016.01.17
// Copyright 2017 High Fidelity, Inc.
//
@ -13,7 +10,7 @@
#define hifi_CauterizedModel_h

#include <Model.h>
#include "Model.h"

class CauterizedModel : public Model {
    Q_OBJECT
@ -1,7 +1,4 @@
//
// SoftAttachmentModel.cpp
// interface/src/avatar
//
// Created by Anthony J. Thibault on 12/17/15.
// Copyright 2013 High Fidelity, Inc.
//
@ -10,7 +7,6 @@
//

#include "SoftAttachmentModel.h"
#include "InterfaceLogging.h"

SoftAttachmentModel::SoftAttachmentModel(RigPointer rig, QObject* parent, RigPointer rigOverride) :
    CauterizedModel(rig, parent),
@ -1,7 +1,4 @@
//
// SoftAttachmentModel.h
// interface/src/avatar
//
// Created by Anthony J. Thibault on 12/17/15.
// Copyright 2015 High Fidelity, Inc.
//
@ -582,3 +582,9 @@ glm::mat4 orthoInverse(const glm::mat4& m) {
    r[3][3] = 1.0f;
    return r;
}

// Return a random vector of average length 1
glm::vec3 randVector() {
    return glm::vec3(randFloat() - 0.5f, randFloat() - 0.5f, randFloat() - 0.5f) * 2.0f;
}
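randFloat() is taken here to return a uniform value in [0, 1), so each component of the result is uniform in [-1, 1] and the expected length works out to roughly 0.96, which is what the "average length 1" comment is getting at. A self-contained equivalent using the standard library (illustrative only, not part of the commit):

#include <glm/glm.hpp>
#include <random>

glm::vec3 randVectorSketch() {
    static std::mt19937 gen { std::random_device{}() };
    static std::uniform_real_distribution<float> dist(0.0f, 1.0f);
    // Components uniform in [-1, 1], matching the hunk above.
    return glm::vec3(dist(gen) - 0.5f, dist(gen) - 0.5f, dist(gen) - 0.5f) * 2.0f;
}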
@ -252,6 +252,9 @@ inline bool isNaN(const glm::quat& value) { return isNaN(value.w) || isNaN(value

glm::mat4 orthoInverse(const glm::mat4& m);

// Return a random vector of average length 1
glm::vec3 randVector();

//
// Safe replacement of glm_mat4_mul() for unaligned arguments instead of __m128
//
@ -8,16 +8,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <glm/gtx/quaternion.hpp>

#include <SharedUtil.h>
#include <EventTypes.h>

#include "Application.h"
#include "Camera.h"
#include "Menu.h"
#include "Util.h"

CameraMode stringToMode(const QString& mode) {
    if (mode == "third person") {
@ -102,35 +93,9 @@ void Camera::setProjection(const glm::mat4& projection) {
    _projection = projection;
}

PickRay Camera::computePickRay(float x, float y) {
    return qApp->computePickRay(x, y);
}

void Camera::setModeString(const QString& mode) {
    CameraMode targetMode = stringToMode(mode);

    switch (targetMode) {
        case CAMERA_MODE_FIRST_PERSON:
            Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, true);
            break;
        case CAMERA_MODE_THIRD_PERSON:
            Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, true);
            break;
        case CAMERA_MODE_MIRROR:
            Menu::getInstance()->setIsOptionChecked(MenuOption::FullscreenMirror, true);
            break;
        case CAMERA_MODE_INDEPENDENT:
            Menu::getInstance()->setIsOptionChecked(MenuOption::IndependentMode, true);
            break;
        case CAMERA_MODE_ENTITY:
            Menu::getInstance()->setIsOptionChecked(MenuOption::CameraEntityMode, true);
            break;
        default:
            break;
    }

    qApp->cameraMenuChanged();

    if (_mode != targetMode) {
        setMode(targetMode);
    }
@ -11,11 +11,9 @@
#ifndef hifi_Camera_h
#define hifi_Camera_h

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <RegisteredMetaTypes.h>
#include <ViewFrustum.h>
#include "../GLMHelpers.h"
#include "../RegisteredMetaTypes.h"
#include "../ViewFrustum.h"

enum CameraMode
{
@ -87,7 +85,7 @@ public slots:
     * @param {float} y Y-coordinate on screen.
     * @return {PickRay} The computed {PickRay}.
     */
    PickRay computePickRay(float x, float y);
    virtual PickRay computePickRay(float x, float y) const = 0;

    /**jsdoc
     * Set the camera to look at position <code>position</code>. Only works while in <code>independent</code>.
@ -2,7 +2,7 @@

#include <GLMHelpers.h>

class Camera {
class SimpleCamera {
protected:
    float fov { 60.0f };
    float znear { DEFAULT_NEAR_CLIP }, zfar { DEFAULT_FAR_CLIP };
@ -42,7 +42,7 @@ public:

    std::bitset<KEYS_SIZE> keys;

    Camera() {
    SimpleCamera() {
        matrices.perspective = glm::perspective(glm::radians(fov), aspect, znear, zfar);
    }
@ -109,7 +109,7 @@ public:
    }
};

class QWindowCamera : public Camera {
class QWindowCamera : public SimpleCamera {
    Key forKey(int key) {
        switch (key) {
            case Qt::Key_W: return FORWARD;
@ -1067,7 +1067,7 @@ private:
    }

    void cycleMode() {
        static auto defaultProjection = Camera().matrices.perspective;
        static auto defaultProjection = SimpleCamera().matrices.perspective;
        _renderMode = (RenderMode)((_renderMode + 1) % RENDER_MODE_COUNT);
        if (_renderMode == HMD) {
            _camera.matrices.perspective[0] = vec4 { 0.759056330, 0.000000000, 0.000000000, 0.000000000 };
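The last three hunks are a mechanical rename of the GL test helper from Camera to SimpleCamera, presumably so it no longer collides with the interface Camera class once headers are shared; the shape of the change reduces to this standalone sketch (values and the derived class are illustrative):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

class SimpleCameraSketch {
public:
    struct Matrices {
        glm::mat4 perspective;
    } matrices;

    SimpleCameraSketch() {
        matrices.perspective = glm::perspective(glm::radians(60.0f), 16.0f / 9.0f, 0.01f, 1000.0f);
    }
};

// Derived helpers only change their base class name.
class WindowCameraSketch : public SimpleCameraSketch {};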