Merge pull request #9745 from AndrewMeadows/faster-avatar-updates

Faster avatar updates
Andrew Meadows 2017-02-28 10:55:21 -08:00 committed by GitHub
commit c6730f7c58
9 changed files with 103 additions and 113 deletions

AvatarMixerSlave.cpp

@@ -168,7 +168,6 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
QList<AvatarSharedPointer> avatarList;
std::unordered_map<AvatarSharedPointer, SharedNodePointer> avatarDataToNodes;
- int listItem = 0;
std::for_each(_begin, _end, [&](const SharedNodePointer& otherNode) {
const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
@@ -176,7 +175,6 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
// but not have yet sent data that's linked to the node. Check for that case and don't
// consider those nodes.
if (otherNodeData) {
- listItem++;
AvatarSharedPointer otherAvatar = otherNodeData->getAvatarSharedPointer();
avatarList << otherAvatar;
avatarDataToNodes[otherAvatar] = otherNode;
@@ -185,8 +183,8 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
AvatarSharedPointer thisAvatar = nodeData->getAvatarSharedPointer();
ViewFrustum cameraView = nodeData->getViewFrustom();
- std::priority_queue<AvatarPriority> sortedAvatars = AvatarData::sortAvatars(
- avatarList, cameraView,
+ std::priority_queue<AvatarPriority> sortedAvatars;
+ AvatarData::sortAvatars(avatarList, cameraView, sortedAvatars,
[&](AvatarSharedPointer avatar)->uint64_t{
auto avatarNode = avatarDataToNodes[avatar];
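
The hunk above shows the core interface change of this PR: sortAvatars() no longer returns the priority queue by value; the caller declares it and passes it in by reference as sortedAvatarsOut. A minimal sketch of how the reworked call site reads, assuming the surrounding hifi types (AvatarSharedPointer, ViewFrustum, and an AvatarPriority that carries the avatar/priority pair it was constructed with); the three lambda bodies are placeholders, not the mixer's real callbacks:

    // Sketch only -- the lambda bodies are stand-ins, not the mixer's real logic.
    std::priority_queue<AvatarPriority> sortedAvatars;
    AvatarData::sortAvatars(avatarList, cameraView, sortedAvatars,
        [&](AvatarSharedPointer avatar)->uint64_t {
            return 0; // placeholder: when this avatar's data was last sent to the destination node
        },
        [&](AvatarSharedPointer avatar)->float {
            return 1.0f; // placeholder: bounding radius used for the apparent-size term
        },
        [&](AvatarSharedPointer avatar)->bool {
            return avatar == thisAvatar; // placeholder: true means skip this avatar entirely
        });
    while (!sortedAvatars.empty()) {
        AvatarPriority sortData = sortedAvatars.top();
        sortedAvatars.pop();
        // ... pack sortData.avatar's data into the outgoing packet ...
    }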

Avatar.cpp

@@ -351,7 +351,6 @@ void Avatar::simulate(float deltaTime, bool inView) {
_jointDataSimulationRate.increment();
_skeletonModel->simulate(deltaTime, true);
_skeletonModelSimulationRate.increment();
- locationChanged(); // joints changed, so if there are any children, update them.
_hasNewJointData = false;
@@ -367,8 +366,8 @@ void Avatar::simulate(float deltaTime, bool inView) {
} else {
// a non-full update is still required so that the position, rotation, scale and bounds of the skeletonModel are updated.
_skeletonModel->simulate(deltaTime, false);
+ _skeletonModelSimulationRate.increment();
}
- _skeletonModelSimulationRate.increment();
}
// update animation for display name fade in/out

AvatarManager.cpp

@@ -157,15 +157,14 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
lock.unlock();
PerformanceTimer perfTimer("otherAvatars");
- uint64_t startTime = usecTimestampNow();
auto avatarMap = getHashCopy();
QList<AvatarSharedPointer> avatarList = avatarMap.values();
ViewFrustum cameraView;
qApp->copyDisplayViewFrustum(cameraView);
- std::priority_queue<AvatarPriority> sortedAvatars = AvatarData::sortAvatars(
- avatarList, cameraView,
+ std::priority_queue<AvatarPriority> sortedAvatars;
+ AvatarData::sortAvatars(avatarList, cameraView, sortedAvatars,
[](AvatarSharedPointer avatar)->uint64_t{
return std::static_pointer_cast<Avatar>(avatar)->getLastRenderUpdateTime();
@@ -194,10 +193,9 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
});
render::PendingChanges pendingChanges;
- const uint64_t RENDER_UPDATE_BUDGET = 1500; // usec
- const uint64_t MAX_UPDATE_BUDGET = 2000; // usec
- uint64_t renderExpiry = startTime + RENDER_UPDATE_BUDGET;
- uint64_t maxExpiry = startTime + MAX_UPDATE_BUDGET;
+ uint64_t startTime = usecTimestampNow();
+ const uint64_t UPDATE_BUDGET = 2000; // usec
+ uint64_t updateExpiry = startTime + UPDATE_BUDGET;
int numAvatarsUpdated = 0;
int numAVatarsNotUpdated = 0;
@@ -223,7 +221,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
const float OUT_OF_VIEW_THRESHOLD = 0.5f * AvatarData::OUT_OF_VIEW_PENALTY;
uint64_t now = usecTimestampNow();
- if (now < renderExpiry) {
+ if (now < updateExpiry) {
// we're within budget
bool inView = sortData.priority > OUT_OF_VIEW_THRESHOLD;
if (inView && avatar->hasNewJointData()) {
@@ -232,21 +230,13 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
avatar->simulate(deltaTime, inView);
avatar->updateRenderItem(pendingChanges);
avatar->setLastRenderUpdateTime(startTime);
- } else if (now < maxExpiry) {
- // we've spent most of our time budget, but we still simulate() the avatar as it if were out of view
- // --> some avatars may freeze until their priority trickles up
- bool inView = sortData.priority > OUT_OF_VIEW_THRESHOLD;
- if (inView && avatar->hasNewJointData()) {
- numAVatarsNotUpdated++;
- }
- avatar->simulate(deltaTime, false);
} else {
- // we've spent ALL of our time budget --> bail on the rest of the avatar updates
+ // we've spent our full time budget --> bail on the rest of the avatar updates
// --> more avatars may freeze until their priority trickles up
// --> some scale or fade animations may glitch
// --> some avatar velocity measurements may be a little off
- // HACK: no time simulate, but we will take the time to count how many were tragically missed
+ // no time simulate, but we take the time to count how many were tragically missed
bool inView = sortData.priority > OUT_OF_VIEW_THRESHOLD;
if (!inView) {
break;
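
The AvatarManager hunks above collapse the old two-tier budget (a 1500 usec render budget plus a 2000 usec hard cap, both measured from before the sort) into a single 2000 usec budget that starts counting only after the sort. A condensed sketch of the resulting loop shape, assuming sortedAvatars was filled by sortAvatars() and that AvatarPriority exposes the avatar it was constructed with; the bookkeeping past the visible break; is not shown in the hunk and is stubbed here:

    // Sketch of the post-change control flow; not a verbatim copy of updateOtherAvatars().
    uint64_t startTime = usecTimestampNow();
    const uint64_t UPDATE_BUDGET = 2000; // usec
    uint64_t updateExpiry = startTime + UPDATE_BUDGET;
    const float OUT_OF_VIEW_THRESHOLD = 0.5f * AvatarData::OUT_OF_VIEW_PENALTY;
    while (!sortedAvatars.empty()) {
        const AvatarPriority& sortData = sortedAvatars.top();
        auto avatar = std::static_pointer_cast<Avatar>(sortData.avatar); // .avatar member assumed
        bool inView = sortData.priority > OUT_OF_VIEW_THRESHOLD;
        if (usecTimestampNow() < updateExpiry) {
            // within budget: full update, whether or not the avatar is in view
            avatar->simulate(deltaTime, inView);
            avatar->updateRenderItem(pendingChanges);
            avatar->setLastRenderUpdateTime(startTime);
        } else if (inView) {
            // over budget: skip the update but count the in-view avatars that missed out
            numAVatarsNotUpdated++;
        } else {
            // over budget and out of view: the queue is sorted, so everything left is lower priority
            break;
        }
        sortedAvatars.pop();
    }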

CauterizedModel.cpp

@@ -95,12 +95,6 @@ void CauterizedModel::createCollisionRenderItemSet() {
Model::createCollisionRenderItemSet();
}
- // Called within Model::simulate call, below.
- void CauterizedModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
- Model::updateRig(deltaTime, parentTransform);
- _needsUpdateClusterMatrices = true;
- }
void CauterizedModel::updateClusterMatrices() {
PerformanceTimer perfTimer("CauterizedModel::updateClusterMatrices");

CauterizedModel.h

@@ -37,7 +37,6 @@ public:
void createVisibleRenderItemSet() override;
void createCollisionRenderItemSet() override;
- virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
virtual void updateClusterMatrices() override;
void updateRenderItems() override;

SkeletonModel.cpp

@@ -166,7 +166,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
_rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);
// evaluate AnimGraph animation and update jointStates.
- CauterizedModel::updateRig(deltaTime, parentTransform);
+ Model::updateRig(deltaTime, parentTransform);
Rig::EyeParameters eyeParams;
eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
@@ -179,7 +179,9 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
_rig->updateFromEyeParameters(eyeParams);
} else {
- CauterizedModel::updateRig(deltaTime, parentTransform);
+ // no need to call Model::updateRig() because otherAvatars get their joint state
+ // copied directly from AvtarData::_jointData (there are no Rig animations to blend)
+ _needsUpdateClusterMatrices = true;
// This is a little more work than we really want.
//

Rig.cpp

@@ -1307,39 +1307,50 @@ void Rig::copyJointsIntoJointData(QVector<JointData>& jointDataVec) const {
void Rig::copyJointsFromJointData(const QVector<JointData>& jointDataVec) {
PerformanceTimer perfTimer("copyJoints");
PROFILE_RANGE(simulation_animation_detail, "copyJoints");
- if (_animSkeleton && jointDataVec.size() == (int)_internalPoseSet._relativePoses.size()) {
- // make a vector of rotations in absolute-geometry-frame
- const AnimPoseVec& absoluteDefaultPoses = _animSkeleton->getAbsoluteDefaultPoses();
- std::vector<glm::quat> rotations;
- rotations.reserve(absoluteDefaultPoses.size());
- const glm::quat rigToGeometryRot(glmExtractRotation(_rigToGeometryTransform));
- for (int i = 0; i < jointDataVec.size(); i++) {
- const JointData& data = jointDataVec.at(i);
- if (data.rotationSet) {
- // JointData rotations are in absolute rig-frame so we rotate them to absolute geometry-frame
- rotations.push_back(rigToGeometryRot * data.rotation);
- } else {
- rotations.push_back(absoluteDefaultPoses[i].rot());
- }
- }
+ if (!_animSkeleton) {
+ return;
+ }
+ if (jointDataVec.size() != (int)_internalPoseSet._relativePoses.size()) {
+ // animations haven't fully loaded yet.
+ _internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
+ }
- // convert rotations from absolute to parent relative.
- _animSkeleton->convertAbsoluteRotationsToRelative(rotations);
- // store new relative poses
- const AnimPoseVec& relativeDefaultPoses = _animSkeleton->getRelativeDefaultPoses();
- for (int i = 0; i < jointDataVec.size(); i++) {
- const JointData& data = jointDataVec.at(i);
- _internalPoseSet._relativePoses[i].scale() = Vectors::ONE;
- _internalPoseSet._relativePoses[i].rot() = rotations[i];
- if (data.translationSet) {
- // JointData translations are in scaled relative-frame so we scale back to regular relative-frame
- _internalPoseSet._relativePoses[i].trans() = _invGeometryOffset.scale() * data.translation;
- } else {
- _internalPoseSet._relativePoses[i].trans() = relativeDefaultPoses[i].trans();
- }
+ // make a vector of rotations in absolute-geometry-frame
+ const AnimPoseVec& absoluteDefaultPoses = _animSkeleton->getAbsoluteDefaultPoses();
+ std::vector<glm::quat> rotations;
+ rotations.reserve(absoluteDefaultPoses.size());
+ const glm::quat rigToGeometryRot(glmExtractRotation(_rigToGeometryTransform));
+ for (int i = 0; i < jointDataVec.size(); i++) {
+ const JointData& data = jointDataVec.at(i);
+ if (data.rotationSet) {
+ // JointData rotations are in absolute rig-frame so we rotate them to absolute geometry-frame
+ rotations.push_back(rigToGeometryRot * data.rotation);
+ } else {
+ rotations.push_back(absoluteDefaultPoses[i].rot());
+ }
+ }
+ // convert rotations from absolute to parent relative.
+ _animSkeleton->convertAbsoluteRotationsToRelative(rotations);
+ // store new relative poses
+ const AnimPoseVec& relativeDefaultPoses = _animSkeleton->getRelativeDefaultPoses();
+ for (int i = 0; i < jointDataVec.size(); i++) {
+ const JointData& data = jointDataVec.at(i);
+ _internalPoseSet._relativePoses[i].scale() = Vectors::ONE;
+ _internalPoseSet._relativePoses[i].rot() = rotations[i];
+ if (data.translationSet) {
+ // JointData translations are in scaled relative-frame so we scale back to regular relative-frame
+ _internalPoseSet._relativePoses[i].trans() = _invGeometryOffset.scale() * data.translation;
+ } else {
+ _internalPoseSet._relativePoses[i].trans() = relativeDefaultPoses[i].trans();
+ }
+ }
+ // build absolute poses and copy to externalPoseSet
+ buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
+ QWriteLocker writeLock(&_externalPoseSetLock);
+ _externalPoseSet = _internalPoseSet;
}
void Rig::computeAvatarBoundingCapsule(
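
Behaviorally, the Rig::copyJointsFromJointData() rewrite above changes two things: a joint-count mismatch (animations still loading) no longer causes the incoming joint data to be dropped -- the internal poses are reset to the skeleton defaults and the copy proceeds -- and the function now ends by building the absolute poses and publishing them to the external pose set, which lines up with the SkeletonModel.cpp change where other avatars skip Model::updateRig() entirely. A distilled outline of the new control flow (loop bodies elided; see the hunk above):

    void Rig::copyJointsFromJointData(const QVector<JointData>& jointDataVec) {
        if (!_animSkeleton) {
            return; // no skeleton yet -- nothing to copy into
        }
        if (jointDataVec.size() != (int)_internalPoseSet._relativePoses.size()) {
            // animations haven't fully loaded yet: start from the default relative poses
            _internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
        }
        // ... convert incoming rotations (absolute rig-frame -> geometry-frame -> parent-relative)
        //     and translations (unscaled) into _internalPoseSet._relativePoses ...
        buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
        QWriteLocker writeLock(&_externalPoseSetLock);
        _externalPoseSet = _internalPoseSet;
    }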

AvatarData.cpp

@@ -2324,61 +2324,57 @@ float AvatarData::_avatarSortCoefficientSize { 0.5f };
float AvatarData::_avatarSortCoefficientCenter { 0.25 };
float AvatarData::_avatarSortCoefficientAge { 1.0f };
- std::priority_queue<AvatarPriority> AvatarData::sortAvatars(
- QList<AvatarSharedPointer> avatarList,
- const ViewFrustum& cameraView,
- std::function<uint64_t(AvatarSharedPointer)> getLastUpdated,
- std::function<float(AvatarSharedPointer)> getBoundingRadius,
- std::function<bool(AvatarSharedPointer)> shouldIgnore) {
+ void AvatarData::sortAvatars(
+ QList<AvatarSharedPointer> avatarList,
+ const ViewFrustum& cameraView,
+ std::priority_queue<AvatarPriority>& sortedAvatarsOut,
+ std::function<uint64_t(AvatarSharedPointer)> getLastUpdated,
+ std::function<float(AvatarSharedPointer)> getBoundingRadius,
+ std::function<bool(AvatarSharedPointer)> shouldIgnore) {
- uint64_t startTime = usecTimestampNow();
+ PROFILE_RANGE(simulation, "sort");
+ uint64_t now = usecTimestampNow();
glm::vec3 frustumCenter = cameraView.getPosition();
+ const glm::vec3& forward = cameraView.getDirection();
+ for (int32_t i = 0; i < avatarList.size(); ++i) {
+ const auto& avatar = avatarList.at(i);
- std::priority_queue<AvatarPriority> sortedAvatars;
- {
- PROFILE_RANGE(simulation, "sort");
- for (int32_t i = 0; i < avatarList.size(); ++i) {
- const auto& avatar = avatarList.at(i);
- if (shouldIgnore(avatar)) {
- continue;
- }
- // priority = weighted linear combination of:
- // (a) apparentSize
- // (b) proximity to center of view
- // (c) time since last update
- glm::vec3 avatarPosition = avatar->getPosition();
- glm::vec3 offset = avatarPosition - frustumCenter;
- float distance = glm::length(offset) + 0.001f; // add 1mm to avoid divide by zero
- // FIXME - AvatarData has something equivolent to this
- float radius = getBoundingRadius(avatar);
- const glm::vec3& forward = cameraView.getDirection();
- float apparentSize = 2.0f * radius / distance;
- float cosineAngle = glm::length(glm::dot(offset, forward) * forward) / distance;
- float age = (float)(startTime - getLastUpdated(avatar)) / (float)(USECS_PER_SECOND);
- // NOTE: we are adding values of different units to get a single measure of "priority".
- // Thus we multiply each component by a conversion "weight" that scales its units relative to the others.
- // These weights are pure magic tuning and should be hard coded in the relation below,
- // but are currently exposed for anyone who would like to explore fine tuning:
- float priority = _avatarSortCoefficientSize * apparentSize
- + _avatarSortCoefficientCenter * cosineAngle
- + _avatarSortCoefficientAge * age;
- // decrement priority of avatars outside keyhole
- if (distance > cameraView.getCenterRadius()) {
- if (!cameraView.sphereIntersectsFrustum(avatarPosition, radius)) {
- priority += OUT_OF_VIEW_PENALTY;
- }
- }
- sortedAvatars.push(AvatarPriority(avatar, priority));
+ if (shouldIgnore(avatar)) {
+ continue;
+ }
+ // priority = weighted linear combination of:
+ // (a) apparentSize
+ // (b) proximity to center of view
+ // (c) time since last update
+ glm::vec3 avatarPosition = avatar->getPosition();
+ glm::vec3 offset = avatarPosition - frustumCenter;
+ float distance = glm::length(offset) + 0.001f; // add 1mm to avoid divide by zero
+ // FIXME - AvatarData has something equivolent to this
+ float radius = getBoundingRadius(avatar);
+ float apparentSize = 2.0f * radius / distance;
+ float cosineAngle = glm::dot(offset, forward) / distance;
+ float age = (float)(now - getLastUpdated(avatar)) / (float)(USECS_PER_SECOND);
+ // NOTE: we are adding values of different units to get a single measure of "priority".
+ // Thus we multiply each component by a conversion "weight" that scales its units relative to the others.
+ // These weights are pure magic tuning and should be hard coded in the relation below,
+ // but are currently exposed for anyone who would like to explore fine tuning:
+ float priority = _avatarSortCoefficientSize * apparentSize
+ + _avatarSortCoefficientCenter * cosineAngle
+ + _avatarSortCoefficientAge * age;
+ // decrement priority of avatars outside keyhole
+ if (distance > cameraView.getCenterRadius()) {
+ if (!cameraView.sphereIntersectsFrustum(avatarPosition, radius)) {
+ priority += OUT_OF_VIEW_PENALTY;
+ }
+ }
+ sortedAvatarsOut.push(AvatarPriority(avatar, priority));
+ }
- return sortedAvatars;
}
QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEntityMap& value) {
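
Two details of the sortAvatars() reimplementation are easy to miss in the flattened diff: the camera forward vector is now fetched once outside the per-avatar loop, and the center-of-view term drops the glm::length() wrapper -- for a unit-length forward the old expression was the absolute cosine of the angle to the avatar, while the new glm::dot(offset, forward) / distance is signed, so avatars behind the camera now lower the score instead of raising it. A hypothetical helper that restates the scoring, with the coefficient values taken from the statics at the top of the hunk and outOfViewPenalty standing in for AvatarData::OUT_OF_VIEW_PENALTY (a negative constant whose value is not shown here):

    #include <glm/glm.hpp>
    #include <cstdint>

    // Mirrors the per-avatar scoring in AvatarData::sortAvatars() above; not part of the PR.
    float computeAvatarPriority(const glm::vec3& avatarPosition, float radius,
                                const glm::vec3& frustumCenter, const glm::vec3& forward, // forward assumed unit length
                                uint64_t nowUsec, uint64_t lastUpdatedUsec,
                                bool outsideKeyholeAndFrustum, float outOfViewPenalty) {
        const float USECS_PER_SECOND = 1.0e6f;
        glm::vec3 offset = avatarPosition - frustumCenter;
        float distance = glm::length(offset) + 0.001f;            // add 1mm to avoid divide by zero
        float apparentSize = 2.0f * radius / distance;             // larger on screen -> higher priority
        float cosineAngle = glm::dot(offset, forward) / distance;  // roughly +1 straight ahead, negative behind
        float age = (float)(nowUsec - lastUpdatedUsec) / USECS_PER_SECOND; // seconds since last update
        float priority = 0.5f * apparentSize + 0.25f * cosineAngle + 1.0f * age;
        if (outsideKeyholeAndFrustum) {
            priority += outOfViewPenalty; // "decrement priority of avatars outside keyhole"
        }
        return priority;
    }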

AvatarData.h

@@ -597,9 +597,10 @@ public:
static const float OUT_OF_VIEW_PENALTY;
- static std::priority_queue<AvatarPriority> sortAvatars(
+ static void sortAvatars(
QList<AvatarSharedPointer> avatarList,
const ViewFrustum& cameraView,
+ std::priority_queue<AvatarPriority>& sortedAvatarsOut,
std::function<uint64_t(AvatarSharedPointer)> getLastUpdated,
std::function<float(AvatarSharedPointer)> getBoundingRadius,
std::function<bool(AvatarSharedPointer)> shouldIgnore);