Mirror of https://github.com/overte-org/overte.git
Updates to overlay fade
* Vive rendering works again.
* The overlay fades away in standing mode if you look or move away from the overlay sphere.
* The middle mouse button can be used to fade the overlay in or out.
* The mouse pointer renders properly on the overlay.
Parent: 1d93abf90c
Commit: 87a0e48d30

6 changed files with 40 additions and 22 deletions

@@ -942,13 +942,15 @@ void Application::paintGL() {
         // Using the latter will cause the camera to wobble with idle animations,
         // or with changes from the face tracker

-        _myCamera.setPosition(_myAvatar->getDefaultEyePosition());
         if (!getActiveDisplayPlugin()->isHmd()) {
+            _myCamera.setPosition(_myAvatar->getDefaultEyePosition());
             _myCamera.setRotation(_myAvatar->getHead()->getCameraOrientation());
         } else {
             // The plugin getModelview() call below will compose the base
-            // avatar transform with the HMD pose.
-            _myCamera.setRotation(_myAvatar->getOrientation());
+            // sensor to world transform with the HMD pose.
+            mat4 sensorToWorldMat = _myAvatar->getSensorToWorldMatrix();
+            _myCamera.setPosition(extractTranslation(sensorToWorldMat));
+            _myCamera.setRotation(glm::quat_cast(sensorToWorldMat));
         }
     } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
         if (isHMDMode()) {

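For context on the HMD branch above: extractTranslation() and glm::quat_cast() simply pull the position and orientation out of the 4x4 sensor-to-world matrix. A minimal standalone sketch of that decomposition with plain glm, assuming the matrix carries no scale or shear (names here are illustrative, not the engine helpers):

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Decompose a rigid transform into position + orientation, as the HMD
    // camera path above does with the avatar's sensor-to-world matrix.
    struct PosRot {
        glm::vec3 position;
        glm::quat rotation;
    };

    PosRot decomposeRigidTransform(const glm::mat4& m) {
        PosRot out;
        out.position = glm::vec3(m[3]);   // translation lives in the last column
        out.rotation = glm::quat_cast(m); // upper-left 3x3 as a quaternion (no scale assumed)
        return out;
    }
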
@@ -1683,7 +1685,7 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {

         } else if (event->button() == Qt::RightButton) {
             // right click items here

         } else if (event->button() == Qt::MiddleButton) {
             // toggle the overlay
             _overlayConductor.setEnabled(!_overlayConductor.getEnabled());
         }

@@ -278,7 +278,7 @@ void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
             const QString& collisionSoundURL = myAvatar->getCollisionSoundURL();
             if (!collisionSoundURL.isEmpty()) {
                 const float velocityChange = glm::length(collision.velocityChange);
-                const float MIN_AVATAR_COLLISION_ACCELERATION = 0.01;
+                const float MIN_AVATAR_COLLISION_ACCELERATION = 0.01f;
                 const bool isSound = (collision.type == CONTACT_EVENT_TYPE_START) && (velocityChange > MIN_AVATAR_COLLISION_ACCELERATION);

                 if (!isSound) {

@@ -283,15 +283,12 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
     batch.setResourceTexture(0, overlayFramebuffer->getRenderBuffer(0));

     mat4 camMat;
-    _cameraTransform.getMatrix(camMat);
-    camMat = camMat * qApp->getEyeOffset(eye);
+    _cameraBaseTransform.getMatrix(camMat);
+    camMat = camMat * qApp->getEyePose(eye);
     batch.setViewTransform(camMat);

     batch.setProjectionTransform(qApp->getEyeProjection(eye));

-    mat4 eyePose = qApp->getEyePose(eye);
-    glm::mat4 overlayXfm = glm::inverse(eyePose);
 #ifdef DEBUG_OVERLAY
     {
         batch.setModelTransform(glm::translate(mat4(), vec3(0, 0, -2)));

@@ -313,6 +310,9 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
     bindCursorTexture(batch);

     //Controller Pointers
+    glm::mat4 overlayXfm;
+    _modelTransform.getMatrix(overlayXfm);
+
     MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
         PalmData& palm = myAvatar->getHand()->getPalms()[i];

@@ -731,7 +731,7 @@ void ApplicationCompositor::updateTooltips() {
 }

 void ApplicationCompositor::update(float dt) {
-    const int ALPHA_FADE_RATE = 1.0f;
+    const int ALPHA_FADE_RATE = 2.0f;
     _prevAlpha = _alpha;
     if (_fadeInAlpha && _alpha < 1.0f) {
         _alpha = std::min(_alpha + ALPHA_FADE_RATE * dt, 1.0f);

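A note on the constant change above: _alpha is advanced by ALPHA_FADE_RATE * dt each frame and clamped to [0, 1], so the rate is in alpha units per second, and bumping it from 1.0 to 2.0 cuts a full fade from roughly one second to half a second. A minimal sketch of that integration outside the engine (hypothetical helper, not the ApplicationCompositor API):

    #include <algorithm>

    // Advance a fade value toward 0 or 1 at a fixed rate (alpha per second).
    float stepFade(float alpha, float dt, bool fadeIn, float rate = 2.0f) {
        if (fadeIn) {
            return std::min(alpha + rate * dt, 1.0f);
        }
        return std::max(alpha - rate * dt, 0.0f);
    }

    // e.g. at 60 fps (dt ~= 1/60 s) and rate 2.0, a 0 -> 1 fade takes about 30 frames (0.5 s).
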
@@ -64,8 +64,11 @@ public:
     void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
     uint32_t getOverlayTexture() const;

     void setCameraTransform(const Transform& transform) { _cameraTransform = transform; }
+    void setCameraBaseTransform(const Transform& transform) { _cameraBaseTransform = transform; }
+    const Transform& getCameraBaseTransform() const { return _cameraBaseTransform; }

     void setModelTransform(const Transform& transform) { _modelTransform = transform; }
     const Transform& getModelTransform() const { return _modelTransform; }

     void fadeIn() { _fadeInAlpha = true; }
     void fadeOut() { _fadeInAlpha = false; }

@@ -127,7 +130,7 @@ private:
     glm::vec3 _previousMagnifierTopRight;

     Transform _modelTransform;
     Transform _cameraTransform;
+    Transform _cameraBaseTransform;
 };

 #endif // hifi_ApplicationCompositor_h

@@ -9,6 +9,7 @@
 //

 #include "Application.h"
 #include "InterfaceLogging.h"
+#include "avatar/AvatarManager.h"

 #include "OverlayConductor.h"

@@ -29,18 +30,32 @@ void OverlayConductor::update(float dt) {
         // the camera is taken directly from the HMD.
         Transform identity;
         qApp->getApplicationCompositor().setModelTransform(identity);
         Transform t;
         t.evalFromRawMatrix(qApp->getHMDSensorPose());
         qApp->getApplicationCompositor().setCameraTransform(t);
+        qApp->getApplicationCompositor().setCameraBaseTransform(identity);
         break;
     }
     case STANDING: {
         // when standing, the overlay is at a reference position, which is set when the overlay is
-        // enabled. The camera is taken directly from the HMD in world space.
+        // enabled. The camera is taken directly from the HMD, but in world space.
+        // So the sensorToWorldMatrix must be applied.
         MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
         Transform t;
         t.evalFromRawMatrix(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
         qApp->getApplicationCompositor().setCameraTransform(t);
+        t.evalFromRawMatrix(myAvatar->getSensorToWorldMatrix());
+        qApp->getApplicationCompositor().setCameraBaseTransform(t);

+        // detect when head moves out side of sweet spot, or looks away.
+        mat4 headMat = myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose();
+        vec3 headWorldPos = extractTranslation(headMat);
+        vec3 headForward = glm::quat_cast(headMat) * glm::vec3(0.0f, 0.0f, -1.0f);
+        Transform modelXform = qApp->getApplicationCompositor().getModelTransform();
+        vec3 compositorWorldPos = modelXform.getTranslation();
+        vec3 compositorForward = modelXform.getRotation() * glm::vec3(0.0f, 0.0f, -1.0f);
+        const float MAX_COMPOSITOR_DISTANCE = 0.6f;
+        const float MAX_COMPOSITOR_ANGLE = 110.0f;
+        if (_enabled && (glm::distance(headWorldPos, compositorWorldPos) > MAX_COMPOSITOR_DISTANCE ||
+            glm::dot(headForward, compositorForward) < cosf(glm::radians(MAX_COMPOSITOR_ANGLE)))) {
+            // fade out the overlay
+            setEnabled(false);
+        }
         break;
     }
     case FLAT:

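The standing-mode fade-out above reduces to two tests against the overlay's reference transform: a distance check and a facing check. For unit vectors, dot(headForward, compositorForward) < cos(maxAngle) means the head's forward direction diverges from the overlay's by more than maxAngle; with cos(110°) ≈ -0.34, the user has to look well past sideways before the overlay drops. A minimal standalone sketch of that test (illustrative helper mirroring the check above, not part of the engine API):

    #include <cmath>
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // True when the head is either too far from the overlay's reference position
    // or facing more than maxAngleDegrees away from the overlay's forward direction.
    bool shouldFadeOutOverlay(const glm::vec3& headPos, const glm::quat& headRot,
                              const glm::vec3& overlayPos, const glm::quat& overlayRot,
                              float maxDistance = 0.6f, float maxAngleDegrees = 110.0f) {
        glm::vec3 headForward = headRot * glm::vec3(0.0f, 0.0f, -1.0f);
        glm::vec3 overlayForward = overlayRot * glm::vec3(0.0f, 0.0f, -1.0f);
        bool tooFar = glm::distance(headPos, overlayPos) > maxDistance;
        // For unit vectors, dot(a, b) == cos(angle between a and b), so a value
        // below cos(maxAngle) means the directions differ by more than maxAngle.
        bool lookingAway = glm::dot(headForward, overlayForward) < std::cos(glm::radians(maxAngleDegrees));
        return tooFar || lookingAway;
    }
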
@@ -48,8 +63,6 @@ void OverlayConductor::update(float dt) {
         break;
     }

-    // TODO: detect when head moves out side of sweet spot.
-    // TODO: set reference position when HMD is on, etc are changed.

     // process alpha fade animations
     qApp->getApplicationCompositor().update(dt);

@@ -145,7 +145,7 @@ mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) con
 }

 glm::mat4 OpenVrDisplayPlugin::getModelview(Eye eye, const mat4& baseModelview) const {
-    return baseModelview * _eyesData[eye]._eyeOffset;
+    return baseModelview * getEyePose(eye);
 }

 void OpenVrDisplayPlugin::resetSensors() {