Working on overlay refactor

This commit is contained in:
Brad Davis 2015-06-16 09:58:03 -07:00
parent 07845df4dc
commit ba13b7820c
10 changed files with 590 additions and 287 deletions

View file

@@ -321,15 +321,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_viewFrustum(),
_lastQueriedViewFrustum(),
_lastQueriedTime(usecTimestampNow()),
_mirrorViewRect(QRect(MIRROR_VIEW_LEFT_PADDING, MIRROR_VIEW_TOP_PADDING, MIRROR_VIEW_WIDTH, MIRROR_VIEW_HEIGHT)),
_firstRun("firstRun", true),
_previousScriptLocation("LastScriptLocation"),
_scriptsLocationHandle("scriptsLocation"),
_fieldOfView("fieldOfView", DEFAULT_FIELD_OF_VIEW_DEGREES),
_viewTransform(),
_scaleMirror(1.0f),
_rotateMirror(0.0f),
_raiseMirror(0.0f),
_cursorVisible(true),
_lastMouseMove(usecTimestampNow()),
_lastMouseMoveWasSimulated(false),
@@ -905,11 +901,13 @@ void Application::paintGL() {
}
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
#if 0
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setPosition(_myAvatar->getDefaultEyePosition() +
glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0) +
(_myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
#endif
}
// Update camera position
@@ -951,14 +949,13 @@ void Application::paintGL() {
_compositor.displayOverlayTexture(&renderArgs);
glPopMatrix();
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
_rearMirrorTools->render(&renderArgs, true, _glWidget->mapFromGlobal(QCursor::pos()));
} else if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
renderRearViewMirror(&renderArgs, _mirrorViewRect);
}
renderArgs._renderMode = RenderArgs::NORMAL_RENDER_MODE;
//renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
//if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
// _rearMirrorTools->render(&renderArgs, true, _glWidget->mapFromGlobal(QCursor::pos()));
//} else if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
// renderRearViewMirror(&renderArgs, _mirrorViewRect);
//}
//renderArgs._renderMode = RenderArgs::NORMAL_RENDER_MODE;
auto finalFbo = DependencyManager::get<GlowEffect>()->render(&renderArgs);
@@ -1050,10 +1047,12 @@ void Application::resizeGL() {
offscreenUi->resize(_glWidget->size());
_glWidget->makeCurrent();
#if 0
// update Stats width
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH + MIRROR_VIEW_LEFT_PADDING * 2;
Stats::getInstance()->resetWidth(_renderResolution.x, horizontalOffset);
#endif
}
void Application::updateProjectionMatrix() {
@@ -1269,37 +1268,37 @@ void Application::keyPressEvent(QKeyEvent* event) {
Menu::getInstance()->triggerOption(MenuOption::Chat);
break;
case Qt::Key_Up:
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
if (!isShifted) {
_scaleMirror *= 0.95f;
} else {
_raiseMirror += 0.05f;
}
}
break;
//case Qt::Key_Up:
// if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
// if (!isShifted) {
// _scaleMirror *= 0.95f;
// } else {
// _raiseMirror += 0.05f;
// }
// }
// break;
case Qt::Key_Down:
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
if (!isShifted) {
_scaleMirror *= 1.05f;
} else {
_raiseMirror -= 0.05f;
}
}
break;
//case Qt::Key_Down:
// if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
// if (!isShifted) {
// _scaleMirror *= 1.05f;
// } else {
// _raiseMirror -= 0.05f;
// }
// }
// break;
case Qt::Key_Left:
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
_rotateMirror += PI / 20.0f;
}
break;
//case Qt::Key_Left:
// if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
// _rotateMirror += PI / 20.0f;
// }
// break;
case Qt::Key_Right:
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
_rotateMirror -= PI / 20.0f;
}
break;
//case Qt::Key_Right:
// if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
// _rotateMirror -= PI / 20.0f;
// }
// break;
#if 0
case Qt::Key_I:
@@ -1562,11 +1561,12 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
// stop propagation
return;
}
#if 0
if (_rearMirrorTools->mousePressEvent(getMouseX(), getMouseY())) {
// stop propagation
return;
}
#endif
}
// nobody handled this - make it an action event on the _window object
@@ -1617,12 +1617,14 @@ void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
if (event->button() == Qt::LeftButton) {
_mousePressed = false;
#if 0
if (Menu::getInstance()->isOptionChecked(MenuOption::Stats) && mouseOnScreen()) {
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH;
Stats::getInstance()->checkClick(getMouseX(), getMouseY(),
getMouseDragStartedX(), getMouseDragStartedY(), horizontalOffset);
}
#endif
// fire an action end event
HFActionEvent actionEvent(HFActionEvent::endType(),
@@ -2137,7 +2139,9 @@ void Application::init() {
DependencyManager::get<AvatarManager>()->init();
_myCamera.setMode(CAMERA_MODE_FIRST_PERSON);
#if 0
_mirrorCamera.setMode(CAMERA_MODE_MIRROR);
#endif
OculusManager::connect();
if (OculusManager::isConnected()) {
@@ -2209,12 +2213,13 @@ void Application::init() {
_entityClipboardRenderer.setViewFrustum(getViewFrustum());
_entityClipboardRenderer.setTree(&_entityClipboard);
#if 0
_rearMirrorTools = new RearMirrorTools(_mirrorViewRect);
connect(_rearMirrorTools, SIGNAL(closeView()), SLOT(closeMirrorView()));
connect(_rearMirrorTools, SIGNAL(restoreView()), SLOT(restoreMirrorView()));
connect(_rearMirrorTools, SIGNAL(shrinkView()), SLOT(shrinkMirrorView()));
connect(_rearMirrorTools, SIGNAL(resetView()), SLOT(resetSensors()));
#endif
// initialize the GlowEffect with our widget
bool glow = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
@@ -3115,6 +3120,7 @@ PickRay Application::computePickRay(float x, float y) const {
return result;
}
#if 0
QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
auto primaryFramebuffer = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFramebuffer));
@@ -3133,15 +3139,14 @@ QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,
BILLBOARD_SIZE, BILLBOARD_SIZE),
true);
QImage image(BILLBOARD_SIZE, BILLBOARD_SIZE, QImage::Format_ARGB32);
glReadPixels(0, 0, BILLBOARD_SIZE, BILLBOARD_SIZE, GL_BGRA, GL_UNSIGNED_BYTE, image.bits());
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
return image;
}
#endif
ViewFrustum* Application::getViewFrustum() {
#ifdef DEBUG
@@ -3331,9 +3336,8 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
// flip x if in mirror mode (also requires reversing winding order for backface culling)
if (theCamera.getMode() == CAMERA_MODE_MIRROR) {
glScalef(-1.0f, 1.0f, 1.0f);
glFrontFace(GL_CW);
//glScalef(-1.0f, 1.0f, 1.0f);
//glFrontFace(GL_CW);
} else {
glFrontFace(GL_CCW);
}
@@ -3356,7 +3360,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
viewTransform.setTranslation(theCamera.getPosition());
viewTransform.setRotation(rotation);
if (theCamera.getMode() == CAMERA_MODE_MIRROR) {
viewTransform.setScale(Transform::Vec3(-1.0f, 1.0f, 1.0f));
//viewTransform.setScale(Transform::Vec3(-1.0f, 1.0f, 1.0f));
}
if (renderArgs->_renderSide != RenderArgs::MONO) {
glm::mat4 invView = glm::inverse(_untranslatedViewMatrix);
@@ -3623,78 +3627,83 @@ glm::vec2 Application::getScaledScreenPoint(glm::vec2 projectedPoint) {
return screenPoint;
}
void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& region, bool billboard) {
// Grab current viewport to reset it at the end
int viewport[4];
glGetIntegerv(GL_VIEWPORT, viewport);
float aspect = (float)region.width() / region.height();
float fov = MIRROR_FIELD_OF_VIEW;
// bool eyeRelativeCamera = false;
if (billboard) {
fov = BILLBOARD_FIELD_OF_VIEW; // degrees
_mirrorCamera.setPosition(_myAvatar->getPosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * BILLBOARD_DISTANCE * _myAvatar->getScale());
} else if (RearMirrorTools::rearViewZoomLevel.get() == BODY) {
_mirrorCamera.setPosition(_myAvatar->getChestPosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_BODY_DISTANCE * _myAvatar->getScale());
} else { // HEAD zoom level
// FIXME note that the positioning of the camera relative to the avatar can suffer from limited
// precision as the user's position moves further away from the origin. Thus at
// /1e7,1e7,1e7 (well outside the buildable volume) the mirror camera veers and sways
// wildly as you rotate your avatar because the floating point values are becoming
// larger, squeezing out the available digits of precision you have available at the
// human scale for camera positioning.
// Previously there was a hack to correct this using the mechanism of repositioning
// the avatar at the origin of the world for the purposes of rendering the mirror,
// but it resulted in failing to render the avatar's head model in the mirror view
// when in first person mode. Presumably this was because of some missed culling logic
// that was not accounted for in the hack.
// This was removed in commit 71e59cfa88c6563749594e25494102fe01db38e9 but could be further
// investigated in order to adapt the technique while fixing the head rendering issue,
// but the complexity of the hack suggests that a better approach is needed.
_mirrorCamera.setPosition(_myAvatar->getHead()->getEyePosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
}
_mirrorCamera.setProjection(glm::perspective(glm::radians(fov), aspect, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
_mirrorCamera.setRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI, 0.0f)));
// set the bounds of rear mirror view
if (billboard) {
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
glViewport(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
glScissor(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
} else {
// if not rendering the billboard, the region is in device independent coordinates; must convert to device
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
float ratio = QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale();
int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
glViewport(x, size.height() - y - height, width, height);
glScissor(x, size.height() - y - height, width, height);
}
bool updateViewFrustum = false;
updateProjectionMatrix(_mirrorCamera, updateViewFrustum);
glEnable(GL_SCISSOR_TEST);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// render rear mirror view
glPushMatrix();
displaySide(renderArgs, _mirrorCamera, true, billboard);
glPopMatrix();
if (!billboard) {
_rearMirrorTools->render(renderArgs, false, _glWidget->mapFromGlobal(QCursor::pos()));
}
// reset Viewport and projection matrix
glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
glDisable(GL_SCISSOR_TEST);
updateProjectionMatrix(_myCamera, updateViewFrustum);
}
//void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& region, bool billboard) {
// // Grab current viewport to reset it at the end
// int viewport[4];
// glGetIntegerv(GL_VIEWPORT, viewport);
// float aspect = (float)region.width() / region.height();
// float fov = MIRROR_FIELD_OF_VIEW;
//
// // bool eyeRelativeCamera = false;
// if (billboard) {
// fov = BILLBOARD_FIELD_OF_VIEW; // degrees
// _mirrorCamera.setPosition(_myAvatar->getPosition() +
// _myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * BILLBOARD_DISTANCE * _myAvatar->getScale());
//
// } else if (RearMirrorTools::rearViewZoomLevel.get() == BODY) {
// _mirrorCamera.setPosition(_myAvatar->getChestPosition() +
// _myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_BODY_DISTANCE * _myAvatar->getScale());
//
// } else { // HEAD zoom level
// // FIXME note that the positioning of the camera relative to the avatar can suffer from limited
// // precision as the user's position moves further away from the origin. Thus at
// // /1e7,1e7,1e7 (well outside the buildable volume) the mirror camera veers and sways
// // wildly as you rotate your avatar because the floating point values are becoming
// // larger, squeezing out the available digits of precision you have available at the
// // human scale for camera positioning.
//
// // Previously there was a hack to correct this using the mechanism of repositioning
// // the avatar at the origin of the world for the purposes of rendering the mirror,
// // but it resulted in failing to render the avatar's head model in the mirror view
// // when in first person mode. Presumably this was because of some missed culling logic
// // that was not accounted for in the hack.
//
// // This was removed in commit 71e59cfa88c6563749594e25494102fe01db38e9 but could be further
// // investigated in order to adapt the technique while fixing the head rendering issue,
// // but the complexity of the hack suggests that a better approach is needed.
// _mirrorCamera.setPosition(_myAvatar->getHead()->getEyePosition() +
// _myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
// }
// _mirrorCamera.setProjection(glm::perspective(glm::radians(fov), aspect, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
// _mirrorCamera.setRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI, 0.0f)));
//
// // set the bounds of rear mirror view
// if (billboard) {
// QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
// glViewport(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
// glScissor(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
// } else {
// // if not rendering the billboard, the region is in device independent coordinates; must convert to device
// QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
// float ratio = QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale();
// int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
// glViewport(x, size.height() - y - height, width, height);
// glScissor(x, size.height() - y - height, width, height);
// }
// bool updateViewFrustum = false;
// updateProjectionMatrix(_mirrorCamera, updateViewFrustum);
// glEnable(GL_SCISSOR_TEST);
// glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//
// // render rear mirror view
// glMatrixMode(GL_MODELVIEW);
// glPushMatrix();
// glLoadIdentity();
// glLoadMatrixf(glm::value_ptr(glm::mat4_cast(_mirrorCamera.getOrientation()) * glm::translate(glm::mat4(), _mirrorCamera.getPosition())));
// renderArgs->_context->syncCache();
// displaySide(renderArgs, _mirrorCamera, true, billboard);
// glMatrixMode(GL_MODELVIEW);
// glPopMatrix();
//
// if (!billboard) {
// _rearMirrorTools->render(renderArgs, false, _glWidget->mapFromGlobal(QCursor::pos()));
// }
//
// // reset Viewport and projection matrix
// glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
// glDisable(GL_SCISSOR_TEST);
// updateProjectionMatrix(_myCamera, updateViewFrustum);
//}
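Reviewer note on the precision FIXME carried over above: near coordinates like 1e7 the spacing between adjacent single-precision floats is a full unit, so the sub-meter offsets used to place the mirror camera get rounded away, which is exactly the veering and swaying the comment describes. A small standalone C++ sketch (not part of this commit) that demonstrates the effect:

    // Illustration of the float precision problem described in the FIXME above.
    #include <cmath>
    #include <cstdio>

    int main() {
        float nearOrigin = 100.0f;   // typical human-scale position
        float farAway = 1.0e7f;      // well outside the buildable volume

        // Size of one representable step (ULP) at each magnitude.
        std::printf("step near origin: %g\n", std::nextafter(nearOrigin, 2.0f * nearOrigin) - nearOrigin); // ~7.6e-06
        std::printf("step at 1e7:      %g\n", std::nextafter(farAway, 2.0f * farAway) - farAway);          // 1

        // A 10 cm camera offset survives near the origin but vanishes at 1e7.
        std::printf("100 + 0.1f moves the camera: %s\n", (nearOrigin + 0.1f != nearOrigin) ? "yes" : "no");
        std::printf("1e7 + 0.1f moves the camera: %s\n", (farAway + 0.1f != farAway) ? "yes" : "no");
        return 0;
    }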
void Application::resetSensors() {
DependencyManager::get<Faceshift>()->reset();

View file

@@ -109,15 +109,6 @@ static const QString FST_EXTENSION = ".fst";
static const float BILLBOARD_FIELD_OF_VIEW = 30.0f; // degrees
static const float BILLBOARD_DISTANCE = 5.56f; // meters
static const int MIRROR_VIEW_TOP_PADDING = 5;
static const int MIRROR_VIEW_LEFT_PADDING = 10;
static const int MIRROR_VIEW_WIDTH = 265;
static const int MIRROR_VIEW_HEIGHT = 215;
static const float MIRROR_FULLSCREEN_DISTANCE = 0.389f;
static const float MIRROR_REARVIEW_DISTANCE = 0.722f;
static const float MIRROR_REARVIEW_BODY_DISTANCE = 2.56f;
static const float MIRROR_FIELD_OF_VIEW = 30.0f;
static const quint64 TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS = 1 * USECS_PER_SECOND;
static const QString INFO_HELP_PATH = "html/interface-welcome.html";
@@ -482,6 +473,7 @@ private slots:
void faceTrackerMuteToggled();
void setCursorVisible(bool visible);
//void renderRearViewMirror(RenderArgs* renderArgs, const QRect& region, bool billboard = false);
private:
void resetCamerasOnResizeGL(Camera& camera, const glm::uvec2& size);
@@ -516,7 +508,6 @@ private:
glm::vec3 getSunDirection();
void updateShadowMap(RenderArgs* renderArgs);
void renderRearViewMirror(RenderArgs* renderArgs, const QRect& region, bool billboard = false);
void setMenuShortcutsEnabled(bool enabled);
static void attachNewHeadToNode(Node *newNode);
@@ -564,31 +555,21 @@ private:
OctreeQuery _octreeQuery; // NodeData derived class for querying octree cells from octree servers
KeyboardMouseDevice _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
UserInputMapper _userInputMapper; // User input mapper allowing different real devices to be mapped to the action channels that the application has to offer
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
Camera _myCamera; // My view onto the world
Camera _mirrorCamera; // Camera for mirror view
QRect _mirrorViewRect;
RearMirrorTools* _rearMirrorTools;
KeyboardMouseDevice _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
UserInputMapper _userInputMapper; // User input mapper allowing different real devices to be mapped to the action channels that the application has to offer
MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
Camera _myCamera; // My view onto the world
Setting::Handle<bool> _firstRun;
Setting::Handle<QString> _previousScriptLocation;
Setting::Handle<QString> _scriptsLocationHandle;
Setting::Handle<float> _fieldOfView;
Setting::Handle<bool> _firstRun;
Setting::Handle<QString> _previousScriptLocation;
Setting::Handle<QString> _scriptsLocationHandle;
Setting::Handle<float> _fieldOfView;
Transform _viewTransform;
glm::mat4 _untranslatedViewMatrix;
glm::vec3 _viewMatrixTranslation;
glm::mat4 _projectionMatrix;
float _scaleMirror;
float _rotateMirror;
float _raiseMirror;
static const int CASCADED_SHADOW_MATRIX_COUNT = 4;
glm::mat4 _shadowMatrices[CASCADED_SHADOW_MATRIX_COUNT];
glm::vec3 _shadowDistances;

View file

@@ -1458,9 +1458,10 @@ void MyAvatar::maybeUpdateBillboard() {
return;
}
}
/*
gpu::Context context(new gpu::GLBackend());
RenderArgs renderArgs(&context);
QImage image = Application::getInstance()->renderAvatarBillboard(&renderArgs);
QImage image = qApp->renderAvatarBillboard(&renderArgs);
_billboard.clear();
QBuffer buffer(&_billboard);
buffer.open(QIODevice::WriteOnly);
@@ -1468,6 +1469,7 @@ void MyAvatar::maybeUpdateBillboard() {
_billboardValid = true;
sendBillboardPacket();
*/
}
void MyAvatar::increaseSize() {

View file

@@ -41,6 +41,7 @@
static const float MAG_SPEED = 0.08f;
static const quint64 MSECS_TO_USECS = 1000ULL;
static const quint64 TOOLTIP_DELAY = 2000000ULL;
static const float WHITE_TEXT[] = { 0.93f, 0.93f, 0.93f };
static const float RETICLE_COLOR[] = { 0.0f, 198.0f / 255.0f, 244.0f / 255.0f };
@@ -57,6 +58,8 @@ static const glm::vec2 MOUSE_RANGE(MOUSE_YAW_RANGE, MOUSE_PITCH_RANGE);
static gpu::BufferPointer _hemiVertices;
static gpu::BufferPointer _hemiIndices;
static int _hemiIndexCount{ 0 };
EntityItemID ApplicationCompositor::_noItemId;
// Return a point's cartesian coordinates on a sphere from pitch and yaw
glm::vec3 getPoint(float yaw, float pitch) {
@@ -132,11 +135,32 @@ ApplicationCompositor::ApplicationCompositor() {
_reticleQuad = geometryCache->allocateID();
_magnifierQuad = geometryCache->allocateID();
_audioRedQuad = geometryCache->allocateID();
_audioGreenQuad = geometryCache->allocateID();
_audioBlueQuad = geometryCache->allocateID();
_domainStatusBorder = geometryCache->allocateID();
_magnifierBorder = geometryCache->allocateID();
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
connect(entityScriptingInterface.data(), &EntityScriptingInterface::hoverEnterEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (_hoverItemId != entityItemID) {
_hoverItemId = entityItemID;
_hoverItemEnterUsecs = usecTimestampNow();
auto properties = entityScriptingInterface->getEntityProperties(_hoverItemId);
_hoverItemHref = properties.getHref();
auto cursor = Cursor::Manager::instance().getCursor();
if (!_hoverItemHref.isEmpty()) {
cursor->setIcon(Cursor::Icon::LINK);
} else {
cursor->setIcon(Cursor::Icon::DEFAULT);
}
}
});
connect(entityScriptingInterface.data(), &EntityScriptingInterface::hoverLeaveEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (_hoverItemId == entityItemID) {
_hoverItemId = _noItemId;
_hoverItemHref.clear();
auto cursor = Cursor::Manager::instance().getCursor();
cursor->setIcon(Cursor::Icon::DEFAULT);
}
});
}
ApplicationCompositor::~ApplicationCompositor() {
@@ -154,13 +178,26 @@ void ApplicationCompositor::bindCursorTexture(gpu::Batch& batch, uint8_t cursorI
batch.setUniformTexture(0, _cursors[iconId]);
}
// Draws the FBO texture for the screen
void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
if (_alpha == 0.0f) {
return;
}
vec2 canvasSize = qApp->getCanvasSize();
_textureAspectRatio = aspect(canvasSize);
if (_hoverItemId != _noItemId) {
quint64 hoverDuration = usecTimestampNow() - _hoverItemEnterUsecs;
if (!_hoverItemHref.isEmpty() && hoverDuration > TOOLTIP_DELAY) {
// TODO Enable and position the tooltip
}
}
//Handle fading and deactivation/activation of UI
gpu::Batch batch;
GLuint texture = qApp->getApplicationOverlay().getOverlayTexture();
if (!texture) {
return;
@@ -168,7 +205,6 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
renderArgs->_context->syncCache();
auto geometryCache = DependencyManager::get<GeometryCache>();
gpu::Batch batch;
geometryCache->useSimpleDrawPipeline(batch);
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
@@ -179,17 +215,12 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
geometryCache->renderUnitQuad(batch, vec4(vec3(1), _alpha));
// Doesn't actually render
renderPointers();
renderPointers(batch);
//draw the mouse pointer
vec2 canvasSize = qApp->getCanvasSize();
// Get the mouse coordinates and convert to NDC [-1, 1]
vec2 mousePosition = vec2(qApp->getMouse());
mousePosition /= canvasSize;
mousePosition *= 2.0f;
mousePosition -= 1.0f;
vec2 mousePosition = toNormalizedDeviceScale(vec2(qApp->getMouse()), canvasSize);
// Invert the Y axis
mousePosition.y *= -1.0f;
Transform model;
@@ -250,7 +281,7 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
drawSphereSection(batch);
// Doesn't actually render
renderPointers();
renderPointers(batch);
vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize);
bindCursorTexture(batch);
@@ -360,7 +391,7 @@ bool ApplicationCompositor::calculateRayUICollisionPoint(const glm::vec3& positi
}
//Renders optional pointers
void ApplicationCompositor::renderPointers() {
void ApplicationCompositor::renderPointers(gpu::Batch& batch) {
//glEnable(GL_TEXTURE_2D);
//glEnable(GL_BLEND);
//glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
@@ -374,7 +405,7 @@ void ApplicationCompositor::renderPointers() {
_lastMouseMove = usecTimestampNow();
}
QPoint position = QPoint(qApp->getTrueMouseX(), qApp->getTrueMouseY());
static const int MAX_IDLE_TIME = 3;
if (_reticlePosition[MOUSE] != position) {
_lastMouseMove = usecTimestampNow();
@@ -383,11 +414,11 @@ void ApplicationCompositor::renderPointers() {
//OculusManager::getEulerAngles(yaw, pitch, roll);
glm::quat orientation = qApp->getHeadOrientation(); // (glm::vec3(pitch, yaw, roll));
glm::vec3 result;
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
if (calculateRayUICollisionPoint(myAvatar->getEyePosition(),
myAvatar->getOrientation() * orientation * IDENTITY_FRONT,
result)) {
myAvatar->getOrientation() * orientation * IDENTITY_FRONT,
result)) {
glm::vec3 lookAtDirection = glm::inverse(myAvatar->getOrientation()) * (result - myAvatar->getDefaultEyePosition());
glm::vec2 spericalPos = directionToSpherical(glm::normalize(lookAtDirection));
glm::vec2 screenPos = sphericalToScreen(spericalPos);
@@ -398,7 +429,7 @@ void ApplicationCompositor::renderPointers() {
qDebug() << "No collision point";
}
}
_reticlePosition[MOUSE] = position;
_reticleActive[MOUSE] = true;
_magActive[MOUSE] = _magnifier;
@@ -409,13 +440,14 @@ void ApplicationCompositor::renderPointers() {
//only render controller pointer if we aren't already rendering a mouse pointer
_reticleActive[MOUSE] = false;
_magActive[MOUSE] = false;
renderControllerPointers();
renderControllerPointers(batch);
}
//glBindTexture(GL_TEXTURE_2D, 0);
//glDisable(GL_TEXTURE_2D);
}
void ApplicationCompositor::renderControllerPointers() {
void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
//Static variables used for storing controller state
@@ -526,7 +558,7 @@ void ApplicationCompositor::renderControllerPointers() {
}
//Renders a small magnification of the currently bound texture at the coordinates
void ApplicationCompositor::renderMagnifier(const glm::vec2& magPos, float sizeMult, bool showBorder) {
void ApplicationCompositor::renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder) {
if (!_magnifier) {
return;
}
@@ -755,3 +787,128 @@ glm::vec2 ApplicationCompositor::screenToOverlay(const glm::vec2& screenPos) con
glm::vec2 ApplicationCompositor::overlayToScreen(const glm::vec2& overlayPos) const {
return sphericalToScreen(overlayToSpherical(overlayPos));
}
#if 0
gpu::PipelinePointer ApplicationOverlay::getDrawPipeline() {
if (!_standardDrawPipeline) {
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(standardTransformPNTC_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(standardDrawTexture_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
gpu::Shader::makeProgram((*program));
auto state = gpu::StatePointer(new gpu::State());
// enable decal blend
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_standardDrawPipeline.reset(gpu::Pipeline::create(program, state));
}
return _standardDrawPipeline;
}
// Draws the FBO texture for the screen
void ApplicationOverlay::displayOverlayTexture(RenderArgs* renderArgs) {
if (_alpha == 0.0f) {
return;
}
renderArgs->_context->syncCache();
gpu::Batch batch;
Transform model;
//DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, true);
batch.setPipeline(getDrawPipeline());
batch.setModelTransform(Transform());
batch.setProjectionTransform(mat4());
batch.setViewTransform(model);
batch._glBindTexture(GL_TEXTURE_2D, _framebufferObject->texture());
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
DependencyManager::get<GeometryCache>()->renderUnitQuad(batch, vec4(vec3(1), _alpha));
//draw the mouse pointer
glm::vec2 canvasSize = qApp->getCanvasSize();
// Get the mouse coordinates and convert to NDC [-1, 1]
vec2 mousePosition = vec2(qApp->getMouseX(), qApp->getMouseY());
mousePosition /= canvasSize;
mousePosition *= 2.0f;
mousePosition -= 1.0f;
mousePosition.y *= -1.0f;
model.setTranslation(vec3(mousePosition, 0));
glm::vec2 mouseSize = CURSOR_PIXEL_SIZE / canvasSize;
model.setScale(vec3(mouseSize, 1.0f));
batch.setModelTransform(model);
bindCursorTexture(batch);
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
DependencyManager::get<GeometryCache>()->renderUnitQuad(batch, vec4(1));
renderArgs->_context->render(batch);
}
// Draws the FBO texture for Oculus rift.
void ApplicationOverlay::displayOverlayTextureHmd(RenderArgs* renderArgs, Camera& whichCamera) {
if (_alpha == 0.0f) {
return;
}
renderArgs->_context->syncCache();
gpu::Batch batch;
batch.setPipeline(getDrawPipeline());
batch._glDisable(GL_DEPTH_TEST);
batch._glDisable(GL_CULL_FACE);
batch._glBindTexture(GL_TEXTURE_2D, _framebufferObject->texture());
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
batch._glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
batch.setProjectionTransform(whichCamera.getProjection());
batch.setViewTransform(Transform());
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
const quat& avatarOrientation = myAvatar->getOrientation();
quat hmdOrientation = qApp->getCamera()->getHmdRotation();
vec3 hmdPosition = glm::inverse(avatarOrientation) * qApp->getCamera()->getHmdPosition();
mat4 overlayXfm = glm::mat4_cast(glm::inverse(hmdOrientation)) * glm::translate(mat4(), -hmdPosition);
batch.setModelTransform(Transform(overlayXfm));
drawSphereSection(batch);
bindCursorTexture(batch);
auto geometryCache = DependencyManager::get<GeometryCache>();
vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize);
//Controller Pointers
for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {
PalmData& palm = myAvatar->getHand()->getPalms()[i];
if (palm.isActive()) {
glm::vec2 polar = getPolarCoordinates(palm);
// Convert to quaternion
mat4 pointerXfm = glm::mat4_cast(quat(vec3(polar.y, -polar.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
// Render reticle at location
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
}
//Mouse Pointer
if (_reticleActive[MOUSE]) {
glm::vec2 projection = screenToSpherical(glm::vec2(_reticlePosition[MOUSE].x(),
_reticlePosition[MOUSE].y()));
mat4 pointerXfm = glm::mat4_cast(quat(vec3(-projection.y, projection.x, 0.0f))) * glm::translate(mat4(), vec3(0, 0, -1));
mat4 reticleXfm = overlayXfm * pointerXfm;
reticleXfm = glm::scale(reticleXfm, reticleScale);
batch.setModelTransform(reticleXfm);
geometryCache->renderUnitQuad(batch, glm::vec4(1), _reticleQuad);
}
renderArgs->_context->render(batch);
}
#endif

View file

@@ -6,12 +6,13 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ApplicationOverlayCompositor_h
#define hifi_ApplicationOverlayCompositor_h
#ifndef hifi_ApplicationCompositor_h
#define hifi_ApplicationCompositor_h
#include <QObject>
#include <cstdint>
#include <EntityItemID.h>
#include <GeometryCache.h>
#include <GLMHelpers.h>
#include <gpu/Batch.h>
@@ -61,7 +62,7 @@ public:
glm::vec2 screenToOverlay(const glm::vec2 & screenPos) const;
glm::vec2 overlayToScreen(const glm::vec2 & overlayPos) const;
void computeHmdPickRay(glm::vec2 cursorPos, glm::vec3& origin, glm::vec3& direction) const;
GLuint getOverlayTexture();
GLuint getOverlayTexture() const;
static glm::vec2 directionToSpherical(const glm::vec3 & direction);
static glm::vec3 sphericalToDirection(const glm::vec2 & sphericalPos);
@@ -74,12 +75,17 @@ private:
void buildHemiVertices(const float fov, const float aspectRatio, const int slices, const int stacks);
void drawSphereSection(gpu::Batch& batch);
void renderPointers();
void renderMagnifier(const glm::vec2& magPos, float sizeMult, bool showBorder);
void renderControllerPointers();
void renderPointersOculus();
void renderPointers(gpu::Batch& batch);
void renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder);
void renderControllerPointers(gpu::Batch& batch);
void renderPointersOculus(gpu::Batch& batch);
// Support for hovering and tooltips
static EntityItemID _noItemId;
EntityItemID _hoverItemId{ _noItemId };
QString _hoverItemHref;
quint64 _hoverItemEnterUsecs{ 0 };
float _hmdUIAngularSize = DEFAULT_HMD_UI_ANGULAR_SIZE;
float _textureFov{ glm::radians(DEFAULT_HMD_UI_ANGULAR_SIZE) };
float _textureAspectRatio{ 1.0f };
@@ -100,10 +106,6 @@ private:
int _reticleQuad;
int _magnifierQuad;
int _audioRedQuad;
int _audioGreenQuad;
int _audioBlueQuad;
int _domainStatusBorder;
int _magnifierBorder;
int _previousBorderWidth{ -1 };
@@ -115,4 +117,4 @@ private:
glm::vec3 _previousMagnifierTopRight;
};
#endif // hifi_ApplicationOverlayCompositor_h
#endif // hifi_ApplicationCompositor_h

View file

@@ -39,15 +39,21 @@
const float WHITE_TEXT[] = { 0.93f, 0.93f, 0.93f };
const int AUDIO_METER_GAP = 5;
const int MUTE_ICON_PADDING = 10;
const float CONNECTION_STATUS_BORDER_COLOR[] = { 1.0f, 0.0f, 0.0f };
const vec4 CONNECTION_STATUS_BORDER_COLOR{ 1.0f, 0.0f, 0.0f, 0.8f };
const float CONNECTION_STATUS_BORDER_LINE_WIDTH = 4.0f;
static const int MIRROR_VIEW_TOP_PADDING = 5;
static const int MIRROR_VIEW_LEFT_PADDING = 10;
static const int MIRROR_VIEW_WIDTH = 265;
static const int MIRROR_VIEW_HEIGHT = 215;
static const int STATS_HORIZONTAL_OFFSET = MIRROR_VIEW_WIDTH + MIRROR_VIEW_LEFT_PADDING * 2;
static const float MIRROR_FULLSCREEN_DISTANCE = 0.389f;
static const float MIRROR_REARVIEW_DISTANCE = 0.722f;
static const float MIRROR_REARVIEW_BODY_DISTANCE = 2.56f;
static const float MIRROR_FIELD_OF_VIEW = 30.0f;
ApplicationOverlay::ApplicationOverlay() :
_alpha(1.0f),
_trailingAudioLoudness(0.0f),
_previousBorderWidth(-1),
_previousBorderHeight(-1),
_framebufferObject(nullptr)
_mirrorViewRect(QRect(MIRROR_VIEW_LEFT_PADDING, MIRROR_VIEW_TOP_PADDING, MIRROR_VIEW_WIDTH, MIRROR_VIEW_HEIGHT))
{
auto geometryCache = DependencyManager::get<GeometryCache>();
_audioRedQuad = geometryCache->allocateID();
@@ -82,67 +88,70 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
Overlays& overlays = qApp->getOverlays();
glm::vec2 size = qApp->getCanvasSize();
// TODO Handle fading and deactivation/activation of UI
// Render 2D overlay
// TODO First render the mirror to the mirror FBO
// Now render the overlay components together into a single texture
gpu::Batch batch;
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->useSimpleDrawPipeline(batch);
static const float NEAR_CLIP = -10000;
static const float FAR_CLIP = 10000;
batch._glDisable(GL_DEPTH_TEST);
batch._glDisable(GL_LIGHTING);
batch._glEnable(GL_BLEND);
renderAudioMeter(batch);
renderCameraToggle(batch);
renderStatsAndLogs(batch);
renderDomainConnectionStatusBorder(batch);
renderQmlUi(batch);
// Execute the batch into our framebuffer
buildFramebufferObject();
_overlayFramebuffer->bind();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, size.x, size.y);
mat4 legacyProjection = glm::ortho<float>(0, size.x, size.y, 0, NEAR_CLIP, FAR_CLIP);
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadMatrixf(glm::value_ptr(legacyProjection));
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
buildFramebufferObject();
_framebufferObject->bind();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, size.x, size.y);
glMatrixMode(GL_PROJECTION);
glPushMatrix(); {
const float NEAR_CLIP = -10000;
const float FAR_CLIP = 10000;
glLoadIdentity();
glOrtho(0, size.x, size.y, 0, NEAR_CLIP, FAR_CLIP);
glMatrixMode(GL_MODELVIEW);
renderAudioMeter();
renderCameraToggle();
renderStatsAndLogs();
// give external parties a chance to hook in
emit qApp->renderingOverlay();
overlays.renderHUD(renderArgs);
renderDomainConnectionStatusBorder();
if (_uiTexture) {
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, _uiTexture);
DependencyManager::get<GeometryCache>()->renderUnitQuad();
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
}
glLoadIdentity();
glMatrixMode(GL_PROJECTION);
} glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glEnable(GL_DEPTH_TEST);
glEnable(GL_LIGHTING);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glPushMatrix();
glLoadIdentity();
_framebufferObject->release();
// give external parties a chance to hook in
//emit qApp->renderingOverlay();
overlays.renderHUD(renderArgs);
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
renderArgs->_context->syncCache();
renderArgs->_context->render(batch);
_overlayFramebuffer->release();
}
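The rewritten renderOverlay() above is the core of this refactor: every 2D overlay element now records its draw calls into a shared gpu::Batch, and the batch is executed once into the overlay framebuffer instead of issuing immediate-mode GL along the way. A condensed sketch of that flow, using only calls that already appear in this hunk (it is not compilable on its own, since it leans on Application/Overlay members from the diff):

    // Record-then-execute pattern from the new renderOverlay(), condensed.
    gpu::Batch batch;                                            // 1. record; nothing touches GL yet
    auto geometryCache = DependencyManager::get<GeometryCache>();
    geometryCache->useSimpleDrawPipeline(batch);
    renderAudioMeter(batch);                                     // helpers only append commands to the batch
    renderCameraToggle(batch);
    renderStatsAndLogs(batch);
    renderDomainConnectionStatusBorder(batch);
    renderQmlUi(batch);

    buildFramebufferObject();                                    // 2. ensure the FBO matches the canvas size
    _overlayFramebuffer->bind();
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    renderArgs->_context->syncCache();                           // 3. sync legacy GL state, then replay the batch
    renderArgs->_context->render(batch);
    _overlayFramebuffer->release();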
void ApplicationOverlay::renderCameraToggle() {
void ApplicationOverlay::renderQmlUi(gpu::Batch& batch) {
if (_uiTexture) {
batch.setProjectionTransform(mat4());
batch.setModelTransform(mat4());
batch._glBindTexture(GL_TEXTURE_2D, _uiTexture);
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->renderUnitQuad(batch, glm::vec4(1));
}
}
void ApplicationOverlay::renderCameraToggle(gpu::Batch& batch) {
/*
if (Menu::getInstance()->isOptionChecked(MenuOption::NoFaceTracking)) {
return;
}
@@ -158,9 +167,11 @@ void ApplicationOverlay::renderCameraToggle() {
}
DependencyManager::get<CameraToolBox>()->render(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, boxed);
*/
}
void ApplicationOverlay::renderAudioMeter() {
void ApplicationOverlay::renderAudioMeter(gpu::Batch& batch) {
/*
auto audio = DependencyManager::get<AudioClient>();
// Audio VU Meter and Mute Icon
@@ -274,9 +285,90 @@ void ApplicationOverlay::renderAudioMeter() {
audioLevel, AUDIO_METER_HEIGHT, quadColor,
_audioBlueQuad);
}
*/
}
void ApplicationOverlay::renderStatsAndLogs() {
void ApplicationOverlay::renderRearView(gpu::Batch& batch) {
// // Grab current viewport to reset it at the end
// int viewport[4];
// glGetIntegerv(GL_VIEWPORT, viewport);
// float aspect = (float)region.width() / region.height();
// float fov = MIRROR_FIELD_OF_VIEW;
// // bool eyeRelativeCamera = false;
// if (billboard) {
// fov = BILLBOARD_FIELD_OF_VIEW; // degrees
// _mirrorCamera.setPosition(_myAvatar->getPosition() +
// _myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * BILLBOARD_DISTANCE * _myAvatar->getScale());
// } else if (RearMirrorTools::rearViewZoomLevel.get() == BODY) {
// _mirrorCamera.setPosition(_myAvatar->getChestPosition() +
// _myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_BODY_DISTANCE * _myAvatar->getScale());
// } else { // HEAD zoom level
// // FIXME note that the positioning of the camera relative to the avatar can suffer from limited
// // precision as the user's position moves further away from the origin. Thus at
// // /1e7,1e7,1e7 (well outside the buildable volume) the mirror camera veers and sways
// // wildly as you rotate your avatar because the floating point values are becoming
// // larger, squeezing out the available digits of precision you have available at the
// // human scale for camera positioning.
// // Previously there was a hack to correct this using the mechanism of repositioning
// // the avatar at the origin of the world for the purposes of rendering the mirror,
// // but it resulted in failing to render the avatar's head model in the mirror view
// // when in first person mode. Presumably this was because of some missed culling logic
// // that was not accounted for in the hack.
// // This was removed in commit 71e59cfa88c6563749594e25494102fe01db38e9 but could be further
// // investigated in order to adapt the technique while fixing the head rendering issue,
// // but the complexity of the hack suggests that a better approach is needed.
// _mirrorCamera.setPosition(_myAvatar->getHead()->getEyePosition() +
// _myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
// }
// _mirrorCamera.setProjection(glm::perspective(glm::radians(fov), aspect, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
// _mirrorCamera.setRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI, 0.0f)));
// // set the bounds of rear mirror view
// if (billboard) {
// QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
// glViewport(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
// glScissor(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
// } else {
// // if not rendering the billboard, the region is in device independent coordinates; must convert to device
// QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
// float ratio = QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale();
// int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
// glViewport(x, size.height() - y - height, width, height);
// glScissor(x, size.height() - y - height, width, height);
// }
// bool updateViewFrustum = false;
// updateProjectionMatrix(_mirrorCamera, updateViewFrustum);
// glEnable(GL_SCISSOR_TEST);
// glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// // render rear mirror view
// glMatrixMode(GL_MODELVIEW);
// glPushMatrix();
// glLoadIdentity();
// glLoadMatrixf(glm::value_ptr(glm::mat4_cast(_mirrorCamera.getOrientation()) * glm::translate(glm::mat4(), _mirrorCamera.getPosition())));
// renderArgs->_context->syncCache();
// displaySide(renderArgs, _mirrorCamera, true, billboard);
// glMatrixMode(GL_MODELVIEW);
// glPopMatrix();
// if (!billboard) {
// _rearMirrorTools->render(renderArgs, false, _glWidget->mapFromGlobal(QCursor::pos()));
// }
// // reset Viewport and projection matrix
// glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
// glDisable(GL_SCISSOR_TEST);
// updateProjectionMatrix(_myCamera, updateViewFrustum);
//}
}
void ApplicationOverlay::renderStatsAndLogs(gpu::Batch& batch) {
/*
Application* application = Application::getInstance();
QSharedPointer<BandwidthRecorder> bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
@@ -284,7 +376,7 @@ void ApplicationOverlay::renderStatsAndLogs() {
NodeBounds& nodeBoundsDisplay = application->getNodeBoundsDisplay();
// Display stats and log text onscreen
glLineWidth(1.0f);
batch._glLineWidth(1.0f);
glPointSize(1.0f);
// Determine whether to compute timing details
@@ -297,10 +389,9 @@ void ApplicationOverlay::renderStatsAndLogs() {
if (Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH + MIRROR_VIEW_LEFT_PADDING * 2;
int voxelPacketsToProcess = octreePacketProcessor.packetsToProcessCount();
// Onscreen text about position, servers, etc
Stats::getInstance()->display(WHITE_TEXT, horizontalOffset, application->getFps(),
Stats::getInstance()->display(WHITE_TEXT, STATS_HORIZONTAL_OFFSET, application->getFps(),
bandwidthRecorder->getCachedTotalAverageInputPacketsPerSecond(),
bandwidthRecorder->getCachedTotalAverageOutputPacketsPerSecond(),
bandwidthRecorder->getCachedTotalAverageInputKilobitsPerSecond(),
@@ -320,56 +411,73 @@ void ApplicationOverlay::renderStatsAndLogs() {
0.30f, 0.0f, 0, frameTimer.toUtf8().constData(), WHITE_TEXT);
}
nodeBoundsDisplay.drawOverlay();
*/
}
void ApplicationOverlay::renderDomainConnectionStatusBorder() {
void ApplicationOverlay::renderDomainConnectionStatusBorder(gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
std::once_flag once;
std::call_once(once, [&] {
QVector<vec2> points;
static const float B = 0.99;
points.push_back(vec2(-B));
points.push_back(vec2(B, -B));
points.push_back(vec2(B));
points.push_back(vec2(-B, B));
points.push_back(vec2(-B));
geometryCache->updateVertices(_domainStatusBorder, points, CONNECTION_STATUS_BORDER_COLOR);
});
auto nodeList = DependencyManager::get<NodeList>();
if (nodeList && !nodeList->getDomainHandler().isConnected()) {
auto geometryCache = DependencyManager::get<GeometryCache>();
auto canvasSize = qApp->getCanvasSize();
if ((int)canvasSize.x != _previousBorderWidth || (int)canvasSize.y != _previousBorderHeight) {
glm::vec4 color(CONNECTION_STATUS_BORDER_COLOR[0],
CONNECTION_STATUS_BORDER_COLOR[1],
CONNECTION_STATUS_BORDER_COLOR[2], 1.0f);
batch.setProjectionTransform(mat4());
batch.setModelTransform(mat4());
batch.setUniformTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
batch._glLineWidth(CONNECTION_STATUS_BORDER_LINE_WIDTH);
QVector<glm::vec2> border;
border << glm::vec2(0, 0);
border << glm::vec2(0, canvasSize.y);
border << glm::vec2(canvasSize.x, canvasSize.y);
border << glm::vec2(canvasSize.x, 0);
border << glm::vec2(0, 0);
geometryCache->updateVertices(_domainStatusBorder, border, color);
_previousBorderWidth = canvasSize.x;
_previousBorderHeight = canvasSize.y;
}
// TODO animate the disconnect border for some excitement while not connected?
//double usecs = usecTimestampNow();
//double secs = usecs / 1000000.0;
//float scaleAmount = 1.0f + (0.01f * sin(secs * 5.0f));
//batch.setModelTransform(glm::scale(mat4(), vec3(scaleAmount)));
glLineWidth(CONNECTION_STATUS_BORDER_LINE_WIDTH);
geometryCache->renderVertices(gpu::LINE_STRIP, _domainStatusBorder);
geometryCache->renderVertices(batch, gpu::LINE_STRIP, _domainStatusBorder);
}
}
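One observation on the std::call_once guard above: a std::once_flag only remembers that it has fired for as long as the flag object itself lives, so it normally needs static or member lifetime; a flag declared as a plain local, as in this hunk, is reconstructed on every call and the lambda runs every time. A minimal standard-library-only sketch of the usual pattern (names here are hypothetical, not from the commit):

    #include <mutex>
    #include <cstdio>

    void buildBorderGeometryOnce() {
        static std::once_flag built;                 // static: persists across calls
        std::call_once(built, [] {
            std::puts("building border vertices");   // runs exactly once
        });
    }

    int main() {
        buildBorderGeometryOnce();   // prints
        buildBorderGeometryOnce();   // silent: the flag remembers the first call
        return 0;
    }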
GLuint ApplicationOverlay::getOverlayTexture() {
if (!_framebufferObject) {
if (!_overlayFramebuffer) {
return 0;
}
return _framebufferObject->texture();
return _overlayFramebuffer->texture();
}
void ApplicationOverlay::buildFramebufferObject() {
if (!_mirrorFramebuffer) {
_mirrorFramebuffer = new QOpenGLFramebufferObject(QSize(MIRROR_VIEW_WIDTH, MIRROR_VIEW_HEIGHT), QOpenGLFramebufferObject::Depth);
glBindTexture(GL_TEXTURE_2D, _mirrorFramebuffer->texture());
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
GLfloat borderColor[4] = { 0.0f, 0.0f, 0.0f, 0.0f };
glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, borderColor);
glBindTexture(GL_TEXTURE_2D, 0);
}
auto canvasSize = qApp->getCanvasSize();
QSize fboSize = QSize(canvasSize.x, canvasSize.y);
if (_framebufferObject != NULL && fboSize == _framebufferObject->size()) {
if (_overlayFramebuffer && fboSize == _overlayFramebuffer->size()) {
// Already built
return;
}
if (_framebufferObject != NULL) {
delete _framebufferObject;
if (_overlayFramebuffer) {
delete _overlayFramebuffer;
}
_framebufferObject = new QOpenGLFramebufferObject(fboSize, QOpenGLFramebufferObject::Depth);
_overlayFramebuffer = new QOpenGLFramebufferObject(fboSize, QOpenGLFramebufferObject::Depth);
glBindTexture(GL_TEXTURE_2D, getOverlayTexture());
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
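The buildFramebufferObject() changes above introduce two render targets: a fixed-size mirror FBO and an overlay FBO that is torn down and rebuilt whenever the canvas size changes. A self-contained sketch of that rebuild-on-resize policy, assuming only Qt's QOpenGLFramebufferObject API and a current GL context (the class and member names here are illustrative, not from the commit):

    #include <QOpenGLFramebufferObject>
    #include <QSize>

    class OverlaySurface {
    public:
        // Recreate the FBO only when the requested canvas size actually changes.
        void ensureFramebuffer(const QSize& canvasSize) {
            if (_fbo && _fbo->size() == canvasSize) {
                return;                               // already built at this size
            }
            delete _fbo;                              // safe when _fbo is nullptr
            _fbo = new QOpenGLFramebufferObject(canvasSize, QOpenGLFramebufferObject::Depth);
        }
        GLuint texture() const { return _fbo ? _fbo->texture() : 0; }
    private:
        QOpenGLFramebufferObject* _fbo{ nullptr };
    };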

View file

@@ -29,13 +29,16 @@ public:
private:
void renderAudioMeter();
void renderCameraToggle();
void renderStatsAndLogs();
void renderDomainConnectionStatusBorder();
void renderAudioMeter(gpu::Batch& batch);
void renderCameraToggle(gpu::Batch& batch);
void renderStatsAndLogs(gpu::Batch& batch);
void renderDomainConnectionStatusBorder(gpu::Batch& batch);
void renderRearView(gpu::Batch& batch);
void renderQmlUi(gpu::Batch& batch);
void buildFramebufferObject();
float _alpha = 1.0f;
float _trailingAudioLoudness;
float _alpha{ 1.0f };
float _trailingAudioLoudness{ 0.0f };
GLuint _uiTexture{ 0 };
int _audioRedQuad;
@@ -44,10 +47,15 @@ private:
int _domainStatusBorder;
int _magnifierBorder;
int _previousBorderWidth;
int _previousBorderHeight;
float _scaleMirror{ 1.0f };
float _rotateMirror{ 0.0f };
float _raiseMirror{ 0.0f };
QOpenGLFramebufferObject* _framebufferObject;
ivec2 _previousBorderSize{ -1 };
QRect _mirrorViewRect;
QOpenGLFramebufferObject* _overlayFramebuffer{ nullptr };
QOpenGLFramebufferObject* _mirrorFramebuffer{ nullptr };
};
#endif // hifi_ApplicationOverlay_h

View file

@@ -14,7 +14,7 @@
#include <stdint.h>
#include <QDebug>
#include <QObject>
#include <QHash>
#include <QScriptEngine>

View file

@@ -434,6 +434,8 @@ glm::vec2 Font::drawString(float x, float y, const QString & str,
fromGlm(MatrixStack::projection().top()));
if (effectType == TextRenderer::OUTLINE_EFFECT) {
_program->setUniformValue("Outline", true);
} else {
_program->setUniformValue("Outline", false);
}
// Needed?
glEnable(GL_TEXTURE_2D);
@@ -539,6 +541,25 @@ glm::vec2 TextRenderer::computeExtent(const QString & str) const {
return glm::vec2(0.1f,0.1f);
}
static gpu::PipelinePointer _textDrawPipeline;
void TextRenderer::useTextPipeline(gpu::Batch& batch) {
if (!_textDrawPipeline) {
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(sdf_text_vert)));
auto fs = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(sdf_text_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, fs));
gpu::Shader::makeProgram((*program));
auto state = gpu::StatePointer(new gpu::State());
// enable decal blend
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_textDrawPipeline.reset(gpu::Pipeline::create(program, state));
}
batch.setPipeline(_textDrawPipeline);
batch._glBindTexture(GL_TEXTURE_2D, this->_font->_texture->textureId());
}
float TextRenderer::draw(float x, float y, const QString & str,
const glm::vec4& color, const glm::vec2 & bounds) {
glm::vec4 actualColor(color);

View file

@@ -122,6 +122,21 @@ float aspect(const T& t) {
return (float)t.x / (float)t.y;
}
// Take values in an arbitrary range [0, size] and convert them to the range [0, 1]
template <typename T>
T toUnitScale(const T& value, const T& size) {
return value / size;
}
// Take values in an arbitrary range [0, size] and convert them to the range [-1, 1]
template <typename T>
T toNormalizedDeviceScale(const T& value, const T& size) {
T result = toUnitScale(value, size);
result *= 2.0f;
result -= 1.0f;
return result;
}
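A quick usage sketch for the two helpers added above, matching how displayOverlayTexture() in this commit converts the mouse position to normalized device coordinates (the Y flip stays at the call site because mouse coordinates grow downward while GL NDC grows upward):

    // Assumes toUnitScale / toNormalizedDeviceScale as declared above and glm::vec2.
    glm::vec2 mouseToNdc(const glm::vec2& mousePixels, const glm::vec2& canvasSize) {
        glm::vec2 ndc = toNormalizedDeviceScale(mousePixels, canvasSize); // [0, size] -> [-1, 1]
        ndc.y *= -1.0f;                                                   // invert the Y axis
        return ndc;
    }
    // mouseToNdc({ 960, 540 }, { 1920, 1080 }) == { 0, 0 } (center of the canvas)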
#define YAW(euler) euler.y
#define PITCH(euler) euler.x
#define ROLL(euler) euler.z