Hand lasers

Brad Davis 2016-08-03 11:59:11 -07:00
parent b10e2020a7
commit 901e2da828
12 changed files with 247 additions and 587 deletions

View file

@@ -1,35 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core
uniform vec4 color = vec4(1.0, 1.0, 1.0, 1.0);
layout(location = 0) in vec3 inLineDistance;
out vec4 FragColor;
void main() {
vec2 d = inLineDistance.xy;
d.y = abs(d.y);
d.x = abs(d.x);
if (d.x > 1.0) {
d.x = (d.x - 1.0) / 0.02;
} else {
d.x = 0.0;
}
float alpha = 1.0 - length(d);
if (alpha <= 0.0) {
discard;
}
alpha = pow(alpha, 10.0);
if (alpha < 0.05) {
discard;
}
FragColor = vec4(color.rgb, alpha);
}
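
The deleted fragment shader above fades the laser quad out toward its edges: the cross-line distance always contributes, the along-line distance only contributes in the 2% end-cap region past the endpoints, and the result is sharpened with a power curve plus two discard thresholds. A minimal C++/glm sketch of that falloff, for reference only and not part of the commit:

#include <cmath>
#include <glm/glm.hpp>

// lineDistance.x runs roughly from -1.02 to 1.02 along the laser,
// lineDistance.y from -1 to 1 across it. Returns 0 where the shader discards.
float laserAlpha(const glm::vec2& lineDistance) {
    glm::vec2 d = glm::abs(lineDistance);
    // Inside the segment the lengthwise distance contributes nothing;
    // past the endpoints it ramps up over the 0.02 cap region.
    d.x = (d.x > 1.0f) ? (d.x - 1.0f) / 0.02f : 0.0f;
    float alpha = 1.0f - glm::length(d);
    if (alpha <= 0.0f) {
        return 0.0f; // first discard
    }
    alpha = std::pow(alpha, 10.0f); // sharpen the falloff
    return (alpha < 0.05f) ? 0.0f : alpha; // second discard threshold
}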

View file

@@ -1,70 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core
#extension GL_EXT_geometry_shader4 : enable
layout(location = 0) out vec3 outLineDistance;
layout(lines) in;
layout(triangle_strip, max_vertices = 24) out;
vec3[2] getOrthogonals(in vec3 n, float scale) {
float yDot = abs(dot(n, vec3(0, 1, 0)));
vec3 result[2];
if (yDot < 0.9) {
result[0] = normalize(cross(n, vec3(0, 1, 0)));
} else {
result[0] = normalize(cross(n, vec3(1, 0, 0)));
}
// The cross of result[0] and n is orthogonal to both, which are orthogonal to each other
result[1] = cross(result[0], n);
result[0] *= scale;
result[1] *= scale;
return result;
}
vec2 orthogonal(vec2 v) {
vec2 result = v.yx;
result.y *= -1.0;
return result;
}
void main() {
vec2 endpoints[2];
for (int i = 0; i < 2; ++i) {
endpoints[i] = gl_PositionIn[i].xy / gl_PositionIn[i].w;
}
vec2 lineNormal = normalize(endpoints[1] - endpoints[0]);
vec2 lineOrthogonal = orthogonal(lineNormal);
lineNormal *= 0.02;
lineOrthogonal *= 0.02;
gl_Position = gl_PositionIn[0];
gl_Position.xy -= lineOrthogonal;
outLineDistance = vec3(-1.02, -1, gl_Position.z);
EmitVertex();
gl_Position = gl_PositionIn[0];
gl_Position.xy += lineOrthogonal;
outLineDistance = vec3(-1.02, 1, gl_Position.z);
EmitVertex();
gl_Position = gl_PositionIn[1];
gl_Position.xy -= lineOrthogonal;
outLineDistance = vec3(1.02, -1, gl_Position.z);
EmitVertex();
gl_Position = gl_PositionIn[1];
gl_Position.xy += lineOrthogonal;
outLineDistance = vec3(1.02, 1, gl_Position.z);
EmitVertex();
EndPrimitive();
}
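
The deleted geometry shader turns each laser line segment into a screen-facing quad: it takes the two clip-space endpoints, computes the 2D orthogonal of the line direction after the perspective divide, offsets both endpoints by a fixed half-width of 0.02 in clip space, and tags each corner with the line-distance coordinate consumed by the fragment shader above. A rough CPU-side sketch of the same corner computation with glm; the names are illustrative, not from the codebase:

#include <array>
#include <glm/glm.hpp>

struct LaserCorner {
    glm::vec4 position;     // clip-space corner, as emitted via gl_Position
    glm::vec3 lineDistance; // what the shader writes to outLineDistance
};

// p0/p1 are the clip-space endpoints the shader receives in gl_PositionIn.
std::array<LaserCorner, 4> expandLaserSegment(const glm::vec4& p0, const glm::vec4& p1) {
    // Direction of the segment in normalized device coordinates.
    glm::vec2 e0 = glm::vec2(p0) / p0.w;
    glm::vec2 e1 = glm::vec2(p1) / p1.w;
    glm::vec2 dir = glm::normalize(e1 - e0);
    // 2D orthogonal of the direction, scaled to the fixed half-width.
    glm::vec2 ortho = glm::vec2(dir.y, -dir.x) * 0.02f;

    glm::vec4 offset(ortho, 0.0f, 0.0f);
    return {{
        { p0 - offset, glm::vec3(-1.02f, -1.0f, p0.z) },
        { p0 + offset, glm::vec3(-1.02f,  1.0f, p0.z) },
        { p1 - offset, glm::vec3( 1.02f, -1.0f, p1.z) },
        { p1 + offset, glm::vec3( 1.02f,  1.0f, p1.z) },
    }};
}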

View file

@@ -1,15 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core
uniform mat4 mvp = mat4(1);
in vec3 Position;
void main() {
gl_Position = mvp * vec4(Position, 1);
}

View file

@@ -325,6 +325,17 @@ void OpenGLDisplayPlugin::customizeContext() {
}
if (!_presentPipeline) {
{
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
auto ps = gpu::StandardShaderLib::getDrawTexturePS();
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::makeProgram(*program);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setScissorEnable(true);
_simplePipeline = gpu::Pipeline::create(program, state);
}
{
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
auto ps = gpu::Shader::createPixel(std::string(SRGB_TO_LINEAR_FRAG));
@@ -460,7 +471,7 @@ void OpenGLDisplayPlugin::updateFrameData() {
void OpenGLDisplayPlugin::compositeOverlay() {
render([&](gpu::Batch& batch){
batch.enableStereo(false);
batch.setFramebuffer(_currentFrame->framebuffer);
batch.setFramebuffer(_compositeFramebuffer);
batch.setPipeline(_overlayPipeline);
batch.setResourceTexture(0, _currentFrame->overlay);
if (isStereo()) {
@@ -482,7 +493,7 @@ void OpenGLDisplayPlugin::compositePointer() {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setProjectionTransform(mat4());
batch.setFramebuffer(_currentFrame->framebuffer);
batch.setFramebuffer(_compositeFramebuffer);
batch.setPipeline(_cursorPipeline);
batch.setResourceTexture(0, cursorData.texture);
batch.clearViewTransform();
@@ -500,6 +511,17 @@ void OpenGLDisplayPlugin::compositePointer() {
}
void OpenGLDisplayPlugin::compositeScene() {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setFramebuffer(_compositeFramebuffer);
batch.setViewportTransform(ivec4(uvec2(), _compositeFramebuffer->getSize()));
batch.setStateScissorRect(ivec4(uvec2(), _compositeFramebuffer->getSize()));
batch.clearViewTransform();
batch.setProjectionTransform(mat4());
batch.setPipeline(_simplePipeline);
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
}
void OpenGLDisplayPlugin::compositeLayers() {
@@ -528,7 +550,7 @@ void OpenGLDisplayPlugin::internalPresent() {
batch.clearViewTransform();
batch.setFramebuffer(gpu::FramebufferPointer());
batch.setViewportTransform(ivec4(uvec2(0), getSurfacePixels()));
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
batch.setResourceTexture(0, _compositeFramebuffer->getRenderBuffer(0));
batch.setPipeline(_presentPipeline);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});

View file

@@ -112,6 +112,7 @@ protected:
gpu::FramebufferPointer _compositeFramebuffer;
gpu::TexturePointer _compositeTexture;
gpu::PipelinePointer _overlayPipeline;
gpu::PipelinePointer _simplePipeline;
gpu::PipelinePointer _presentPipeline;
gpu::PipelinePointer _cursorPipeline;
float _compositeOverlayAlpha { 1.0f };

View file

@@ -35,6 +35,14 @@ bool DebugHmdDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
withNonPresentThreadLock([&] {
_uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
_frameInfos[frameIndex] = _currentRenderFrameInfo;
_handPoses[0] = glm::translate(mat4(), vec3(-0.3f, 0.0f, 0.0f));
_handLasers[0].color = vec4(1, 0, 0, 1);
_handLasers[0].mode = HandLaserMode::Overlay;
_handPoses[1] = glm::translate(mat4(), vec3(0.3f, 0.0f, 0.0f));
_handLasers[1].color = vec4(0, 1, 1, 1);
_handLasers[1].mode = HandLaserMode::Overlay;
});
return Parent::beginFrameRender(frameIndex);
}
@@ -70,7 +78,6 @@ bool DebugHmdDisplayPlugin::internalActivate() {
}
void DebugHmdDisplayPlugin::updatePresentPose() {
// if (usecTimestampNow() % 4000000 > 2000000) {
_currentPresentFrameInfo.presentPose = glm::mat4_cast(glm::angleAxis(0.5f, Vectors::UP));
// }
// Simulates head pose latency correction
_currentPresentFrameInfo.presentPose = glm::mat4_cast(glm::angleAxis(sin(secTimestampNow()) * 0.25f, Vectors::UP)) * glm::mat4_cast(glm::angleAxis(cos(secTimestampNow()) * 0.25f, Vectors::RIGHT));
}
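
The debug plugin above bypasses the public API and fills _handPoses/_handLasers directly under the present-thread lock. An ordinary caller would instead go through HmdDisplayPlugin::setHandLaser, whose signature appears later in this diff. A hedged usage sketch; the hand-selection bitmask constant and the default laser direction are assumptions, not taken from this commit, and the enum is assumed to be reachable as in the debug plugin:

// Sketch only: enable a cyan overlay-mode laser on one hand of an HmdDisplayPlugin& plugin.
const uint32_t RIGHT_HAND_LASER = 0x2;      // assumption: hand-selection bit, not shown in this commit
plugin.setHandLaser(RIGHT_HAND_LASER,
                    HandLaserMode::Overlay, // mode used by the debug plugin above
                    vec4(0, 1, 1, 1),       // color, matching the debug plugin's second hand
                    vec3(0, 0, -1));        // assumption: lasers point down -Z by default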

View file

@@ -23,7 +23,7 @@
#include <CursorManager.h>
#include <gl/GLWidget.h>
#include <shared/NsightHelpers.h>
#include <GeometryCache.h>
#include <gpu/Context.h>
#include <gpu/StandardShaderLib.h>
#include <gpu/gl/GLBackend.h>
@@ -113,25 +113,191 @@ void HmdDisplayPlugin::customizeContext() {
enableVsync(false);
#endif
_enablePreview = !isVsyncEnabled();
_overlay.build();
#if 0
updateReprojectionProgram();
updateLaserProgram();
_laserGeometry = loadLaser(_laserProgram);
#endif
_overlayRenderer.build();
}
void HmdDisplayPlugin::uncustomizeContext() {
_overlay = OverlayRenderer();
#if 0
_previewProgram.reset();
_laserProgram.reset();
_laserGeometry.reset();
#endif
_overlayRenderer = OverlayRenderer();
getGLBackend()->setCameraCorrection(mat4());
Parent::uncustomizeContext();
}
void HmdDisplayPlugin::internalPresent() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)presentCount())
// Composite together the scene, overlay and mouse cursor
hmdPresent();
if (_enablePreview) {
// screen preview mirroring
auto window = _container->getPrimaryWidget();
auto devicePixelRatio = window->devicePixelRatio();
auto windowSize = toGlm(window->size());
windowSize *= devicePixelRatio;
float windowAspect = aspect(windowSize);
float sceneAspect = _enablePreview ? aspect(_renderTargetSize) : _previewAspect;
if (_enablePreview && _monoPreview) {
sceneAspect /= 2.0f;
}
float aspectRatio = sceneAspect / windowAspect;
uvec2 targetViewportSize = windowSize;
if (aspectRatio < 1.0f) {
targetViewportSize.x *= aspectRatio;
} else {
targetViewportSize.y /= aspectRatio;
}
uvec2 targetViewportPosition;
if (targetViewportSize.x < windowSize.x) {
targetViewportPosition.x = (windowSize.x - targetViewportSize.x) / 2;
} else if (targetViewportSize.y < windowSize.y) {
targetViewportPosition.y = (windowSize.y - targetViewportSize.y) / 2;
}
gpu::Batch presentBatch;
presentBatch.enableStereo(false);
presentBatch.clearViewTransform();
presentBatch.setFramebuffer(gpu::FramebufferPointer());
presentBatch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
presentBatch.setViewportTransform(ivec4(uvec2(0), windowSize));
if (_monoPreview) {
presentBatch.setStateScissorRect(ivec4(targetViewportPosition, targetViewportSize));
targetViewportSize.x *= 2;
presentBatch.setViewportTransform(ivec4(targetViewportPosition, targetViewportSize));
} else {
presentBatch.setStateScissorRect(ivec4(targetViewportPosition, targetViewportSize));
presentBatch.setViewportTransform(ivec4(targetViewportPosition, targetViewportSize));
}
presentBatch.setResourceTexture(0, _compositeTexture);
presentBatch.setPipeline(_presentPipeline);
presentBatch.draw(gpu::TRIANGLE_STRIP, 4);
_backend->render(presentBatch);
swapBuffers();
}
postPreview();
}
// HMD specific stuff
glm::mat4 HmdDisplayPlugin::getHeadPose() const {
return _currentRenderFrameInfo.renderPose;
}
void HmdDisplayPlugin::updatePresentPose() {
// By default assume we'll present with the same pose as the render
_currentPresentFrameInfo.presentPose = _currentPresentFrameInfo.renderPose;
}
void HmdDisplayPlugin::updateFrameData() {
// Check if we have old frame data to discard
static const uint32_t INVALID_FRAME = (uint32_t)(~0);
uint32_t oldFrameIndex = _currentFrame ? _currentFrame->frameIndex : INVALID_FRAME;
Parent::updateFrameData();
uint32_t newFrameIndex = _currentFrame ? _currentFrame->frameIndex : INVALID_FRAME;
if (oldFrameIndex != newFrameIndex) {
withPresentThreadLock([&] {
if (oldFrameIndex != INVALID_FRAME) {
auto itr = _frameInfos.find(oldFrameIndex);
if (itr != _frameInfos.end()) {
_frameInfos.erase(itr);
}
}
if (newFrameIndex != INVALID_FRAME) {
_currentPresentFrameInfo = _frameInfos[newFrameIndex];
}
});
}
updatePresentPose();
if (_currentFrame) {
auto batchPose = _currentFrame->pose;
auto currentPose = _currentPresentFrameInfo.presentPose;
auto correction = glm::inverse(batchPose) * currentPose;
getGLBackend()->setCameraCorrection(correction);
}
withPresentThreadLock([&] {
_presentHandLasers = _handLasers;
_presentHandPoses = _handPoses;
_presentUiModelTransform = _uiModelTransform;
});
auto compositorHelper = DependencyManager::get<CompositorHelper>();
glm::mat4 modelMat = compositorHelper->getModelTransform().getMatrix();
std::array<vec2, NUMBER_OF_HANDS> handGlowPoints{ { vec2(-1), vec2(-1) } };
// compute the glow point intersections
for (size_t i = 0; i < NUMBER_OF_HANDS; ++i) {
if (_presentHandPoses[i] == IDENTITY_MATRIX) {
continue;
}
const auto& handLaser = _presentHandLasers[i];
if (!handLaser.valid()) {
continue;
}
const auto& laserDirection = handLaser.direction;
auto model = _presentHandPoses[i];
auto castDirection = glm::quat_cast(model) * laserDirection;
if (glm::abs(glm::length2(castDirection) - 1.0f) > EPSILON) {
castDirection = glm::normalize(castDirection);
castDirection = glm::inverse(_presentUiModelTransform.getRotation()) * castDirection;
}
// FIXME fetch the actual UI radius from... somewhere?
float uiRadius = 1.0f;
// Find the intersection of the laser with the UI and use it to scale the model matrix
float distance;
if (!glm::intersectRaySphere(vec3(_presentHandPoses[i][3]), castDirection, _presentUiModelTransform.getTranslation(), uiRadius * uiRadius, distance)) {
continue;
}
_presentHandLaserPoints[i].first = vec3(_presentHandPoses[i][3]);
vec3 intersectionPosition = vec3(_presentHandPoses[i][3]) + (castDirection * distance) - _presentUiModelTransform.getTranslation();
intersectionPosition = glm::inverse(_presentUiModelTransform.getRotation()) * intersectionPosition;
_presentHandLaserPoints[i].second = intersectionPosition;
// Take the intersection normal and convert it to a texture coordinate
vec2 yawPitch;
{
vec2 xdir = glm::normalize(vec2(intersectionPosition.x, -intersectionPosition.z));
yawPitch.x = glm::atan(xdir.x, xdir.y);
yawPitch.y = (acosf(intersectionPosition.y) * -1.0f) + M_PI_2;
}
vec2 halfFov = CompositorHelper::VIRTUAL_UI_TARGET_FOV / 2.0f;
// Are we out of range
if (glm::any(glm::greaterThan(glm::abs(yawPitch), halfFov))) {
continue;
}
yawPitch /= CompositorHelper::VIRTUAL_UI_TARGET_FOV;
yawPitch += 0.5f;
handGlowPoints[i] = yawPitch;
}
for_each_eye([&](Eye eye) {
auto modelView = glm::inverse(_currentPresentFrameInfo.presentPose * getEyeToHeadTransform(eye)) * modelMat;
_overlayRenderer.mvps[eye] = _eyeProjections[eye] * modelView;
});
// Setup the uniforms
{
auto& uniforms = _overlayRenderer.uniforms;
uniforms.alpha = _compositeOverlayAlpha;
uniforms.glowPoints = vec4(handGlowPoints[0], handGlowPoints[1]);
uniforms.glowColors[0] = _presentHandLasers[0].color;
uniforms.glowColors[1] = _presentHandLasers[1].color;
}
}
void HmdDisplayPlugin::OverlayRenderer::build() {
vertices = std::make_shared<gpu::Buffer>();
indices = std::make_shared<gpu::Buffer>();
@@ -199,7 +365,7 @@ void HmdDisplayPlugin::OverlayRenderer::updatePipeline() {
static const QString vsFile = PathUtils::resourcesPath() + "/shaders/hmd_ui_glow.vert";
static const QString fsFile = PathUtils::resourcesPath() + "/shaders/hmd_ui_glow.frag";
#if LIVE_SHADER_RELOAD
#if 1
static qint64 vsBuiltAge = 0;
static qint64 fsBuiltAge = 0;
QFileInfo vsInfo(vsFile);
@@ -256,76 +422,6 @@ void HmdDisplayPlugin::OverlayRenderer::render(HmdDisplayPlugin& plugin) {
});
}
void HmdDisplayPlugin::updateLaserProgram() {
#if 0
static const QString vsFile = PathUtils::resourcesPath() + "/shaders/hmd_hand_lasers.vert";
static const QString gsFile = PathUtils::resourcesPath() + "/shaders/hmd_hand_lasers.geom";
static const QString fsFile = PathUtils::resourcesPath() + "/shaders/hmd_hand_lasers.frag";
#if LIVE_SHADER_RELOAD
static qint64 vsBuiltAge = 0;
static qint64 gsBuiltAge = 0;
static qint64 fsBuiltAge = 0;
QFileInfo vsInfo(vsFile);
QFileInfo fsInfo(fsFile);
QFileInfo gsInfo(fsFile);
auto vsAge = vsInfo.lastModified().toMSecsSinceEpoch();
auto fsAge = fsInfo.lastModified().toMSecsSinceEpoch();
auto gsAge = gsInfo.lastModified().toMSecsSinceEpoch();
if (!_laserProgram || vsAge > vsBuiltAge || fsAge > fsBuiltAge || gsAge > gsBuiltAge) {
vsBuiltAge = vsAge;
gsBuiltAge = gsAge;
fsBuiltAge = fsAge;
#else
if (!_laserProgram) {
#endif
QString vsSource = readFile(vsFile);
QString fsSource = readFile(fsFile);
QString gsSource = readFile(gsFile);
ProgramPtr program;
try {
compileProgram(program, vsSource.toLocal8Bit().toStdString(), gsSource.toLocal8Bit().toStdString(), fsSource.toLocal8Bit().toStdString());
if (program) {
using namespace oglplus;
_laserUniforms.color = Uniform<glm::vec4>(*program, "color").Location();
_laserUniforms.mvp = Uniform<glm::mat4>(*program, "mvp").Location();
_laserProgram = program;
}
} catch (std::runtime_error& error) {
qWarning() << "Error building hand laser composite shader " << error.what();
}
}
#endif
}
// By default assume we'll present with the same pose as the render
void HmdDisplayPlugin::updatePresentPose() {
_currentPresentFrameInfo.presentPose = _currentPresentFrameInfo.renderPose;
}
void HmdDisplayPlugin::compositeScene() {
render([&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setFramebuffer(_compositeFramebuffer);
batch.setViewportTransform(ivec4(uvec2(), _renderTargetSize));
batch.setStateScissorRect(ivec4(uvec2(), _renderTargetSize));
batch.clearViewTransform();
batch.setProjectionTransform(mat4());
batch.setPipeline(_presentPipeline);
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
}
void HmdDisplayPlugin::compositeOverlay() {
if (!_currentFrame || !_currentFrame->overlay) {
return;
}
_overlay.render(*this);
}
void HmdDisplayPlugin::compositePointer() {
auto& cursorManager = Cursor::Manager::instance();
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
@@ -351,171 +447,12 @@ void HmdDisplayPlugin::compositePointer() {
});
}
void HmdDisplayPlugin::internalPresent() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)presentCount())
// Composite together the scene, overlay and mouse cursor
hmdPresent();
if (_enablePreview) {
// screen preview mirroring
auto window = _container->getPrimaryWidget();
auto devicePixelRatio = window->devicePixelRatio();
auto windowSize = toGlm(window->size());
windowSize *= devicePixelRatio;
float windowAspect = aspect(windowSize);
float sceneAspect = _enablePreview ? aspect(_renderTargetSize) : _previewAspect;
if (_enablePreview && _monoPreview) {
sceneAspect /= 2.0f;
}
float aspectRatio = sceneAspect / windowAspect;
uvec2 targetViewportSize = windowSize;
if (aspectRatio < 1.0f) {
targetViewportSize.x *= aspectRatio;
} else {
targetViewportSize.y /= aspectRatio;
}
uvec2 targetViewportPosition;
if (targetViewportSize.x < windowSize.x) {
targetViewportPosition.x = (windowSize.x - targetViewportSize.x) / 2;
} else if (targetViewportSize.y < windowSize.y) {
targetViewportPosition.y = (windowSize.y - targetViewportSize.y) / 2;
}
gpu::Batch presentBatch;
presentBatch.enableStereo(false);
presentBatch.clearViewTransform();
presentBatch.setFramebuffer(gpu::FramebufferPointer());
presentBatch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
presentBatch.setViewportTransform(ivec4(uvec2(0), windowSize));
if (_monoPreview) {
presentBatch.setStateScissorRect(ivec4(targetViewportPosition, targetViewportSize));
targetViewportSize.x *= 2;
presentBatch.setViewportTransform(ivec4(targetViewportPosition, targetViewportSize));
} else {
presentBatch.setStateScissorRect(ivec4(targetViewportPosition, targetViewportSize));
presentBatch.setViewportTransform(ivec4(targetViewportPosition, targetViewportSize));
}
presentBatch.setResourceTexture(0, _compositeTexture);
presentBatch.setPipeline(_presentPipeline);
presentBatch.draw(gpu::TRIANGLE_STRIP, 4);
_backend->render(presentBatch);
swapBuffers();
}
postPreview();
}
void HmdDisplayPlugin::updateFrameData() {
// Check if we have old frame data to discard
static const uint32_t INVALID_FRAME = (uint32_t)(~0);
uint32_t oldFrameIndex = _currentFrame ? _currentFrame->frameIndex : INVALID_FRAME;
Parent::updateFrameData();
uint32_t newFrameIndex = _currentFrame ? _currentFrame->frameIndex : INVALID_FRAME;
if (oldFrameIndex != newFrameIndex) {
withPresentThreadLock([&] {
if (oldFrameIndex != INVALID_FRAME) {
auto itr = _frameInfos.find(oldFrameIndex);
if (itr != _frameInfos.end()) {
_frameInfos.erase(itr);
}
}
if (newFrameIndex != INVALID_FRAME) {
_currentPresentFrameInfo = _frameInfos[newFrameIndex];
}
});
void HmdDisplayPlugin::compositeOverlay() {
if (!_currentFrame || !_currentFrame->overlay) {
return;
}
updatePresentPose();
if (_currentFrame) {
auto batchPose = _currentFrame->pose;
auto currentPose = _currentPresentFrameInfo.presentPose;
auto correction = glm::inverse(batchPose) * currentPose;
getGLBackend()->setCameraCorrection(correction);
}
withPresentThreadLock([&] {
_presentHandLasers = _handLasers;
_presentHandPoses = _handPoses;
_presentUiModelTransform = _uiModelTransform;
});
auto compositorHelper = DependencyManager::get<CompositorHelper>();
glm::mat4 modelMat = compositorHelper->getModelTransform().getMatrix();
std::array<vec2, NUMBER_OF_HANDS> handGlowPoints{ { vec2(-1), vec2(-1) } };
// compute the glow point intersections
for (size_t i = 0; i < NUMBER_OF_HANDS; ++i) {
if (_presentHandPoses[i] == IDENTITY_MATRIX) {
continue;
}
const auto& handLaser = _presentHandLasers[i];
if (!handLaser.valid()) {
continue;
}
const auto& laserDirection = handLaser.direction;
auto model = _presentHandPoses[i];
auto castDirection = glm::quat_cast(model) * laserDirection;
if (glm::abs(glm::length2(castDirection) - 1.0f) > EPSILON) {
castDirection = glm::normalize(castDirection);
castDirection = glm::inverse(_presentUiModelTransform.getRotation()) * castDirection;
}
// FIXME fetch the actual UI radius from... somewhere?
float uiRadius = 1.0f;
// Find the intersection of the laser with the UI and use it to scale the model matrix
float distance;
if (!glm::intersectRaySphere(vec3(_presentHandPoses[i][3]), castDirection, _presentUiModelTransform.getTranslation(), uiRadius * uiRadius, distance)) {
continue;
}
vec3 intersectionPosition = vec3(_presentHandPoses[i][3]) + (castDirection * distance) - _presentUiModelTransform.getTranslation();
intersectionPosition = glm::inverse(_presentUiModelTransform.getRotation()) * intersectionPosition;
// Take the intersection normal and convert it to a texture coordinate
vec2 yawPitch;
{
vec2 xdir = glm::normalize(vec2(intersectionPosition.x, -intersectionPosition.z));
yawPitch.x = glm::atan(xdir.x, xdir.y);
yawPitch.y = (acosf(intersectionPosition.y) * -1.0f) + M_PI_2;
}
vec2 halfFov = CompositorHelper::VIRTUAL_UI_TARGET_FOV / 2.0f;
// Are we out of range
if (glm::any(glm::greaterThan(glm::abs(yawPitch), halfFov))) {
continue;
}
yawPitch /= CompositorHelper::VIRTUAL_UI_TARGET_FOV;
yawPitch += 0.5f;
handGlowPoints[i] = yawPitch;
}
for_each_eye([&](Eye eye) {
auto modelView = glm::inverse(_currentPresentFrameInfo.presentPose * getEyeToHeadTransform(eye)) * modelMat;
_overlay.mvps[eye] = _eyeProjections[eye] * modelView;
});
// Setup the uniforms
{
auto& uniforms = _overlay.uniforms;
uniforms.alpha = _compositeOverlayAlpha;
uniforms.glowPoints = vec4(handGlowPoints[0], handGlowPoints[1]);
uniforms.glowColors[0] = _presentHandLasers[0].color;
uniforms.glowColors[1] = _presentHandLasers[1].color;
}
}
glm::mat4 HmdDisplayPlugin::getHeadPose() const {
return _currentRenderFrameInfo.renderPose;
_overlayRenderer.render(*this);
}
bool HmdDisplayPlugin::setHandLaser(uint32_t hands, HandLaserMode mode, const vec4& color, const vec3& direction) {
@@ -537,7 +474,6 @@ bool HmdDisplayPlugin::setHandLaser(uint32_t hands, HandLaserMode mode, const ve
}
void HmdDisplayPlugin::compositeExtra() {
#if 0
// If neither hand laser is activated, exit
if (!_presentHandLasers[0].valid() && !_presentHandLasers[1].valid()) {
return;
@@ -547,66 +483,20 @@ void HmdDisplayPlugin::compositeExtra() {
return;
}
updateLaserProgram();
// Render hand lasers
using namespace oglplus;
useProgram(_laserProgram);
_laserGeometry->Use();
std::array<mat4, NUMBER_OF_HANDS> handLaserModelMatrices;
for (int i = 0; i < NUMBER_OF_HANDS; ++i) {
if (_presentHandPoses[i] == IDENTITY_MATRIX) {
continue;
}
const auto& handLaser = _presentHandLasers[i];
if (!handLaser.valid()) {
continue;
}
const auto& laserDirection = handLaser.direction;
auto model = _presentHandPoses[i];
auto castDirection = glm::quat_cast(model) * laserDirection;
if (glm::abs(glm::length2(castDirection) - 1.0f) > EPSILON) {
castDirection = glm::normalize(castDirection);
}
// FIXME fetch the actual UI radius from... somewhere?
float uiRadius = 1.0f;
// Find the intersection of the laser with the UI and use it to scale the model matrix
float distance;
if (!glm::intersectRaySphere(vec3(_presentHandPoses[i][3]), castDirection, _presentUiModelTransform.getTranslation(), uiRadius * uiRadius, distance)) {
continue;
}
// Make sure we rotate to match the desired laser direction
if (laserDirection != Vectors::UNIT_NEG_Z) {
auto rotation = glm::rotation(Vectors::UNIT_NEG_Z, laserDirection);
model = model * glm::mat4_cast(rotation);
}
model = glm::scale(model, vec3(distance));
handLaserModelMatrices[i] = model;
}
glEnable(GL_BLEND);
for_each_eye([&](Eye eye) {
eyeViewport(eye);
auto eyePose = _currentPresentFrameInfo.presentPose * getEyeToHeadTransform(eye);
auto view = glm::inverse(eyePose);
const auto& projection = _eyeProjections[eye];
for (int i = 0; i < NUMBER_OF_HANDS; ++i) {
if (handLaserModelMatrices[i] == IDENTITY_MATRIX) {
continue;
auto geometryCache = DependencyManager::get<GeometryCache>();
render([&](gpu::Batch& batch) {
batch.setFramebuffer(_compositeFramebuffer);
batch.setViewportTransform(ivec4(uvec2(0), _renderTargetSize));
batch.setViewTransform(_currentPresentFrameInfo.presentPose, false);
bilateral::for_each_side([&](bilateral::Side side){
auto index = bilateral::index(side);
if (_presentHandPoses[index] == IDENTITY_MATRIX) {
return;
}
Uniform<glm::mat4>(*_laserProgram, "mvp").Set(projection * view * handLaserModelMatrices[i]);
Uniform<glm::vec4>(*_laserProgram, "color").Set(_presentHandLasers[i].color);
_laserGeometry->Draw();
}
const auto& points = _presentHandLaserPoints[index];
const auto& lasers = _presentHandLasers[index];
geometryCache->renderGlowLine(batch, points.first, points.second, lasers.color);
});
});
glDisable(GL_BLEND);
#endif
}
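
Taken together, the glow-point math in updateFrameData and the new compositeExtra path replace the old oglplus laser geometry: each laser is intersected with the UI sphere once per frame, the hit is stored in _presentHandLaserPoints, converted to a [0,1]^2 overlay coordinate for the glow uniforms, and the visible beam is drawn as a glow line between the stored endpoints. A condensed C++/glm sketch of the intersection-to-UV step follows; it is not a line-for-line copy of the code above, and the UI radius and FOV are placeholders (the real FOV comes from CompositorHelper::VIRTUAL_UI_TARGET_FOV):

#define GLM_ENABLE_EXPERIMENTAL
#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/intersect.hpp>

bool laserToOverlayUv(const glm::vec3& handPosition, const glm::vec3& castDirection, // normalized, world space
                      const glm::vec3& uiCenter, const glm::quat& uiRotation,
                      glm::vec2& uvOut) {
    const float uiRadius = 1.0f;                                // FIXME in the original: fetch the real radius
    const glm::vec2 uiFov(glm::pi<float>(), glm::pi<float>());  // placeholder for VIRTUAL_UI_TARGET_FOV

    float distance;
    // glm's overload takes the squared radius, hence the uiRadius * uiRadius in the diff above.
    if (!glm::intersectRaySphere(handPosition, castDirection, uiCenter, uiRadius * uiRadius, distance)) {
        return false;
    }
    // Hit point relative to the sphere center, rotated into UI-local space.
    glm::vec3 p = glm::inverse(uiRotation) * (handPosition + castDirection * distance - uiCenter);

    // Yaw from the horizontal direction, pitch from the height on the unit sphere.
    glm::vec2 xdir = glm::normalize(glm::vec2(p.x, -p.z));
    glm::vec2 yawPitch(glm::atan(xdir.x, xdir.y), -std::acos(p.y) + glm::half_pi<float>());

    // Outside the virtual UI's field of view: no glow point for this hand.
    if (glm::any(glm::greaterThan(glm::abs(yawPitch), uiFov * 0.5f))) {
        return false;
    }
    uvOut = yawPitch / uiFov + 0.5f;                            // normalize into [0,1]^2
    return true;
}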

View file

@@ -46,7 +46,6 @@ protected:
bool internalActivate() override;
void internalDeactivate() override;
void compositeScene() override;
void compositeOverlay() override;
void compositePointer() override;
void internalPresent() override;
@@ -68,26 +67,27 @@ protected:
Transform _uiModelTransform;
std::array<HandLaserInfo, 2> _handLasers;
std::array<glm::mat4, 2> _handPoses;
std::array<mat4, 2> _handPoses;
Transform _presentUiModelTransform;
std::array<HandLaserInfo, 2> _presentHandLasers;
std::array<mat4, 2> _presentHandPoses;
std::array<std::pair<vec3, vec3>, 2> _presentHandLaserPoints;
std::array<glm::mat4, 2> _eyeOffsets;
std::array<glm::mat4, 2> _eyeProjections;
std::array<glm::mat4, 2> _eyeInverseProjections;
std::array<mat4, 2> _eyeOffsets;
std::array<mat4, 2> _eyeProjections;
std::array<mat4, 2> _eyeInverseProjections;
glm::mat4 _cullingProjection;
glm::uvec2 _renderTargetSize;
mat4 _cullingProjection;
uvec2 _renderTargetSize;
float _ipd { 0.064f };
struct FrameInfo {
glm::mat4 renderPose;
glm::mat4 presentPose;
mat4 renderPose;
mat4 presentPose;
double sensorSampleTime { 0 };
double predictedDisplayTime { 0 };
glm::mat3 presentReprojection;
mat3 presentReprojection;
};
QMap<uint32_t, FrameInfo> _frameInfos;
@@ -95,9 +95,6 @@ protected:
FrameInfo _currentRenderFrameInfo;
private:
void updateLaserProgram();
void updateReprojectionProgram();
bool _enablePreview { false };
bool _monoPreview { true };
bool _enableReprojection { true };
@@ -140,26 +137,5 @@ private:
void build();
void updatePipeline();
void render(HmdDisplayPlugin& plugin);
} _overlay;
#if 0
ProgramPtr _previewProgram;
struct PreviewUniforms {
int32_t previewTexture { -1 };
} _previewUniforms;
ProgramPtr _reprojectionProgram;
struct ReprojectionUniforms {
int32_t reprojectionMatrix { -1 };
int32_t inverseProjectionMatrix { -1 };
int32_t projectionMatrix { -1 };
} _reprojectionUniforms;
ProgramPtr _laserProgram;
struct LaserUniforms {
int32_t mvp { -1 };
int32_t color { -1 };
} _laserUniforms;
ShapeWrapperPtr _laserGeometry;
#endif
} _overlayRenderer;
};

View file

@@ -1,35 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core
uniform vec4 color = vec4(1.0, 1.0, 1.0, 1.0);
layout(location = 0) in vec3 inLineDistance;
out vec4 FragColor;
void main() {
vec2 d = inLineDistance.xy;
d.y = abs(d.y);
d.x = abs(d.x);
if (d.x > 1.0) {
d.x = (d.x - 1.0) / 0.02;
} else {
d.x = 0.0;
}
float alpha = 1.0 - length(d);
if (alpha <= 0.0) {
discard;
}
alpha = pow(alpha, 10.0);
if (alpha < 0.05) {
discard;
}
FragColor = vec4(color.rgb, alpha);
}

View file

@@ -1,70 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core
#extension GL_EXT_geometry_shader4 : enable
layout(location = 0) out vec3 outLineDistance;
layout(lines) in;
layout(triangle_strip, max_vertices = 24) out;
vec3[2] getOrthogonals(in vec3 n, float scale) {
float yDot = abs(dot(n, vec3(0, 1, 0)));
vec3 result[2];
if (yDot < 0.9) {
result[0] = normalize(cross(n, vec3(0, 1, 0)));
} else {
result[0] = normalize(cross(n, vec3(1, 0, 0)));
}
// The cross of result[0] and n is orthogonal to both, which are orthogonal to each other
result[1] = cross(result[0], n);
result[0] *= scale;
result[1] *= scale;
return result;
}
vec2 orthogonal(vec2 v) {
vec2 result = v.yx;
result.y *= -1.0;
return result;
}
void main() {
vec2 endpoints[2];
for (int i = 0; i < 2; ++i) {
endpoints[i] = gl_PositionIn[i].xy / gl_PositionIn[i].w;
}
vec2 lineNormal = normalize(endpoints[1] - endpoints[0]);
vec2 lineOrthogonal = orthogonal(lineNormal);
lineNormal *= 0.02;
lineOrthogonal *= 0.02;
gl_Position = gl_PositionIn[0];
gl_Position.xy -= lineOrthogonal;
outLineDistance = vec3(-1.02, -1, gl_Position.z);
EmitVertex();
gl_Position = gl_PositionIn[0];
gl_Position.xy += lineOrthogonal;
outLineDistance = vec3(-1.02, 1, gl_Position.z);
EmitVertex();
gl_Position = gl_PositionIn[1];
gl_Position.xy -= lineOrthogonal;
outLineDistance = vec3(1.02, -1, gl_Position.z);
EmitVertex();
gl_Position = gl_PositionIn[1];
gl_Position.xy += lineOrthogonal;
outLineDistance = vec3(1.02, 1, gl_Position.z);
EmitVertex();
EndPrimitive();
}

View file

@@ -1,13 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
in vec3 Position;
void main() {
gl_Position = mvp * vec4(Position, 1);
}

View file

@@ -13,7 +13,9 @@ if (WIN32)
set(TARGET_NAME oculus)
setup_hifi_plugin(Multimedia)
link_hifi_libraries(shared gl gpu gpu-gl controllers ui plugins ui-plugins display-plugins input-plugins audio-client networking)
link_hifi_libraries(shared gl gpu gpu-gl controllers ui
plugins ui-plugins display-plugins input-plugins
audio-client networking render-utils)
include_hifi_library_headers(octree)