Batch replay reprojection

Brad Davis 2016-07-31 21:57:17 -07:00
parent 7e93747acf
commit adcfd55cc0
33 changed files with 96 additions and 471 deletions

View file

@@ -7,8 +7,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
 ExternalProject_Add(
   ${EXTERNAL_NAME}
-  URL https://github.com/ValveSoftware/openvr/archive/v0.9.19.zip
-  URL_MD5 843f9dde488584d8af1f3ecf2252b4e0
+  URL https://github.com/ValveSoftware/openvr/archive/v1.0.2.zip
+  URL_MD5 0d1cf5f579cf092e33f34759967b7046
   CONFIGURE_COMMAND ""
   BUILD_COMMAND ""
   INSTALL_COMMAND ""

View file

@@ -1,17 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform sampler2D sampler;
in vec2 varTexCoord0;
out vec4 FragColor;
void main() {
FragColor = texture(sampler, varTexCoord0);
}

View file

@@ -1,90 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
precision highp float;
struct TransformCamera {
mat4 _view;
mat4 _viewInverse;
mat4 _projectionViewUntranslated;
mat4 _projection;
mat4 _projectionInverse;
vec4 _viewport;
vec4 _stereoInfo;
};
layout(std140) uniform transformCameraBuffer {
TransformCamera _camera;
};
TransformCamera getTransformCamera() {
return _camera;
}
vec3 getEyeWorldPos() {
return _camera._viewInverse[3].xyz;
}
bool cam_isStereo() {
return _camera._stereoInfo.x > 0.0;
}
float cam_getStereoSide() {
return _camera._stereoInfo.y;
}
struct Reprojection {
mat4 rotation;
};
layout(std140) uniform reprojectionBuffer {
Reprojection reprojection;
};
layout(location = 0) in vec4 inPosition;
noperspective out vec2 varTexCoord0;
void main(void) {
// standard transform
TransformCamera cam = getTransformCamera();
vec2 uv = inPosition.xy;
uv.x /= 2.0;
vec4 pos = inPosition;
pos *= 2.0;
pos -= 1.0;
if (cam_getStereoSide() > 0.0) {
uv.x += 0.5;
}
if (reprojection.rotation != mat4(1)) {
vec4 eyeSpace = _camera._projectionInverse * pos;
eyeSpace /= eyeSpace.w;
// Convert to a noramlized ray
vec3 ray = eyeSpace.xyz;
ray = normalize(ray);
// Adjust the ray by the rotation
ray = mat3(inverse(reprojection.rotation)) * ray;
// Project back on to the texture plane
ray *= eyeSpace.z / ray.z;
eyeSpace.xyz = ray;
// Move back into NDC space
eyeSpace = _camera._projection * eyeSpace;
eyeSpace /= eyeSpace.w;
eyeSpace.z = 0.0;
pos = eyeSpace;
}
gl_Position = pos;
varTexCoord0 = uv;
}
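
The deleted vertex shader above performed the reprojection per vertex: unproject each grid vertex into an eye-space ray, rotate the ray by the correction, then project it back with the eye's projection. The same math can be written on the CPU with glm; the sketch below is illustrative only — it assumes glm and simply mirrors the shader logic above, it is not part of the commit.

#include <glm/glm.hpp>

// Reproject a point on the near plane (NDC xy in [-1,1]) through a small
// head-rotation correction, mirroring the deleted hmd_reproject.vert logic.
// Names here are illustrative; only the math follows the shader above.
glm::vec4 reprojectNdc(const glm::vec4& ndcPos,
                       const glm::mat4& projection,
                       const glm::mat4& inverseProjection,
                       const glm::mat4& rotation) {
    // NDC -> eye space
    glm::vec4 eyeSpace = inverseProjection * ndcPos;
    eyeSpace /= eyeSpace.w;
    // Treat the eye-space position as a ray from the eye and rotate it
    glm::vec3 ray = glm::normalize(glm::vec3(eyeSpace));
    ray = glm::mat3(glm::inverse(rotation)) * ray;
    // Scale the ray back onto the original texture plane (same depth)
    ray *= eyeSpace.z / ray.z;
    eyeSpace = glm::vec4(ray, 1.0f);
    // Eye space -> NDC again
    glm::vec4 reprojected = projection * eyeSpace;
    reprojected /= reprojected.w;
    return reprojected;
}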

View file

@@ -1888,7 +1888,6 @@ void Application::paintGL() {
     auto baseProjection = renderArgs.getViewFrustum().getProjection();
     auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
     float IPDScale = hmdInterface->getIPDScale();
-    mat4 headPose = displayPlugin->getHeadPose();
     // FIXME we probably don't need to set the projection matrix every frame,
     // only when the display plugin changes (or in non-HMD modes when the user

View file

@@ -142,7 +142,7 @@ void AudioScope::render(RenderArgs* renderArgs, int width, int height) {
     mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, -1000, 1000);
     batch.setProjectionTransform(legacyProjection);
     batch.setModelTransform(Transform());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     geometryCache->renderQuad(batch, x, y, w, h, backgroundColor, _audioScopeBackground);
     renderLineStrip(batch, _inputID, inputColor, x, y, _samplesPerScope, _scopeInputOffset, _scopeInput);

View file

@@ -103,7 +103,7 @@ void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
     geometryCache->useSimpleDrawPipeline(batch);
     batch.setProjectionTransform(mat4());
     batch.setModelTransform(Transform());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _uiTexture);
     geometryCache->renderUnitQuad(batch, glm::vec4(1));
@@ -123,7 +123,7 @@ void ApplicationOverlay::renderAudioScope(RenderArgs* renderArgs) {
     mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, ORTHO_NEAR_CLIP, ORTHO_FAR_CLIP);
     batch.setProjectionTransform(legacyProjection);
     batch.setModelTransform(Transform());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     // Render the audio scope
     DependencyManager::get<AudioScope>()->render(renderArgs, width, height);
@@ -142,7 +142,7 @@ void ApplicationOverlay::renderOverlays(RenderArgs* renderArgs) {
     mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, ORTHO_NEAR_CLIP, ORTHO_FAR_CLIP);
     batch.setProjectionTransform(legacyProjection);
     batch.setModelTransform(Transform());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     // Render all of the Script based "HUD" aka 2D overlays.
     // note: we call them HUD, as opposed to 2D, only because there are some cases of 3D HUD overlays, like the
@@ -168,7 +168,7 @@ void ApplicationOverlay::renderRearView(RenderArgs* renderArgs) {
     mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, ORTHO_NEAR_CLIP, ORTHO_FAR_CLIP);
     batch.setProjectionTransform(legacyProjection);
     batch.setModelTransform(Transform());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     float screenRatio = ((float)qApp->getDevicePixelRatio());
     float renderRatio = ((float)qApp->getRenderResolutionScale());
@@ -230,7 +230,7 @@ void ApplicationOverlay::renderDomainConnectionStatusBorder(RenderArgs* renderAr
     geometryCache->useSimpleDrawPipeline(batch);
     batch.setProjectionTransform(mat4());
     batch.setModelTransform(Transform());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     batch.setResourceTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
     // FIXME: THe line width of CONNECTION_STATUS_BORDER_LINE_WIDTH is not supported anymore, we ll need a workaround

View file

@@ -38,10 +38,10 @@ void LocalModelsOverlay::render(RenderArgs* args) {
         Transform transform = Transform();
         transform.setTranslation(args->getViewFrustum().getPosition() + getPosition());
-        batch->setViewTransform(transform);
+        batch->setViewTransform(transform, true);
         _entityTreeRenderer->render(args);
         transform.setTranslation(args->getViewFrustum().getPosition());
-        batch->setViewTransform(transform);
+        batch->setViewTransform(transform, true);
     }
 }

View file

@@ -121,7 +121,7 @@ void Overlays::renderHUD(RenderArgs* renderArgs) {
         batch.setResourceTexture(0, textureCache->getWhiteTexture()); // FIXME - do we really need to do this??
         batch.setProjectionTransform(legacyProjection);
         batch.setModelTransform(Transform());
-        batch.setViewTransform(Transform());
+        batch.clearViewTransform();
         thisOverlay->render(renderArgs);
     }

View file

@@ -475,7 +475,7 @@ void OpenGLDisplayPlugin::compositePointer() {
     batch.setFramebuffer(_currentFrame->framebuffer);
     batch.setPipeline(_cursorPipeline);
     batch.setResourceTexture(0, cursorData.texture);
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     batch.setModelTransform(cursorTransform);
     if (isStereo()) {
         for_each_eye([&](Eye eye) {
@@ -515,7 +515,7 @@ void OpenGLDisplayPlugin::compositeLayers() {
 void OpenGLDisplayPlugin::internalPresent() {
     gpu::Batch presentBatch;
     presentBatch.enableStereo(false);
-    presentBatch.setViewTransform(Transform());
+    presentBatch.clearViewTransform();
     presentBatch.setFramebuffer(gpu::FramebufferPointer());
     presentBatch.setViewportTransform(ivec4(uvec2(0), getSurfacePixels()));
     presentBatch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));

View file

@@ -42,7 +42,7 @@ bool DebugHmdDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
 // DLL based display plugins MUST initialize GLEW inside the DLL code.
 void DebugHmdDisplayPlugin::customizeContext() {
     glewExperimental = true;
-    GLenum err = glewInit();
+    glewInit();
     glGetError(); // clear the potential error from glewExperimental
     Parent::customizeContext();
 }

View file

@@ -23,7 +23,6 @@
 #include <CursorManager.h>
 #include <gl/GLWidget.h>
 #include <shared/NsightHelpers.h>
-#include <GeometryCache.h>
 #include <gpu/Context.h>
 #include <gpu/StandardShaderLib.h>
@@ -31,17 +30,14 @@
 #include <PathUtils.h>
-#include "hmd_reproject_vert.h"
-#include "hmd_reproject_frag.h"
 #include "../Logging.h"
 #include "../CompositorHelper.h"
 static const QString MONO_PREVIEW = "Mono Preview";
 static const QString REPROJECTION = "Allow Reprojection";
 static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
 static const QString DEVELOPER_MENU_PATH = "Developer>" + DisplayPlugin::MENU_PATH();
 static const bool DEFAULT_MONO_VIEW = true;
-static const int NUMBER_OF_HANDS = 2;
 static const glm::mat4 IDENTITY_MATRIX;
 //#define LIVE_SHADER_RELOAD 1
@@ -137,11 +133,11 @@ void HmdDisplayPlugin::uncustomizeContext() {
     _laserProgram.reset();
     _laserGeometry.reset();
 #endif
+    getGLBackend()->setCameraCorrection(mat4());
     Parent::uncustomizeContext();
 }
-void HmdDisplayPlugin::OverlayRender::build() {
-    auto geometryCache = DependencyManager::get<GeometryCache>();
+void HmdDisplayPlugin::OverlayRenderer::build() {
     vertices = std::make_shared<gpu::Buffer>();
     indices = std::make_shared<gpu::Buffer>();
@@ -204,7 +200,7 @@ void HmdDisplayPlugin::OverlayRender::build() {
     updatePipeline();
 }
-void HmdDisplayPlugin::OverlayRender::updatePipeline() {
+void HmdDisplayPlugin::OverlayRenderer::updatePipeline() {
     static const QString vsFile = PathUtils::resourcesPath() + "/shaders/hmd_ui_glow.vert";
     static const QString fsFile = PathUtils::resourcesPath() + "/shaders/hmd_ui_glow.frag";
@@ -239,7 +235,7 @@ void HmdDisplayPlugin::OverlayRender::updatePipeline() {
     }
 }
-void HmdDisplayPlugin::OverlayRender::render(HmdDisplayPlugin& plugin) {
+void HmdDisplayPlugin::OverlayRenderer::render(HmdDisplayPlugin& plugin) {
     for_each_eye([&](Eye eye){
         uniforms.mvp = mvps[eye];
         uniformBuffers[eye]->setSubData(0, uniforms);
@@ -264,42 +260,6 @@ void HmdDisplayPlugin::OverlayRender::render(HmdDisplayPlugin& plugin) {
     plugin._backend->render(batch);
 }
-#if 0
-void HmdDisplayPlugin::updateReprojectionProgram() {
-    static const QString vsFile = PathUtils::resourcesPath() + "/shaders/hmd_reproject.vert";
-    static const QString fsFile = PathUtils::resourcesPath() + "/shaders/hmd_reproject.frag";
-#if LIVE_SHADER_RELOAD
-    static qint64 vsBuiltAge = 0;
-    static qint64 fsBuiltAge = 0;
-    QFileInfo vsInfo(vsFile);
-    QFileInfo fsInfo(fsFile);
-    auto vsAge = vsInfo.lastModified().toMSecsSinceEpoch();
-    auto fsAge = fsInfo.lastModified().toMSecsSinceEpoch();
-    if (!_reprojectionProgram || vsAge > vsBuiltAge || fsAge > fsBuiltAge) {
-        vsBuiltAge = vsAge;
-        fsBuiltAge = fsAge;
-#else
-    if (!_reprojectionProgram) {
-#endif
-        QString vsSource = readFile(vsFile);
-        QString fsSource = readFile(fsFile);
-        ProgramPtr program;
-        try {
-            compileProgram(program, vsSource.toLocal8Bit().toStdString(), fsSource.toLocal8Bit().toStdString());
-            if (program) {
-                using namespace oglplus;
-                _reprojectionUniforms.reprojectionMatrix = Uniform<glm::mat3>(*program, "reprojection").Location();
-                _reprojectionUniforms.inverseProjectionMatrix = Uniform<glm::mat4>(*program, "inverseProjections").Location();
-                _reprojectionUniforms.projectionMatrix = Uniform<glm::mat4>(*program, "projections").Location();
-                _reprojectionProgram = program;
-            }
-        } catch (std::runtime_error& error) {
-            qWarning() << "Error building reprojection shader " << error.what();
-        }
-    }
-}
-#endif
 void HmdDisplayPlugin::updateLaserProgram() {
 #if 0
     static const QString vsFile = PathUtils::resourcesPath() + "/shaders/hmd_hand_lasers.vert";
@@ -348,136 +308,20 @@ void HmdDisplayPlugin::updatePresentPose() {
     _currentPresentFrameInfo.presentPose = _currentPresentFrameInfo.renderPose;
 }
-//static const std::string HMD_REPROJECT_FRAG = R"SHADER(
-//
-//in vec2 varTexCoord0;
-//
-//out vec4 outFragColor;
-//
-//uniform sampler2D sampler;
-//
-//void main() {
-// vec2 uv = varTexCoord0;
-// outFragColor = texture(sampler, uv); // vec4(varTexCoord0, 0.0, 1.0);
-//}
-//
-//)SHADER";
-void HmdDisplayPlugin::SceneRenderer::build() {
-    static const QString vsFile = "C:/Users/bdavis/Git/hifi/interface/resources/shaders/hmd_reproject.vert";
-    static const QString fsFile = "C:/Users/bdavis/Git/hifi/interface/resources/shaders/hmd_reproject.frag";
-#if 1 //LIVE_SHADER_RELOAD
-    static qint64 vsBuiltAge = 0;
-    static qint64 fsBuiltAge = 0;
-    QFileInfo vsInfo(vsFile);
-    QFileInfo fsInfo(fsFile);
-    auto vsAge = vsInfo.lastModified().toMSecsSinceEpoch();
-    auto fsAge = fsInfo.lastModified().toMSecsSinceEpoch();
-    if (!pipeline || vsAge > vsBuiltAge || fsAge > fsBuiltAge) {
-        vsBuiltAge = vsAge;
-        fsBuiltAge = fsAge;
-#else
-    if (!pipeline) {
-#endif
-        QString vsSource = readFile(vsFile);
-        QString fsSource = readFile(fsFile);
-        auto vs = gpu::Shader::createVertex(vsSource.toLocal8Bit().toStdString());
-        auto ps = gpu::Shader::createPixel(fsSource.toLocal8Bit().toStdString());
-        gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
-        gpu::gl::GLBackend::makeProgram(*program);
-        uniformsLocation = program->getBuffers().findLocation("reprojectionBuffer");
-        gpu::StatePointer state = gpu::StatePointer(new gpu::State());
-        state->setDepthTest(gpu::State::DepthTest(false));
-        pipeline = gpu::Pipeline::create(program, state);
-    }
-    if (!uniformBuffer) {
-        uniformBuffer = std::make_shared<gpu::Buffer>(sizeof(Uniforms), nullptr);
-    }
-    if (!vertices) {
-        static const uint16_t stacks = 128;
-        static const uint16_t slices = 64;
-        static const vec3 increment = vec3(1) / vec3(slices, stacks, 1);
-        std::vector<vec3> vertexBuffer;
-        vertexCount = stacks * slices * 3 * 2;
-        for (size_t x = 0; x < slices; ++x) {
-            for (size_t y = 0; y < stacks; ++y) {
-                vertexBuffer.push_back(vec3(x, y + 1, 0) * increment);
-                vertexBuffer.push_back(vec3(x, y, 0) * increment);
-                vertexBuffer.push_back(vec3(x + 1, y + 1, 0) * increment);
-                vertexBuffer.push_back(vec3(x + 1, y + 1, 0) * increment);
-                vertexBuffer.push_back(vec3(x, y, 0) * increment);
-                vertexBuffer.push_back(vec3(x + 1, y, 0) * increment);
-            }
-        }
-        vertices = std::make_shared<gpu::Buffer>();
-        vertices->setData(sizeof(vec3) * vertexBuffer.size(), (gpu::Byte*)vertexBuffer.data());
-        vertices->flush();
-        format = std::make_shared<gpu::Stream::Format>();
-        format->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
-    }
-}
-void HmdDisplayPlugin::SceneRenderer::update(const glm::mat4& rotation) {
-    build();
-    {
-        uniforms.rotation = mat4();
-        float correctionMagnitude = glm::angle(glm::quat_cast(rotation));
-        if (correctionMagnitude > 0.001f) {
-            uniforms.rotation = rotation;
-        }
-        static size_t i = 0;
-        if (0 == (++i % 10)) {
-            qDebug() << "Correction angle size " << correctionMagnitude;
-        }
-    }
-    uniformBuffer->setSubData(0, uniforms);
-    uniformBuffer->flush();
-}
-void HmdDisplayPlugin::SceneRenderer::render(gpu::Batch& batch) {
-    if (pipeline) {
-        batch.setPipeline(pipeline);
-        batch.setInputFormat(format);
-        batch.setInputBuffer(gpu::Stream::POSITION,
-            gpu::BufferView(vertices, 0, vertices->getSize(), sizeof(vec3), format->getAttributes().at(gpu::Stream::POSITION)._element));
-        batch.draw(gpu::TRIANGLES, vertexCount);
-    }
-}
 void HmdDisplayPlugin::compositeScene() {
-    {
-        auto batchPose = glm::dmat3(glm::mat3(_currentFrame->pose));
-        auto currentPose = glm::dmat3(glm::mat3(_currentPresentFrameInfo.presentPose));
-        auto correction = glm::inverse(batchPose) * currentPose;
-        _sceneRenderer.update(glm::mat4(glm::dmat4(correction)));
-    }
-    {
-        gpu::Batch batch;
-        batch.enableStereo(false);
-        batch.setViewportTransform(ivec4(uvec2(), _renderTargetSize));
-        batch.setFramebuffer(_compositeFramebuffer);
-        batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(1, 1, 0, 1));
-        _backend->render(batch);
-    }
-    {
-        gpu::Batch batch;
-        if (_sceneRenderer.uniformsLocation >= 0) {
-            batch.setUniformBuffer(_sceneRenderer.uniformsLocation, _sceneRenderer.uniformBuffer);
-        }
-        batch.setViewportTransform(ivec4(uvec2(), _renderTargetSize));
-        batch.setViewTransform(Transform());
-        batch.setProjectionTransform(mat4());
-        batch.setFramebuffer(_compositeFramebuffer);
-        batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
-        _sceneRenderer.render(batch);
-        _backend->render(batch);
-    }
+    gpu::Batch batch;
+    batch.enableStereo(false);
+    batch.setFramebuffer(_compositeFramebuffer);
+    batch.setViewportTransform(ivec4(uvec2(), _renderTargetSize));
+    batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(1, 1, 0, 1));
+    batch.clearViewTransform();
+    batch.setProjectionTransform(mat4());
+    batch.setPipeline(_presentPipeline);
+    batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
+    batch.draw(gpu::TRIANGLE_STRIP, 4);
+    _backend->render(batch);
 }
 void HmdDisplayPlugin::compositeOverlay() {
@@ -627,7 +471,7 @@ void HmdDisplayPlugin::internalPresent() {
     if (_enablePreview) {
         gpu::Batch presentBatch;
         presentBatch.enableStereo(false);
-        presentBatch.setViewTransform(Transform());
+        presentBatch.clearViewTransform();
         presentBatch.setFramebuffer(gpu::FramebufferPointer());
         presentBatch.setViewportTransform(ivec4(uvec2(0), getSurfacePixels()));
         presentBatch.setResourceTexture(0, _compositeTexture);
@@ -663,6 +507,14 @@ void HmdDisplayPlugin::updateFrameData() {
     }
     updatePresentPose();
+    if (_currentFrame) {
+        auto batchPose = _currentFrame->pose;
+        auto currentPose = _currentPresentFrameInfo.presentPose;
+        auto correction = glm::inverse(batchPose) * currentPose;
+        getGLBackend()->setCameraCorrection(correction);
+    }
 }
 glm::mat4 HmdDisplayPlugin::getHeadPose() const {
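
Taken together, the plugin change replaces the dedicated reprojection pass with a single camera-correction matrix: updateFrameData() compares the pose the frame was rendered with against the pose sampled just before present and hands the difference to the GL backend. A minimal sketch of that relationship with plain glm (the surrounding engine types are assumed, this is not the plugin's code):

#include <glm/glm.hpp>

// correction maps the view used at render time onto the freshly sampled
// present-time view, so the backend can re-apply it to every camera view
// transform it replays from the batch.
glm::mat4 computeCameraCorrection(const glm::mat4& batchPose,      // pose the frame was rendered with
                                  const glm::mat4& presentPose) {  // pose sampled just before present
    return glm::inverse(batchPose) * presentPose;
}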

View file

@@ -109,24 +109,7 @@ private:
     glm::uvec2 _prevWindowSize { 0, 0 };
     qreal _prevDevicePixelRatio { 0 };
-    struct SceneRenderer {
-        int32_t uniformsLocation{ -1 };
-        uint32_t vertexCount;
-        struct Uniforms {
-            mat4 rotation;
-        } uniforms;
-        gpu::Stream::FormatPointer format;
-        gpu::BufferPointer vertices;
-        gpu::PipelinePointer pipeline;
-        gpu::BufferPointer uniformBuffer;
-        void build();
-        void update(const glm::mat4& rotation);
-        void render(gpu::Batch& batch);
-    } _sceneRenderer;
-    struct OverlayRender {
+    struct OverlayRenderer {
         gpu::Stream::FormatPointer format;
         gpu::BufferPointer vertices;
         gpu::BufferPointer indices;
@@ -160,6 +143,7 @@ private:
         void updatePipeline();
         void render(HmdDisplayPlugin& plugin);
     } _overlay;
 #if 0
     ProgramPtr _previewProgram;
     struct PreviewUniforms {

View file

@@ -71,7 +71,7 @@ glm::uvec2 InterleavedStereoDisplayPlugin::getRecommendedRenderSize() const {
 void InterleavedStereoDisplayPlugin::internalPresent() {
     gpu::Batch presentBatch;
     presentBatch.enableStereo(false);
-    presentBatch.setViewTransform(Transform());
+    presentBatch.clearViewTransform();
     presentBatch.setFramebuffer(gpu::FramebufferPointer());
     presentBatch.setViewportTransform(ivec4(uvec2(0), getSurfacePixels()));
     presentBatch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));

View file

@@ -1,109 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
struct ReprojectionData {
mat4 projections[2];
mat4 inverseProjections[2];
mat4 rotation;
};
layout(std140) uniform reprojectionBuffer {
ReprojectionData reprojection;
};
in vec2 varTexCoord0;
out vec4 outFragColor;
uniform sampler2D sampler;
vec4 toNdcSpaceFromUv(vec2 uv) {
vec4 result = vec4(uv, 0.0, 1.0);
result.xy *= 2.0;
result.xy -= 1.0;
return result;
}
vec4 toNdcSpaceFromStereoUv(vec2 uv) {
if (uv.x >= 0.5) {
uv.x -= 0.5;
}
uv.x *= 2.0;
return toNdcSpaceFromUv(uv);
}
vec2 toUvFromNdcSpace(vec4 ndc) {
ndc /= ndc.w;
vec2 result = ndc.xy;
result += 1.0;
result /= 2.0;
return result;
}
void main() {
vec2 uv = varTexCoord0;
mat4 eyeInverseProjection;
mat4 eyeProjection;
vec2 uvmin = vec2(0.0);
vec2 uvmax = vec2(1.0);
// determine the correct projection and inverse projection to use.
if (uv.x < 0.5) {
uvmax.x = 0.5;
eyeInverseProjection = reprojection.inverseProjections[0];
eyeProjection = reprojection.projections[0];
} else {
uvmin.x = 0.5;
uvmax.x = 1.0;
eyeInverseProjection = reprojection.inverseProjections[1];
eyeProjection = reprojection.projections[1];
}
// Account for stereo in calculating the per-eye NDC coordinates
vec4 ndcSpace = toNdcSpaceFromStereoUv(varTexCoord0);
// Convert from NDC to eyespace
vec4 eyeSpace = eyeInverseProjection * ndcSpace;
eyeSpace /= eyeSpace.w;
// Convert to a noramlized ray
vec3 ray = eyeSpace.xyz;
ray = normalize(ray);
// Adjust the ray by the rotation
vec4 ray4 = reprojection.rotation * vec4(ray, 1.0);
ray4 /= ray4.w;
ray = ray4.xyz;
// Project back on to the texture plane
ray *= eyeSpace.z / ray.z;
// Update the eyespace vector
eyeSpace.xyz = ray;
// Reproject back into NDC
ndcSpace = eyeProjection * eyeSpace;
// Calculate the new UV coordinates
if (uv.x >= 0.5) {
uv = toUvFromNdcSpace(ndcSpace);
uv.x += 1.0;
} else {
uv = toUvFromNdcSpace(ndcSpace);
}
uv.x /= 2.0;
if (any(greaterThan(uv, uvmax)) || any(lessThan(uv, uvmin))) {
outFragColor = vec4(0.0, 0.0, 0.0, 1.0);
} else {
outFragColor = texture(sampler, uv);
}
}

View file

@@ -1,18 +0,0 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
layout(location = 0) out vec3 outPosition;
layout(location = 1) out vec2 outTexCoord;
void main() {
outTexCoord = TexCoord;
outPosition = Position;
gl_Position = vec4(Position, 1);
}

View file

@@ -279,7 +279,8 @@ void GLBackend::render(Batch& batch) {
     // Finalize the batch by moving all the instanced rendering into the command buffer
     batch.preExecute();
-    _stereo._skybox = batch.isSkyboxEnabled();
+    _transform._skybox = _stereo._skybox = batch.isSkyboxEnabled();
     // Allow the batch to override the rendering stereo settings
     // for things like full framebuffer copy operations (deferred lighting passes)
     bool savedStereo = _stereo._enable;

View file

@@ -44,6 +44,7 @@ public:
     ~GLBackend();
+    void setCameraCorrection(const Mat4& correction);
     void render(Batch& batch) final;
     // This call synchronize the Full Backend cache with the current GLState
@@ -245,7 +246,10 @@ protected:
     GLuint _drawCallInfoBuffer { 0 };
     GLuint _objectBufferTexture { 0 };
     size_t _cameraUboSize { 0 };
+    bool _viewIsCamera{ false };
+    bool _skybox { false };
     Transform _view;
+    Mat4 _correction;
     Mat4 _projection;
     Vec4i _viewport { 0, 0, 1, 1 };
     Vec2 _depthRange { 0.0f, 1.0f };

View file

@@ -13,12 +13,17 @@
 using namespace gpu;
 using namespace gpu::gl;
+void GLBackend::setCameraCorrection(const Mat4& correction) {
+    _transform._correction = correction;
+}
 // Transform Stage
 void GLBackend::do_setModelTransform(Batch& batch, size_t paramOffset) {
 }
 void GLBackend::do_setViewTransform(Batch& batch, size_t paramOffset) {
     _transform._view = batch._transforms.get(batch._params[paramOffset]._uint);
+    _transform._viewIsCamera = batch._params[paramOffset + 1]._uint != 0;
     _transform._invalidView = true;
 }
@@ -82,6 +87,16 @@ void GLBackend::TransformStageState::preUpdate(size_t commandIndex, const Stereo
     }
     if (_invalidView) {
+        // Apply the correction
+        if (_viewIsCamera && _correction != glm::mat4()) {
+            PROFILE_RANGE_EX("Correct Camera!", 0xFFFF0000, 1);
+            Transform result;
+            _view.mult(result, _view, _correction);
+            if (_skybox) {
+                result.setTranslation(vec3());
+            }
+            _view = result;
+        }
         // This is when the _view matrix gets assigned
         _view.getInverseMatrix(_camera._view);
     }
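
At replay time the backend composes the stored correction into any view transform that was flagged as a camera, skips views that were explicitly cleared (overlays, full-screen blits), and drops the translation for skybox draws. A rough equivalent of the preUpdate branch above, written against plain glm rather than the engine's Transform class (names are illustrative):

#include <glm/glm.hpp>

// Sketch of the correction applied in TransformStageState::preUpdate:
// only camera views are corrected, and skybox views keep rotation only.
glm::mat4 applyCameraCorrection(const glm::mat4& view,
                                const glm::mat4& correction,
                                bool viewIsCamera,
                                bool skybox) {
    if (!viewIsCamera || correction == glm::mat4(1.0f)) {
        return view;   // non-camera or identity case: leave the view untouched
    }
    glm::mat4 corrected = view * correction;   // same order as _view.mult(result, _view, _correction)
    if (skybox) {
        corrected[3] = glm::vec4(0.0f, 0.0f, 0.0f, 1.0f);   // strip translation, keep orientation
    }
    return corrected;
}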

View file

@@ -232,10 +232,11 @@ void Batch::setModelTransform(const Transform& model) {
     _invalidModel = true;
 }
-void Batch::setViewTransform(const Transform& view) {
+void Batch::setViewTransform(const Transform& view, bool camera) {
     ADD_COMMAND(setViewTransform);
+    uint cameraFlag = camera ? 1 : 0;
     _params.emplace_back(_transforms.cache(view));
+    _params.emplace_back(cameraFlag);
 }
 void Batch::setProjectionTransform(const Mat4& proj) {

View file

@@ -172,7 +172,8 @@ public:
     // WARNING: ViewTransform transform from eye space to world space, its inverse is composed
     // with the ModelTransform to create the equivalent of the gl ModelViewMatrix
     void setModelTransform(const Transform& model);
-    void setViewTransform(const Transform& view);
+    void clearViewTransform() { setViewTransform(Transform(), false); }
+    void setViewTransform(const Transform& view, bool camera = true);
     void setProjectionTransform(const Mat4& proj);
     // Viewport is xy = low left corner in framebuffer, zw = width height of the viewport, expressed in pixels
     void setViewportTransform(const Vec4i& viewport);
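
In other words, setViewTransform() now records whether the transform is the real camera (the default) so the backend can correct it when the batch is replayed, while clearViewTransform() is shorthand for an identity, non-camera view used by HUD and blit passes. Hypothetical call sites, assuming only the gpu::Batch API declared above (not part of the commit):

// Illustrative usage of the new Batch view API.
void recordExample(gpu::Batch& batch,
                   const Transform& cameraView,   // the HMD / camera view for the scene pass
                   const Transform& shadowView) { // a light's view for a shadow pass
    batch.setViewTransform(cameraView);        // camera == true by default: eligible for late correction
    // ... scene draws ...
    batch.setViewTransform(shadowView, false); // a real view, but not the camera: never corrected
    // ... shadow draws ...
    batch.clearViewTransform();                // identity, non-camera view for HUD / blit passes
    // ... full-screen draws ...
}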

View file

@@ -353,7 +353,7 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
     batch.setViewportTransform(args->_viewport);
     batch.setProjectionTransform(glm::mat4());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     Transform model;
     model.setTranslation(glm::vec3(sMin, tMin, 0.0f));
View file

@@ -119,7 +119,7 @@ void Antialiasing::run(const render::SceneContextPointer& sceneContext, const re
     args->getViewFrustum().evalProjectionMatrix(projMat);
     args->getViewFrustum().evalViewTransform(viewMat);
     batch.setProjectionTransform(projMat);
-    batch.setViewTransform(viewMat);
+    batch.setViewTransform(viewMat, true);
     batch.setModelTransform(Transform());
     // FXAA step

View file

@@ -394,7 +394,7 @@ void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const Ren
     args->getViewFrustum().evalProjectionMatrix(projMat);
     args->getViewFrustum().evalViewTransform(viewMat);
     batch.setProjectionTransform(projMat);
-    batch.setViewTransform(viewMat);
+    batch.setViewTransform(viewMat, true);
     batch.setModelTransform(Transform());
     // TODO REMOVE: Temporary until UI

View file

@@ -562,7 +562,7 @@ void RenderDeferredLocals::run(const render::SceneContextPointer& sceneContext,
     auto textureFrameTransform = gpu::Framebuffer::evalSubregionTexcoordTransformCoefficients(deferredFramebuffer->getFrameSize(), monoViewport);
     batch.setProjectionTransform(monoProjMat);
-    batch.setViewTransform(monoViewTransform);
+    batch.setViewTransform(monoViewTransform, true);
     // Splat Point lights
     if (points && !deferredLightingEffect->_pointLights.empty()) {

View file

@@ -55,7 +55,7 @@ void RenderShadowMap::run(const render::SceneContextPointer& sceneContext, const
         vec4(vec3(1.0, 1.0, 1.0), 0.0), 1.0, 0, true);
     batch.setProjectionTransform(shadow.getProjection());
-    batch.setViewTransform(shadow.getView());
+    batch.setViewTransform(shadow.getView(), false);
     auto shadowPipeline = _shapePlumber->pickPipeline(args, ShapeKey());
     auto shadowSkinnedPipeline = _shapePlumber->pickPipeline(args, ShapeKey::Builder().withSkinned());

View file

@@ -173,7 +173,7 @@ void LinearDepthPass::run(const render::SceneContextPointer& sceneContext, const
     batch.setViewportTransform(depthViewport);
     batch.setProjectionTransform(glm::mat4());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(_linearDepthFramebuffer->getDepthFrameSize(), depthViewport));
     batch.setUniformBuffer(DepthLinearPass_FrameTransformSlot, frameTransform->getFrameTransformBuffer());
@@ -459,7 +459,7 @@ void SurfaceGeometryPass::run(const render::SceneContextPointer& sceneContext, c
     batch.enableStereo(false);
     batch.setProjectionTransform(glm::mat4());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     batch.setViewportTransform(curvatureViewport);
     batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(_surfaceGeometryFramebuffer->getSourceFrameSize(), curvatureViewport));

View file

@@ -72,7 +72,7 @@ void ToneMappingEffect::render(RenderArgs* args, const gpu::TexturePointer& ligh
     batch.setViewportTransform(args->_viewport);
     batch.setProjectionTransform(glm::mat4());
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, args->_viewport));
     batch.setPipeline(_blitLightBuffer);

View file

@@ -103,7 +103,7 @@ void DrawSceneOctree::run(const SceneContextPointer& sceneContext,
     batch.setViewportTransform(args->_viewport);
     batch.setProjectionTransform(projMat);
-    batch.setViewTransform(viewMat);
+    batch.setViewTransform(viewMat, true);
     batch.setModelTransform(Transform());
     // bind the one gpu::Pipeline we need
@@ -153,7 +153,7 @@ void DrawSceneOctree::run(const SceneContextPointer& sceneContext,
     Transform crosshairModel;
     crosshairModel.setTranslation(glm::vec3(0.0, 0.0, -1000.0));
     crosshairModel.setScale(1000.0 * tan(glm::radians(angle))); // Scaling at the actual tan of the lod angle => Multiplied by TWO
-    batch.setViewTransform(Transform());
+    batch.clearViewTransform();
     batch.setModelTransform(crosshairModel);
     batch.setPipeline(getDrawLODReticlePipeline());
     batch.draw(gpu::TRIANGLE_STRIP, 4, 0);
@@ -211,7 +211,7 @@ void DrawItemSelection::run(const SceneContextPointer& sceneContext,
     batch.setViewportTransform(args->_viewport);
     batch.setProjectionTransform(projMat);
-    batch.setViewTransform(viewMat);
+    batch.setViewTransform(viewMat, true);
     batch.setModelTransform(Transform());
     // bind the one gpu::Pipeline we need

View file

@@ -172,7 +172,7 @@ void DrawStatus::run(const SceneContextPointer& sceneContext,
     batch.setViewportTransform(args->_viewport);
     batch.setProjectionTransform(projMat);
-    batch.setViewTransform(viewMat);
+    batch.setViewTransform(viewMat, true);
     batch.setModelTransform(Transform());
     // bind the one gpu::Pipeline we need

View file

@@ -8,10 +8,6 @@
 #include "NsightHelpers.h"
-#ifdef _WIN32
-#if defined(NSIGHT_FOUND)
-#include "nvToolsExt.h"
-#include <QtCore/QCoreApplication>
 #include <QtCore/QThread>
 QThread* RENDER_THREAD = nullptr;
@@ -20,6 +16,10 @@ bool isRenderThread() {
     return QThread::currentThread() == RENDER_THREAD;
 }
+#ifdef _WIN32
+#if defined(NSIGHT_FOUND)
+#include "nvToolsExt.h"
 ProfileRange::ProfileRange(const char *name) {
     if (!isRenderThread()) {
         return;

View file

@@ -9,14 +9,16 @@
 #ifndef hifi_gl_NsightHelpers_h
 #define hifi_gl_NsightHelpers_h
+class QThread;
+// FIXME find a better place for this, probably in the GL library
+extern QThread* RENDER_THREAD;
+extern bool isRenderThread();
 #ifdef _WIN32
 #include <stdint.h>
-#include <QtCore/QThread>
-extern QThread* RENDER_THREAD;
-extern bool isRenderThread();
 class ProfileRange {
 public:
     ProfileRange(const char *name);

View file

@@ -43,7 +43,6 @@ static mat4 _sensorResetMat;
 static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
 bool _openVrDisplayActive { false };
 bool OpenVrDisplayPlugin::isSupported() const {
     return openVrSupported();
 }
@@ -253,17 +252,19 @@ void OpenVrDisplayPlugin::postPreview() {
     // Flush and wait for swap.
     PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
-    PoseData nextRender;
+    PoseData nextRender, nextSim;
     nextRender.frameIndex = presentCount();
-    vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
+    vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nextSim.vrPoses, vr::k_unMaxTrackedDeviceCount);
     glm::mat4 resetMat;
     withPresentThreadLock([&] {
         resetMat = _sensorResetMat;
     });
     nextRender.update(resetMat);
+    nextSim.update(resetMat);
     withPresentThreadLock([&] {
-        _nextSimPoseData = nextRender;
+        _nextSimPoseData = nextSim;
     });
     _nextRenderPoseData = nextRender;
     _hmdActivityLevel = vr::k_EDeviceActivityLevel_UserInteraction; // _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
@@ -275,7 +276,6 @@ bool OpenVrDisplayPlugin::isHmdMounted() const {
 void OpenVrDisplayPlugin::updatePresentPose() {
     _currentPresentFrameInfo.presentPose = _nextRenderPoseData.poses[vr::k_unTrackedDeviceIndex_Hmd];
-    //_currentPresentFrameInfo.presentPose = _currentPresentFrameInfo.renderPose;
 }
 bool OpenVrDisplayPlugin::suppressKeyboard() {
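
The postPreview() change above also asks WaitGetPoses for the predicted game poses instead of passing nullptr, so the simulation receives its own pose set (nextSim) while present keeps the render poses. In plain OpenVR terms, the pattern is roughly the following sketch (illustrative only, not the plugin's code):

#include <openvr.h>

// Fetch render-time and predicted simulation-time poses in one call,
// as the modified postPreview() now does.
void fetchPoses(vr::TrackedDevicePose_t (&renderPoses)[vr::k_unMaxTrackedDeviceCount],
                vr::TrackedDevicePose_t (&gamePoses)[vr::k_unMaxTrackedDeviceCount]) {
    vr::VRCompositor()->WaitGetPoses(renderPoses, vr::k_unMaxTrackedDeviceCount,
                                     gamePoses, vr::k_unMaxTrackedDeviceCount);
}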

View file

@@ -241,7 +241,7 @@ public:
     {
         auto geometryCache = DependencyManager::get<GeometryCache>();
         gpu::Batch presentBatch;
-        presentBatch.setViewTransform(Transform());
+        presentBatch.clearViewTransform();
         presentBatch.setFramebuffer(gpu::FramebufferPointer());
         presentBatch.setResourceTexture(0, frame->framebuffer->getRenderBuffer(0));
         presentBatch.setPipeline(_presentPipeline);