Mirror of https://github.com/JulianGro/overte.git (synced 2025-04-13 21:26:40 +02:00)
Some work to try to find the odd resolution bug
Parent: cf739db3a5
Commit: 9f0201878d
11 changed files with 216 additions and 91 deletions
@@ -20,6 +20,7 @@
 #include <gpu/Context.h>
 #include <shaders/Shaders.h>
 #include <render/ShapePipeline.h>
+#include <MathUtils.h>

 #include "RenderUtilsLogging.h"

@@ -40,13 +41,18 @@ gpu::PipelinePointer AmbientOcclusionEffect::_buildNormalsPipeline;
 AmbientOcclusionFramebuffer::AmbientOcclusionFramebuffer() {
 }

-bool AmbientOcclusionFramebuffer::updateLinearDepth(const gpu::TexturePointer& linearDepthBuffer) {
+bool AmbientOcclusionFramebuffer::update(const gpu::TexturePointer& linearDepthBuffer, int resolutionLevel, bool isStereo) {
     // If the depth buffer or size changed, we need to delete our FBOs
     bool reset = false;
-    if ((_linearDepthTexture != linearDepthBuffer)) {
+    if (_linearDepthTexture != linearDepthBuffer) {
         _linearDepthTexture = linearDepthBuffer;
         reset = true;
     }
+    if (_resolutionLevel != resolutionLevel || isStereo != _isStereo) {
+        _resolutionLevel = resolutionLevel;
+        _isStereo = isStereo;
+        reset = true;
+    }
     if (_linearDepthTexture) {
         auto newFrameSize = glm::ivec2(_linearDepthTexture->getDimensions());
         if (_frameSize != newFrameSize) {
@@ -76,17 +82,38 @@ gpu::TexturePointer AmbientOcclusionFramebuffer::getLinearDepthTexture() {
 }

 void AmbientOcclusionFramebuffer::allocate() {
-    auto width = _frameSize.x;
-    auto height = _frameSize.y;
-    auto format = gpu::Element::COLOR_R_8;
+    // Full frame
+    {
+        auto width = _frameSize.x;
+        auto height = _frameSize.y;
+        auto format = gpu::Element::COLOR_R_8;

-    _occlusionTexture = gpu::Texture::createRenderBuffer(format, width, height, gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR, gpu::Sampler::WRAP_CLAMP));
-    _occlusionFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("occlusion"));
-    _occlusionFramebuffer->setRenderBuffer(0, _occlusionTexture);
+        _occlusionTexture = gpu::Texture::createRenderBuffer(format, width, height, gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR, gpu::Sampler::WRAP_CLAMP));
+        _occlusionFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("occlusion"));
+        _occlusionFramebuffer->setRenderBuffer(0, _occlusionTexture);

-    _occlusionBlurredTexture = gpu::Texture::createRenderBuffer(format, width, height, gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR, gpu::Sampler::WRAP_CLAMP));
-    _occlusionBlurredFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("occlusionBlurred"));
-    _occlusionBlurredFramebuffer->setRenderBuffer(0, _occlusionBlurredTexture);
+        _occlusionBlurredTexture = gpu::Texture::createRenderBuffer(format, width, height, gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR, gpu::Sampler::WRAP_CLAMP));
+        _occlusionBlurredFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("occlusionBlurred"));
+        _occlusionBlurredFramebuffer->setRenderBuffer(0, _occlusionBlurredTexture);
+    }
+
+    // Lower res frame
+    {
+        auto sideSize = _frameSize;
+        if (_isStereo) {
+            sideSize.x >>= 1;
+        }
+        sideSize = divideRoundUp(sideSize, 1 << _resolutionLevel);
+        if (_isStereo) {
+            sideSize.x <<= 1;
+        }
+        auto width = sideSize.x;
+        auto height = sideSize.y;
+
+        _normalTexture = gpu::Texture::createRenderBuffer(gpu::Element::COLOR_RGBA_32, width, height, gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT, gpu::Sampler::WRAP_CLAMP));
+        _normalFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("ssaoNormals"));
+        _normalFramebuffer->setRenderBuffer(0, _normalTexture);
+    }
 }

 gpu::FramebufferPointer AmbientOcclusionFramebuffer::getOcclusionFramebuffer() {
@@ -117,31 +144,17 @@ gpu::TexturePointer AmbientOcclusionFramebuffer::getOcclusionBlurredTexture() {
     return _occlusionBlurredTexture;
 }

-void AmbientOcclusionFramebuffer::allocate(int resolutionLevel) {
-    auto width = _frameSize.x >> resolutionLevel;
-    auto height = _frameSize.y >> resolutionLevel;
-
-    _normalTexture = gpu::Texture::createRenderBuffer(gpu::Element::COLOR_R11G11B10, width, height, gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT, gpu::Sampler::WRAP_CLAMP));
-    _normalFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("ssaoNormals"));
-    _normalFramebuffer->setRenderBuffer(0, _normalTexture);
-    _resolutionLevel = resolutionLevel;
-}
-
-gpu::FramebufferPointer AmbientOcclusionFramebuffer::getNormalFramebuffer(int resolutionLevel) {
-    if (!_normalFramebuffer || resolutionLevel != _resolutionLevel) {
-        allocate(resolutionLevel);
+gpu::FramebufferPointer AmbientOcclusionFramebuffer::getNormalFramebuffer() {
+    if (!_normalFramebuffer) {
+        allocate();
     }
     return _normalFramebuffer;
 }

-gpu::TexturePointer AmbientOcclusionFramebuffer::getNormalTexture(int resolutionLevel) {
-    if (!_normalTexture || resolutionLevel != _resolutionLevel) {
-        allocate(resolutionLevel);
-    }
-    return _normalTexture;
-}
-
+gpu::TexturePointer AmbientOcclusionFramebuffer::getNormalTexture() {
+    if (!_normalTexture) {
+        allocate();
     }
     return _normalTexture;
 }

@@ -377,6 +390,28 @@ void AmbientOcclusionEffect::updateBlurParameters() {
     vblur.scaleHeight.z = frameSize.y;
 }

+void AmbientOcclusionEffect::updateFramebufferSizes() {
+    auto& params = _aoParametersBuffer.edit();
+    const int widthScale = _framebuffer->isStereo() & 1;
+    auto sourceFrameSize = _framebuffer->getSourceFrameSize();
+    const int resolutionLevel = _aoParametersBuffer.get().getResolutionLevel();
+    // Depth is at maximum half depth
+    const int depthResolutionLevel = std::min(1, resolutionLevel);
+
+    sourceFrameSize.x >>= widthScale;
+
+    params._sideSizes[0].x = _framebuffer->getNormalTexture()->getWidth() >> widthScale;
+    params._sideSizes[0].y = _framebuffer->getNormalTexture()->getHeight();
+    params._sideSizes[0].z = resolutionLevel;
+    params._sideSizes[0].w = depthResolutionLevel;
+
+    params._sideSizes[1].x = params._sideSizes[0].x;
+    params._sideSizes[1].y = params._sideSizes[0].y;
+    auto occlusionSplitSize = divideRoundUp(sourceFrameSize, 1 << (resolutionLevel + SSAO_USE_QUAD_SPLIT));
+    params._sideSizes[1].z = occlusionSplitSize.x;
+    params._sideSizes[1].w = occlusionSplitSize.y;
+}
+
 const gpu::PipelinePointer& AmbientOcclusionEffect::getOcclusionPipeline() {
     if (!_occlusionPipeline) {
         gpu::ShaderPointer program = gpu::Shader::createProgram(shader::render_utils::program::ssao_makeOcclusion);
@@ -466,11 +501,20 @@ void AmbientOcclusionEffect::run(const render::RenderContextPointer& renderConte
     const auto& frameTransform = inputs.get0();
     const auto& linearDepthFramebuffer = inputs.get2();

+    const int resolutionLevel = _aoParametersBuffer->getResolutionLevel();
+    const auto resolutionScale = powf(0.5f, resolutionLevel);
+
     auto linearDepthTexture = linearDepthFramebuffer->getLinearDepthTexture();
     auto occlusionDepthTexture = linearDepthTexture;
     auto sourceViewport = args->_viewport;
-    auto occlusionViewport = sourceViewport;
+    // divideRoundUp is used to compute the quarter or half resolution render sizes.
+    // We need to take the rounded up resolution.
+    auto occlusionViewport = divideRoundUp(sourceViewport, 1 << resolutionLevel);
     auto firstBlurViewport = sourceViewport;
+#if SSAO_USE_QUAD_SPLIT
+    auto splitViewport = divideRoundUp(sourceViewport, 1 << (resolutionLevel + SSAO_USE_QUAD_SPLIT));
+#endif
+    firstBlurViewport.w = divideRoundUp(firstBlurViewport.w, 1 << resolutionLevel);

     if (!_gpuTimer) {
         _gpuTimer = std::make_shared<gpu::RangeTimer>(__FUNCTION__);
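The divideRoundUp comments above are the heart of the odd-resolution hunt: a plain right shift truncates, so on an odd-sized source the reduced render target ends up one pixel short of the viewport derived from it. A minimal standalone sketch of the difference (editor's example with made-up sizes, reusing a copy of the divideRoundUp template this commit adds in MathUtils.h):

    #include <cassert>
    #include <glm/glm.hpp>

    // Copied from libraries/shared/src/MathUtils.h (added by this commit) so the sketch compiles on its own.
    template <class T>
    T divideRoundUp(const T& numerator, int divisor) {
        return (numerator + divisor - T(1)) / divisor;
    }

    int main() {
        // Hypothetical source viewport of 1920x1081 (odd height), resolution level 1 (half resolution).
        glm::ivec4 sourceViewport(0, 0, 1920, 1081);
        glm::ivec4 truncated = sourceViewport >> 1;                    // old path: (0, 0, 960, 540)
        glm::ivec4 roundedUp = divideRoundUp(sourceViewport, 1 << 1);  // new path: (0, 0, 960, 541)
        assert(truncated.w == 540 && roundedUp.w == 541);              // the shift loses the last row
        return 0;
    }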
@@ -480,16 +524,13 @@ void AmbientOcclusionEffect::run(const render::RenderContextPointer& renderConte
         _framebuffer = std::make_shared<AmbientOcclusionFramebuffer>();
     }

-    const int resolutionLevel = _aoParametersBuffer->getResolutionLevel();
-    const auto resolutionScale = powf(0.5f, resolutionLevel);
     if (resolutionLevel > 0) {
-        occlusionViewport = occlusionViewport >> resolutionLevel;
-        firstBlurViewport.w = firstBlurViewport.w >> resolutionLevel;
         occlusionDepthTexture = linearDepthFramebuffer->getHalfLinearDepthTexture();
     }

-    if (_framebuffer->updateLinearDepth(linearDepthTexture)) {
+    if (_framebuffer->update(linearDepthTexture, resolutionLevel, args->isStereo())) {
         updateBlurParameters();
+        updateFramebufferSizes();
     }

     auto occlusionFBO = _framebuffer->getOcclusionFramebuffer();
@@ -499,7 +540,6 @@ void AmbientOcclusionEffect::run(const render::RenderContextPointer& renderConte
     outputs.edit1() = _aoParametersBuffer;

-    auto framebufferSize = _framebuffer->getSourceFrameSize();

     auto occlusionPipeline = getOcclusionPipeline();
     auto firstHBlurPipeline = getHBlurPipeline();
     auto lastVBlurPipeline = getVBlurPipeline();
@@ -509,8 +549,8 @@ void AmbientOcclusionEffect::run(const render::RenderContextPointer& renderConte
 #if SSAO_USE_QUAD_SPLIT
     auto gatherPipeline = getGatherPipeline();
     auto buildNormalsPipeline = getBuildNormalsPipeline();
-    auto occlusionNormalFramebuffer = _framebuffer->getNormalFramebuffer(resolutionLevel);
-    auto occlusionNormalTexture = _framebuffer->getNormalTexture(resolutionLevel);
+    auto occlusionNormalFramebuffer = _framebuffer->getNormalFramebuffer();
+    auto occlusionNormalTexture = _framebuffer->getNormalTexture();
 #endif

     // Update sample rotation
@@ -567,7 +607,6 @@ void AmbientOcclusionEffect::run(const render::RenderContextPointer& renderConte
 #else
             batch.setFramebuffer(occlusionFBO);
 #endif
-            batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(1.0f));
             batch.setPipeline(occlusionPipeline);
             batch.setResourceTexture(render_utils::slot::texture::SsaoDepth, occlusionDepthTexture);

@@ -31,20 +31,19 @@ public:
     gpu::FramebufferPointer getOcclusionBlurredFramebuffer();
     gpu::TexturePointer getOcclusionBlurredTexture();

-    gpu::FramebufferPointer getNormalFramebuffer(int resolutionLevel);
-    gpu::TexturePointer getNormalTexture(int resolutionLevel);
+    gpu::FramebufferPointer getNormalFramebuffer();
+    gpu::TexturePointer getNormalTexture();

     // Update the source framebuffer size which will drive the allocation of all the other resources.
-    bool updateLinearDepth(const gpu::TexturePointer& linearDepthBuffer);
+    bool update(const gpu::TexturePointer& linearDepthBuffer, int resolutionLevel, bool isStereo);
     gpu::TexturePointer getLinearDepthTexture();
     const glm::ivec2& getSourceFrameSize() const { return _frameSize; }

+    bool isStereo() const { return _isStereo; }
+
 protected:

     void clear();
     void allocate();
-    void allocate(int resolutionLevel);

     gpu::TexturePointer _linearDepthTexture;
@@ -60,6 +59,7 @@ protected:

     glm::ivec2 _frameSize;
     int _resolutionLevel{ 0 };
+    bool _isStereo{ false };
 };

 using AmbientOcclusionFramebufferPointer = std::shared_ptr<AmbientOcclusionFramebuffer>;
@@ -167,6 +167,7 @@ private:

     void updateGaussianDistribution();
     void updateBlurParameters();
+    void updateFramebufferSizes();

     AOParametersBuffer _aoParametersBuffer;
     FrameParametersBuffer _aoFrameParametersBuffer[SSAO_SPLIT_COUNT];
@@ -124,17 +124,18 @@ vec2 getStereoSideSize(int resolutionLevel) {
     return vec2(getStereoSideWidth(resolutionLevel), getStereoSideHeight(resolutionLevel));
 }

-ivec4 getStereoSideInfo(int xPos, int resolutionLevel) {
-    int sideWidth = int(getStereoSideWidth(resolutionLevel));
+ivec4 getStereoSideInfoFromWidth(int xPos, int sideWidth) {
     return ivec4(xPos < sideWidth ? ivec2(0, 0) : ivec2(1, sideWidth), sideWidth, isStereo());
 }

-int getStereoSide(ivec4 sideInfo) {
-    return sideInfo.x;
+ivec4 getStereoSideInfo(int xPos, int resolutionLevel) {
+    int sideWidth = int(getStereoSideWidth(resolutionLevel));
+    return getStereoSideInfoFromWidth(xPos, sideWidth);
 }

-bool isStereoFromInfo(ivec4 sideInfo) {
-    return sideInfo.w != 0;
+
+int getStereoSide(ivec4 sideInfo) {
+    return sideInfo.x;
 }

 float evalZeyeFromZdb(float depth) {
@@ -11,6 +11,7 @@
 #include "SurfaceGeometryPass.h"

 #include <limits>
+#include <MathUtils.h>

 #include <gpu/Context.h>
 #include <shaders/Shaders.h>
@@ -28,7 +29,7 @@ namespace ru {
 LinearDepthFramebuffer::LinearDepthFramebuffer() {
 }

-void LinearDepthFramebuffer::update(const gpu::TexturePointer& depthBuffer, const gpu::TexturePointer& normalTexture) {
+void LinearDepthFramebuffer::update(const gpu::TexturePointer& depthBuffer, const gpu::TexturePointer& normalTexture, bool isStereo) {
     //If the depth buffer or size changed, we need to delete our FBOs
     bool reset = false;
     if (_primaryDepthTexture != depthBuffer || _normalTexture != normalTexture) {
@@ -38,10 +39,17 @@ void LinearDepthFramebuffer::update(const gpu::TexturePointer& depthBuffer, cons
     }
     if (_primaryDepthTexture) {
         auto newFrameSize = glm::ivec2(_primaryDepthTexture->getDimensions());
-        if (_frameSize != newFrameSize) {
+        if (_frameSize != newFrameSize || _isStereo != isStereo) {
             _frameSize = newFrameSize;
-            _halfFrameSize = newFrameSize >> 1;
-
+            _halfFrameSize = _frameSize;
+            if (isStereo) {
+                _halfFrameSize.x >>= 1;
+            }
+            _halfFrameSize = divideRoundUp(_halfFrameSize, 2);
+            if (isStereo) {
+                _halfFrameSize.x <<= 1;
+            }
+            _isStereo = isStereo;
             reset = true;
         }
     }
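For stereo rendering the new code rounds up per eye and then restores the full width, which is the part the old newFrameSize >> 1 could not get right. A short worked example (editor's sketch, arbitrary sizes) of what _halfFrameSize becomes:

    #include <cassert>
    #include <glm/glm.hpp>

    // Same helper as libraries/shared/src/MathUtils.h, copied so the sketch is self-contained.
    template <class T>
    T divideRoundUp(const T& numerator, int divisor) {
        return (numerator + divisor - T(1)) / divisor;
    }

    int main() {
        // Hypothetical stereo depth buffer: 1366 pixels per eye, odd height.
        glm::ivec2 frameSize(2732, 1365);
        glm::ivec2 halfFrameSize = frameSize;
        halfFrameSize.x >>= 1;                            // per-eye width: 1366
        halfFrameSize = divideRoundUp(halfFrameSize, 2);  // (683, 683), height rounded up
        halfFrameSize.x <<= 1;                            // both eyes again: 1366
        assert(halfFrameSize == glm::ivec2(1366, 683));
        assert((frameSize >> 1) == glm::ivec2(1366, 682)); // the old computation drops the last row
        return 0;
    }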
@@ -66,7 +74,7 @@ void LinearDepthFramebuffer::allocate() {

     // For Linear Depth:
     const uint16_t LINEAR_DEPTH_MAX_MIP_LEVEL = 5;
-    // Point sampling of the depth is need for the AmbientOcclusionEffect in HBAO, as well as the clamp to edge
+    // Point sampling of the depth is needed for the AmbientOcclusionEffect in HBAO, as well as the clamp to edge
     const auto depthSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_POINT, gpu::Sampler::WRAP_CLAMP);
     _linearDepthTexture = gpu::Texture::createRenderBuffer(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::RED), width, height, LINEAR_DEPTH_MAX_MIP_LEVEL,
         depthSampler);
@@ -153,7 +161,7 @@ void LinearDepthPass::run(const render::RenderContextPointer& renderContext, con
     auto depthBuffer = deferredFramebuffer->getPrimaryDepthTexture();
     auto normalTexture = deferredFramebuffer->getDeferredNormalTexture();

-    _linearDepthFramebuffer->update(depthBuffer, normalTexture);
+    _linearDepthFramebuffer->update(depthBuffer, normalTexture, args->isStereo());

     auto linearDepthFBO = _linearDepthFramebuffer->getLinearDepthFramebuffer();
     auto linearDepthTexture = _linearDepthFramebuffer->getLinearDepthTexture();
@@ -172,7 +180,7 @@ void LinearDepthPass::run(const render::RenderContextPointer& renderContext, con
     auto downsamplePipeline = getDownsamplePipeline(renderContext);

     auto depthViewport = args->_viewport;
-    auto halfViewport = depthViewport >> 1;
+    auto halfViewport = divideRoundUp(depthViewport, 2);
     float clearLinearDepth = args->getViewFrustum().getFarClip() * 2.0f;

     gpu::doInBatch("LinearDepthPass::run", args->_context, [=](gpu::Batch& batch) {
@@ -195,7 +203,9 @@ void LinearDepthPass::run(const render::RenderContextPointer& renderContext, con

         // Downsample
         batch.setViewportTransform(halfViewport);
-
+        Transform model;
+        model.setScale( glm::vec3((depthViewport.z >> 1) / float(halfViewport.z), (depthViewport.w >> 1) / float(halfViewport.w), 1.0f) );
+        batch.setModelTransform(model);
         batch.setFramebuffer(downsampleFBO);
         batch.setResourceTexture(ru::Texture::SurfaceGeometryDepth, linearDepthTexture);
         batch.setResourceTexture(ru::Texture::SurfaceGeometryNormal, normalTexture);
@@ -35,7 +35,7 @@ public:
     gpu::TexturePointer getHalfNormalTexture();

     // Update the depth buffer which will drive the allocation of all the other resources according to its size.
-    void update(const gpu::TexturePointer& depthBuffer, const gpu::TexturePointer& normalTexture);
+    void update(const gpu::TexturePointer& depthBuffer, const gpu::TexturePointer& normalTexture, bool isStereo);
     const glm::ivec2& getDepthFrameSize() const { return _frameSize; }

     void setResolutionLevel(int level) { _resolutionLevel = std::max(0, level); }
@@ -59,6 +59,7 @@ protected:
     glm::ivec2 _frameSize;
     glm::ivec2 _halfFrameSize;
     int _resolutionLevel{ 0 };
+    bool _isStereo{ false };
 };

 using LinearDepthFramebufferPointer = std::shared_ptr<LinearDepthFramebuffer>;
@@ -47,11 +47,32 @@ layout(binding=RENDER_UTILS_BUFFER_SSAO_FRAME_PARAMS) uniform ambientOcclusionFr
 float getPerspectiveScale() {
     return (params._resolutionInfo.z);
 }
-int getResolutionLevel() {
+
+int getResolutionLevel() {
     return int(params._resolutionInfo.x);
 }
+
+vec2 getNormalsSideSize() {
+    return params._sideSizes[0].xy;
+}
+int getNormalsResolutionLevel() {
+    return int(params._sideSizes[0].z);
+}
+int getDepthResolutionLevel() {
+    return int(params._sideSizes[0].w);
+}
+vec2 getOcclusionSideSize() {
+    return params._sideSizes[1].xy;
+}
+vec2 getOcclusionSplitSideSize() {
+    return params._sideSizes[1].zw;
+}
+
+ivec2 getWidthHeightRoundUp(int resolutionLevel) {
+    ivec2 fullRes = ivec2(getWidthHeight(0));
+    int resolutionDivisor = 1 << resolutionLevel;
+    return (fullRes + resolutionDivisor - 1) / resolutionDivisor;
+}

 float getRadius() {
     return params._radiusInfo.x;
 }
@@ -205,6 +226,26 @@ layout(binding=RENDER_UTILS_TEXTURE_SSAO_NORMAL) uniform sampler2D normalTex;
 ivec2 getDepthTextureSize(int level) {
     return textureSize(depthPyramidTex, level);
 }
+ivec2 getDepthTextureSideSize(int level) {
+    ivec2 size = getDepthTextureSize(level);
+    size.x >>= int(isStereo()) & 1;
+    return size;
+}
+
+ivec2 getNormalTextureSize(int level) {
+    return textureSize(normalTex, level);
+}
+ivec2 getNormalTextureSideSize(int level) {
+    ivec2 size = getNormalTextureSize(level);
+    size.x >>= int(isStereo()) & 1;
+    return size;
+}
+
+vec2 getStereoSideSizeRoundUp(int resolutionLevel) {
+    ivec2 fullRes = ivec2(getStereoSideSize(0));
+    int resolutionDivisor = 1 << resolutionLevel;
+    return vec2((fullRes + resolutionDivisor - 1) / resolutionDivisor);
+}

 float getZEyeAtPixel(ivec2 pixel, int level) {
     return -texelFetch(depthPyramidTex, pixel, level).x;
@@ -254,8 +295,8 @@ vec3 getMinDelta(vec3 centralPoint, vec3 offsetPointPos, vec3 offsetPointNeg) {
     return dot(delta0, delta0) < dot(delta1, delta1) ? delta0 : delta1;
 }

-vec3 buildNormal(ivec4 side, vec2 fragUVPos, ivec2 depthTexFragPixelPos, vec3 fragPosition, ivec2 depthTextureScale, vec2 sideImageSize) {
-    vec2 uvScale = vec2(1.0) / (sideImageSize * depthTextureScale);
+vec3 buildNormal(ivec4 side, vec2 fragUVPos, ivec2 depthTexFragPixelPos, vec3 fragPosition, vec2 depthTextureSize) {
+    vec2 uvScale = vec2(1.0) / depthTextureSize;
     vec3 fragPositionDxPos = buildPosition(side, fragUVPos, depthTexFragPixelPos, ivec2(1, 0), uvScale);
     vec3 fragPositionDyPos = buildPosition(side, fragUVPos, depthTexFragPixelPos, ivec2(0, 1), uvScale);
     vec3 fragPositionDxNeg = buildPosition(side, fragUVPos, depthTexFragPixelPos, ivec2(-1, 0), uvScale);
@@ -368,7 +409,7 @@ float computeHorizon(ivec4 side, vec2 shadedPixelPos, vec2 sideImageSize, vec2 d
 float evalVisibilityHBAO(ivec4 side, vec2 shadedPixelPos, vec2 sideImageSize, vec2 deltaTap, float diskPixelRadius,
                          vec3 fragPositionES, vec3 fragNormalES) {
     vec2 searchVec = deltaTap * diskPixelRadius;
-    float obscurance;
+    float obscurance = 0.0;

     // Forward search for h1
     obscurance = computeHorizon(side, shadedPixelPos, sideImageSize, deltaTap, fragPositionES, fragNormalES, searchVec, diskPixelRadius);
@@ -376,7 +417,7 @@ float evalVisibilityHBAO(ivec4 side, vec2 shadedPixelPos, vec2 sideImageSize, ve
     // Backward search for h2
     obscurance += computeHorizon(side, shadedPixelPos, sideImageSize, deltaTap, fragPositionES, fragNormalES, -searchVec, diskPixelRadius);

-    return obscurance * 0.5 / PI;
+    return obscurance;
 }

 <@endfunc@>
@@ -18,28 +18,30 @@
 layout(location=0) out vec4 outFragColor;

 void main(void) {
-    vec2 sideImageSize = getStereoSideSize(getResolutionLevel());
-    ivec2 renderSize = ivec2(sideImageSize) << ivec2(int(isStereo()) & 1, 0);
-
     // Pixel being shaded
     vec2 fragCoord = gl_FragCoord.xy;
     ivec2 fragPixelPos = ivec2(fragCoord.xy);

-    // Fetch the z under the pixel (stereo or not)
-    ivec2 depthTextureScale = getDepthTextureSize(0) / renderSize;
-    ivec2 depthTexFragPixelPos = fragPixelPos * depthTextureScale;
+    // Fetch the z under the pixel (stereo or not) from full res depth
+    int depthTextureRatio = 1 << getNormalsResolutionLevel();
+    ivec2 depthTexFragPixelPos = fragPixelPos * depthTextureRatio;
     float Zeye = getZEyeAtPixel(depthTexFragPixelPos, 0);

-    // Stereo side info
-    ivec4 side = getStereoSideInfo(fragPixelPos.x, getResolutionLevel());
+    // Stereo side info based on the real viewport size of this pass
+    ivec2 sideNormalsSize = ivec2( getNormalsSideSize() );
+    ivec4 side = getStereoSideInfoFromWidth(fragPixelPos.x, sideNormalsSize.x);

     // From now on, fragPixelPos is the pixel pos in the side
+    vec2 depthSideSize = getDepthTextureSideSize(0);
+    vec2 sideImageSize = depthSideSize / float(depthTextureRatio);
     fragPixelPos.x -= side.y;
     vec2 fragUVPos = (vec2(fragPixelPos) + vec2(0.5)) / sideImageSize;

     // The position and normal of the pixel fragment in Eye space
     vec3 fragPositionES = evalEyePositionFromZeye(side.x, Zeye, fragUVPos);
-    vec3 fragNormalES = buildNormal(side, fragUVPos, depthTexFragPixelPos, fragPositionES, depthTextureScale, sideImageSize);
+    vec3 fragNormalES = buildNormal(side, fragUVPos, depthTexFragPixelPos, fragPositionES, depthSideSize);
     vec3 absFragNormalES = abs(fragNormalES);

     fragNormalES /= max(absFragNormalES.z, max(absFragNormalES.x, absFragNormalES.y));
     outFragColor = vec4(vec3(fragNormalES)*0.5 + vec3(0.5), 1.0);
 }
@@ -26,7 +26,7 @@ void main(void) {
     // result (at the resolution level, of course)
     ivec2 destPixelCoord = ivec2(gl_FragCoord.xy);
     ivec2 sourcePixelCoord = destPixelCoord / 2;
-    ivec2 splitImageSize = ivec2(getWidthHeight(getResolutionLevel()+1));
+    ivec2 splitImageSize = getWidthHeightRoundUp(getResolutionLevel()+1);

     sourcePixelCoord += (destPixelCoord & ivec2(1)) * splitImageSize;

@@ -24,36 +24,36 @@
 layout(location=0) out vec4 outFragColor;

 void main(void) {
-    vec2 sideImageSize = getStereoSideSize(getResolutionLevel());
-    ivec2 renderSize = ivec2(sideImageSize) << ivec2(int(isStereo()) & 1, 0);
-
     // Pixel being shaded
     vec2 fragCoord = gl_FragCoord.xy;
     ivec2 fragPixelPos = ivec2(fragCoord.xy);
 #if SSAO_USE_QUAD_SPLIT
-    ivec2 splitImageSize = ivec2(getWidthHeight(getResolutionLevel()+1));
+    ivec2 splitImageSize = ivec2(getOcclusionSplitSideSize());
     fragPixelPos = ((fragPixelPos - getPixelOffset()*splitImageSize) * 2) + getPixelOffset();
 #endif

     // Fetch the z under the pixel (stereo or not)
-    ivec2 depthTextureScale = getDepthTextureSize(0) / renderSize;
-    ivec2 depthTexFragPixelPos = fragPixelPos * depthTextureScale;
+    int depthTextureRatio = 1 << (getResolutionLevel() - getDepthResolutionLevel());
+    ivec2 depthTexFragPixelPos = fragPixelPos * depthTextureRatio;
     float Zeye = getZEyeAtPixel(depthTexFragPixelPos, 0);
 #if SSAO_USE_QUAD_SPLIT
     vec3 fragNormalES = getNormalEyeAtPixel(fragPixelPos, 0);
 #endif

-    // Stereo side info
-    ivec4 side = getStereoSideInfo(fragPixelPos.x, getResolutionLevel());
+    // Stereo side info based on the real viewport size of this pass
+    ivec2 sideOcclusionSize = ivec2( getOcclusionSideSize() );
+    ivec4 side = getStereoSideInfoFromWidth(fragPixelPos.x, sideOcclusionSize.x);

     // From now on, fragPixelPos is the pixel pos in the side
+    vec2 depthSideSize = getDepthTextureSideSize(0);
+    vec2 sideImageSize = depthSideSize / float(depthTextureRatio);
     fragPixelPos.x -= side.y;
     vec2 fragUVPos = (vec2(fragPixelPos) + vec2(0.5)) / sideImageSize;

     // The position and normal of the pixel fragment in Eye space
     vec3 fragPositionES = evalEyePositionFromZeye(side.x, Zeye, fragUVPos);
 #if !SSAO_USE_QUAD_SPLIT
-    vec3 fragNormalES = buildNormal(side, fragUVPos, depthTexFragPixelPos, fragPositionES, depthTextureScale, sideImageSize);
+    vec3 fragNormalES = buildNormal(side, fragUVPos, depthTexFragPixelPos, fragPositionES, depthSideSize);
 #endif

     // Choose the screen-space sample radius
@@ -68,20 +68,28 @@ void main(void) {
     // Accumulate the obscurance for each samples
     float obscuranceSum = 0.0;
     int numSamples = int(getNumSamples());
+    float invNumSamples = getInvNumSamples();
+
+    // Steps are in the depth texture resolution
+    depthTexFragPixelPos = fragPixelPos * depthTextureRatio;
     for (int i = 0; i < numSamples; ++i) {
 #if SSAO_USE_HORIZON_BASED
         vec3 deltaTap = getUnitTapLocation(i, 1.0, randomPatternRotationAngle, PI);
-        obscuranceSum += evalVisibilityHBAO(side, vec2(fragPixelPos), sideImageSize, deltaTap.xy, diskPixelRadius, fragPositionES, fragNormalES);
+        obscuranceSum += evalVisibilityHBAO(side, vec2(depthTexFragPixelPos), depthSideSize, deltaTap.xy, diskPixelRadius, fragPositionES, fragNormalES);
 #else
-        vec3 tap = getTapLocationClampedSSAO(i, randomPatternRotationAngle, diskPixelRadius, fragPixelPos, sideImageSize);
-        vec2 tapPixelPos = vec2(fragPixelPos) + tap.xy;
-        vec3 tapUVZ = fetchTap(side, tapPixelPos, tap.z, sideImageSize);
+        vec3 tap = getTapLocationClampedSSAO(i, randomPatternRotationAngle, diskPixelRadius, depthTexFragPixelPos, depthSideSize);
+        vec2 tapPixelPos = vec2(depthTexFragPixelPos) + tap.xy;
+        vec3 tapUVZ = fetchTap(side, tapPixelPos, tap.z, depthSideSize);
         vec3 tapPositionES = evalEyePositionFromZeye(side.x, tapUVZ.z, tapUVZ.xy);
         obscuranceSum += float(tap.z > 0.0) * evalVisibilitySSAO(fragPositionES, fragNormalES, tapPositionES);
 #endif
     }

-    float occlusion = clamp(1.0 - obscuranceSum * getObscuranceScaling() * getInvNumSamples(), 0.0, 1.0);
+#if SSAO_USE_HORIZON_BASED
+    obscuranceSum *= 0.5 / PI;
+#endif
+
+    float occlusion = clamp(1.0 - obscuranceSum * getObscuranceScaling() * invNumSamples, 0.0, 1.0);

     outFragColor = vec4(vec3(occlusion), 1.0);
 }
@@ -40,6 +40,7 @@ struct AmbientOcclusionParams {
     SSAO_VEC4 _sampleInfo;
     SSAO_VEC4 _blurInfo;
     float _blurFilterTaps[SSAO_BLUR_GAUSSIAN_COEFS_COUNT];
+    SSAO_VEC4 _sideSizes[2];
 };

 struct AmbientOcclusionFrameParams {
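The new _sideSizes pair is the CPU-to-shader channel that updateFramebufferSizes() above fills in. As a reading aid (editor's summary of that function and of the matching shader getters, not text from the commit), the packing appears to be:

    // _sideSizes[0].xy : side size of the normals render target (per eye when stereo)    -> getNormalsSideSize()
    // _sideSizes[0].z  : AO resolution level                                             -> getNormalsResolutionLevel()
    // _sideSizes[0].w  : depth resolution level, clamped to at most 1 (half-res depth)   -> getDepthResolutionLevel()
    // _sideSizes[1].xy : side size used by the occlusion pass (same as [0].xy here)      -> getOcclusionSideSize()
    // _sideSizes[1].zw : rounded-up size of one split quadrant under SSAO_USE_QUAD_SPLIT -> getOcclusionSplitSideSize()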
libraries/shared/src/MathUtils.h (new file, 21 lines)
@@ -0,0 +1,21 @@
+//
+//  MathUtils.h
+//  libraries/shared/src
+//
+//  Created by Olivier Prat on 9/21/18.
+//  Copyright 2018 High Fidelity, Inc.
+//
+//  Distributed under the Apache License, Version 2.0.
+//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#ifndef hifi_MathUtils_h
+#define hifi_MathUtils_h
+
+template <class T>
+T divideRoundUp(const T& numerator, int divisor) {
+    return (numerator + divisor - T(1)) / divisor;
+}
+
+#endif // hifi_MathUtils_h
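A note on the template (editor's addition, not part of the commit): divideRoundUp only needs T to support adding an int, subtracting T(1), and dividing by an int, so the same three lines serve both the scalar and the glm vector call sites seen above. For example:

    // int        : divideRoundUp(5, 2)                      == 3
    // glm::ivec2 : divideRoundUp(glm::ivec2(5, 4), 2)       == glm::ivec2(3, 2)   (component-wise)
    // glm::ivec4 : divideRoundUp(glm::ivec4(0, 0, 5, 4), 2) == glm::ivec4(0, 0, 3, 2)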