Adding the depth aware blur
commit c325cc50dd (parent 28886f6ec3)
10 changed files with 120 additions and 14 deletions
@@ -111,9 +111,9 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
     addJob<DrawBackgroundDeferred>("DrawBackgroundDeferred", background);
 
     // Opaque all rendered, generate surface geometry buffers
-    const auto curvatureFramebuffer = addJob<SurfaceGeometryPass>("SurfaceGeometry", deferredFrameTransform);
+    const auto curvatureFramebufferAndDepth = addJob<SurfaceGeometryPass>("SurfaceGeometry", deferredFrameTransform);
 
-    addJob<render::BlurGaussian>("DiffuseCurvature", curvatureFramebuffer);
+    addJob<render::BlurGaussianDepthAware>("DiffuseCurvature", curvatureFramebufferAndDepth);
 
     // AO job

@@ -45,7 +45,7 @@ void SurfaceGeometryPass::configure(const Config& config) {
     }
 }
 
-void SurfaceGeometryPass::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, gpu::FramebufferPointer& curvatureFramebuffer) {
+void SurfaceGeometryPass::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, InputPair& curvatureAndDepth) {
     assert(renderContext->args);
     assert(renderContext->args->hasViewFrustum());
 
@@ -59,7 +59,8 @@ void SurfaceGeometryPass::run(const render::SceneContextPointer& sceneContext, c
 
     auto pyramidTexture = framebufferCache->getDepthPyramidTexture();
     auto curvatureFBO = framebufferCache->getCurvatureFramebuffer();
-    curvatureFramebuffer = curvatureFBO;
+    curvatureAndDepth.editFirst() = curvatureFBO;
+    curvatureAndDepth.editSecond() = pyramidTexture;
 
     auto curvatureTexture = framebufferCache->getCurvatureTexture();
 

@@ -40,13 +40,14 @@ signals:
 
 class SurfaceGeometryPass {
 public:
+    using InputPair = render::VaryingPair<gpu::FramebufferPointer, gpu::TexturePointer>;
     using Config = SurfaceGeometryPassConfig;
-    using JobModel = render::Job::ModelIO<SurfaceGeometryPass, DeferredFrameTransformPointer, gpu::FramebufferPointer, Config>;
+    using JobModel = render::Job::ModelIO<SurfaceGeometryPass, DeferredFrameTransformPointer, InputPair, Config>;
 
     SurfaceGeometryPass();
 
     void configure(const Config& config);
-    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, gpu::FramebufferPointer& curvatureFramebuffer);
+    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, InputPair& curvatureAndDepth);
 
     float getCurvatureDepthThreshold() const { return _parametersBuffer.get<Parameters>().curvatureInfo.x; }
     float getCurvatureBasisScale() const { return _parametersBuffer.get<Parameters>().curvatureInfo.y; }

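Note: SurfaceGeometryPass now publishes a pair (curvature framebuffer, depth-pyramid texture) instead of a bare framebuffer, so the depth-aware blur downstream can read scene depth. The sketch below is a self-contained stand-in, not the engine's real render::VaryingPair; editFirst()/editSecond() are taken from the hunks above, while the const getters on the consumer side are an assumption, since BlurGaussianDepthAware::run is not shown in this diff.

    #include <iostream>
    #include <memory>
    #include <string>

    // Simplified, hypothetical stand-in for render::VaryingPair<T0, T1>.
    template <class T0, class T1>
    class VaryingPairSketch {
    public:
        T0& editFirst() { return _first; }    // writer-side accessors, as used by SurfaceGeometryPass::run
        T1& editSecond() { return _second; }
        const T0& first() const { return _first; }    // assumed reader-side accessors
        const T1& second() const { return _second; }
    private:
        T0 _first;
        T1 _second;
    };

    // Stand-ins for gpu::FramebufferPointer and gpu::TexturePointer.
    using FramebufferPtr = std::shared_ptr<std::string>;
    using TexturePtr = std::shared_ptr<std::string>;

    int main() {
        VaryingPairSketch<FramebufferPtr, TexturePtr> curvatureAndDepth;

        // Producer side (SurfaceGeometryPass::run fills both slots):
        curvatureAndDepth.editFirst() = std::make_shared<std::string>("curvature framebuffer");
        curvatureAndDepth.editSecond() = std::make_shared<std::string>("depth pyramid texture");

        // Consumer side (BlurGaussianDepthAware::run reads them back):
        std::cout << *curvatureAndDepth.first() << " + " << *curvatureAndDepth.second() << "\n";
        return 0;
    }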
@@ -80,6 +80,8 @@ float scatterCurve = 0.25f;
 in vec2 varTexCoord0;
 out vec4 _fragColor;
 
+uniform vec3 uniformLightVector = vec3(1.0);
+
 void main(void) {
     // DeferredTransform deferredTransform = getDeferredTransform();
     // DeferredFragment frag = unpackDeferredFragment(deferredTransform, varTexCoord0);
@@ -91,11 +93,18 @@ void main(void) {
     vec3 bentNormalN = normal;
     vec3 bentNormalR = normalize( (diffusedCurvature.xyz - 0.5f) * 2.0f );
     float curvature = abs(diffusedCurvature.w * 2 - 1) * 0.5f * scatterCurve + scatterBase;
     // _fragColor = vec4(vec3(diffusedCurvature.xyz), 1.0);
 
     // --> Calculate the light vector.
-    vec3 lightVector = normalize(vec3(1.0f, 1.0f, 1.0f)); //normalize(lightPos - sourcePos.xyz);
+    vec3 lightVector = normalize(uniformLightVector); //normalize(lightPos - sourcePos.xyz);
 
-    _fragColor = vec4(fetchBRDF(dot(bentNormalR, lightVector), abs(diffusedCurvature.w * 2 - 1)), 1.0);
+    // _fragColor = vec4(fetchBRDF(dot(bentNormalR, lightVector), abs(diffusedCurvature.w * 2 - 1)), 1.0);
+
+    _fragColor = vec4(bentNormalR * lightVector, 1.0);
+
     //_fragColor = vec4(vec3(bentNormalR * 0.5 + 0.5), 1.0);
 
 
     /*
     // --> Optimise for skin diffusion profile.
     float diffuseBlendedR = dot(normalize(mix( bentNormalN.xyz, bentNormalN, normalBendR * normalBendFactor)), lightVector);

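Note: the new debug output writes bentNormalR * lightVector, which in GLSL is a component-wise product rather than a dot product, so each channel visualizes one axis of the blurred ("bent") normal against the controllable uniformLightVector. A small glm sketch of the same computation (names are illustrative):

    #include <cstdio>
    #include <glm/glm.hpp>

    // Mirrors the shader's debug _fragColor above: decode the bent normal from the
    // blurred curvature RGB, then take the component-wise product with the light vector.
    glm::vec3 debugColor(const glm::vec3& diffusedCurvatureRGB, const glm::vec3& uniformLightVector) {
        glm::vec3 bentNormalR = glm::normalize((diffusedCurvatureRGB - 0.5f) * 2.0f);
        glm::vec3 lightVector = glm::normalize(uniformLightVector);
        return bentNormalR * lightVector; // component-wise, not dot()
    }

    int main() {
        glm::vec3 c = debugColor(glm::vec3(0.9f, 0.5f, 0.5f), glm::vec3(1.0f));
        std::printf("%f %f %f\n", c.x, c.y, c.z);
        return 0;
    }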
@@ -26,6 +26,7 @@ enum BlurShaderBufferSlots {
 };
 enum BlurShaderMapSlots {
     BlurTask_SourceSlot = 0,
+    BlurTask_DepthSlot,
 };
 
 const float BLUR_NUM_SAMPLES = 7.0f;
@@ -57,6 +58,20 @@ void BlurParams::setFilterRadiusScale(float scale) {
     }
 }
 
+void BlurParams::setDepthPerspective(float oneOverTan2FOV) {
+    auto depthInfo = _parametersBuffer.get<Params>().depthInfo;
+    if (oneOverTan2FOV != depthInfo.w) {
+        _parametersBuffer.edit<Params>().depthInfo.w = oneOverTan2FOV;
+    }
+}
+
+void BlurParams::setDepthThreshold(float threshold) {
+    auto depthInfo = _parametersBuffer.get<Params>().depthInfo;
+    if (threshold != depthInfo.x) {
+        _parametersBuffer.edit<Params>().depthInfo.x = threshold;
+    }
+}
+
 BlurGaussian::BlurGaussian() {
     _parameters = std::make_shared<BlurParams>();
 }

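Note: setDepthPerspective() stores what the run() hunk further down feeds it: getProjection()[1][1]. For a standard perspective matrix (e.g. one built with glm::perspective) that element equals 1 / tan(fovY / 2), which matches the parameter name oneOverTan2FOV and acts as the distance to a unit-height projection window when the shader scales the blur step by depth. A minimal check, assuming a glm-style projection:

    #include <cassert>
    #include <cmath>
    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    int main() {
        float fovY = glm::radians(60.0f);
        glm::mat4 proj = glm::perspective(fovY, 16.0f / 9.0f, 0.1f, 100.0f);

        // Element [1][1] of the projection is the cotangent of the half vertical FOV,
        // i.e. the "oneOverTan2FOV" value passed to setDepthPerspective().
        float oneOverTanHalfFov = 1.0f / std::tan(fovY * 0.5f);
        assert(std::fabs(proj[1][1] - oneOverTanHalfFov) < 1e-5f);
        return 0;
    }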
@@ -200,6 +215,7 @@ gpu::PipelinePointer BlurGaussianDepthAware::getBlurVPipeline() {
         gpu::Shader::BindingSet slotBindings;
         slotBindings.insert(gpu::Shader::Binding(std::string("blurParamsBuffer"), BlurTask_ParamsSlot));
         slotBindings.insert(gpu::Shader::Binding(std::string("sourceMap"), BlurTask_SourceSlot));
+        slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), BlurTask_DepthSlot));
         gpu::Shader::makeProgram(*program, slotBindings);
 
         gpu::StatePointer state = gpu::StatePointer(new gpu::State());
@@ -222,6 +238,7 @@ gpu::PipelinePointer BlurGaussianDepthAware::getBlurHPipeline() {
         gpu::Shader::BindingSet slotBindings;
         slotBindings.insert(gpu::Shader::Binding(std::string("blurParamsBuffer"), BlurTask_ParamsSlot));
         slotBindings.insert(gpu::Shader::Binding(std::string("sourceMap"), BlurTask_SourceSlot));
+        slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), BlurTask_DepthSlot));
        gpu::Shader::makeProgram(*program, slotBindings);
 
         gpu::StatePointer state = gpu::StatePointer(new gpu::State());
@@ -270,6 +287,7 @@ bool BlurGaussianDepthAware::updateBlurringResources(const gpu::FramebufferPoint
 
 void BlurGaussianDepthAware::configure(const Config& config) {
     _parameters->setFilterRadiusScale(config.filterScale);
+    _parameters->setDepthThreshold(config.depthThreshold);
 }
 
 
@@ -292,6 +310,7 @@ void BlurGaussianDepthAware::run(const SceneContextPointer& sceneContext, const
     auto blurHPipeline = getBlurHPipeline();
 
     _parameters->setWidthHeight(args->_viewport.z, args->_viewport.w, args->_context->isStereo());
+    _parameters->setDepthPerspective(args->getViewFrustum().getProjection()[1][1]);
 
     gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
         batch.enableStereo(false);
@@ -299,6 +318,8 @@ void BlurGaussianDepthAware::run(const SceneContextPointer& sceneContext, const
 
         batch.setUniformBuffer(BlurTask_ParamsSlot, _parameters->_parametersBuffer);
 
+        batch.setResourceTexture(BlurTask_DepthSlot, depthTexture);
+
         batch.setFramebuffer(blurringResources.blurringFramebuffer);
         batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
 
@@ -312,6 +333,7 @@ void BlurGaussianDepthAware::run(const SceneContextPointer& sceneContext, const
         batch.draw(gpu::TRIANGLE_STRIP, 4);
 
         batch.setResourceTexture(BlurTask_SourceSlot, nullptr);
+        batch.setResourceTexture(BlurTask_DepthSlot, nullptr);
         batch.setUniformBuffer(BlurTask_ParamsSlot, nullptr);
     });
 }

@@ -24,6 +24,9 @@ public:
 
     void setFilterRadiusScale(float scale);
 
+    void setDepthPerspective(float oneOverTan2FOV);
+    void setDepthThreshold(float threshold);
+
     // Class describing the uniform buffer with all the parameters common to the blur shaders
     class Params {
     public:
@@ -33,6 +36,9 @@ public:
         // Filter info (radius scale
         glm::vec4 filterInfo{ 1.0f, 0.0f, 0.0f, 0.0f };
 
+        // Depth info (radius scale
+        glm::vec4 depthInfo{ 1.0f, 0.0f, 0.0f, 0.0f };
+
         // stereo info if blurring a stereo render
         glm::vec4 stereoInfo{ 0.0f };
 
@@ -89,10 +95,21 @@ protected:
     bool updateBlurringResources(const gpu::FramebufferPointer& sourceFramebuffer, BlurringResources& blurringResources);
 };
 
+class BlurGaussianDepthAwareConfig : public BlurGaussianConfig {
+    Q_OBJECT
+    Q_PROPERTY(float depthThreshold MEMBER depthThreshold NOTIFY dirty) // expose enabled flag
+public:
+
+    float depthThreshold{ 2.0f };
+signals:
+    void dirty();
+protected:
+};
+
 class BlurGaussianDepthAware {
 public:
+    using InputPair = VaryingPair<gpu::FramebufferPointer, gpu::TexturePointer>;
-    using Config = BlurGaussianConfig;
+    using Config = BlurGaussianDepthAwareConfig;
     using JobModel = Job::ModelI<BlurGaussianDepthAware, InputPair, Config>;
 
     BlurGaussianDepthAware();

@@ -24,6 +24,7 @@ const float gaussianDistributionOffset[NUM_TAPS] = float[](
 struct BlurParameters {
     vec4 resolutionInfo;
     vec4 filterInfo;
+    vec4 depthInfo;
     vec4 stereoInfo;
 };
 
@@ -35,6 +36,19 @@ vec2 getViewportInvWidthHeight() {
     return parameters.resolutionInfo.zw;
 }
 
+float getFilterScale() {
+    return parameters.filterInfo.x;
+}
+
+
+float getDepthThreshold() {
+    return parameters.depthInfo.x;
+}
+
+float getDepthPerspective() {
+    return parameters.depthInfo.w;
+}
+
 <@endfunc@>
 
 
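Note: putting the C++ setters and these shader getters side by side, the uniform block layout works out as sketched below. The depthInfo lanes come straight from setDepthThreshold()/setDepthPerspective() and getDepthThreshold()/getDepthPerspective(); the meaning of resolutionInfo.xy and of the unused lanes is an assumption, since only .zw is read here (as the inverse viewport size).

    #include <glm/glm.hpp>

    // Layout summary (sketch, not the engine header) of the blur parameters buffer.
    struct BlurParamsLayoutSketch {
        glm::vec4 resolutionInfo; // zw = 1/width, 1/height (getViewportInvWidthHeight); xy assumed width, height
        glm::vec4 filterInfo;     // x = filter radius scale (getFilterScale)
        glm::vec4 depthInfo;      // x = depth threshold, w = 1/tan(fovY/2), the "depth perspective"
        glm::vec4 stereoInfo;     // stereo rendering info, per the original comment
    };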
@@ -64,3 +78,47 @@ vec4 pixelShaderGaussian(vec2 texcoord, vec2 direction, vec2 pixelStep) {
 
 <@endfunc@>
 
+<@func declareBlurGaussianDepthAware()@>
+
+<$declareBlurUniforms()$>
+
+uniform sampler2D sourceMap;
+uniform sampler2D depthMap;
+
+vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep) {
+
+    float sampleDepth = texture(depthMap, texcoord).x;
+    vec4 sampleCenter = texture(sourceMap, texcoord);
+
+    // Calculate the width scale.
+    float distanceToProjectionWindow = getDepthPerspective();
+
+    float depthThreshold = getDepthThreshold();
+
+    // Calculate the final step to fetch the surrounding pixels.
+    float filterScale = getFilterScale();
+    float scale = distanceToProjectionWindow / sampleDepth;
+
+    vec2 finalStep = filterScale * scale * direction * pixelStep;
+
+    vec4 srcBlurred = vec4(0.0);
+
+    for(int i = 1; i < NUM_TAPS; i++) {
+        // Fetch color and depth for current sample.
+        vec2 sampleCoord = texcoord + (gaussianDistributionOffset[i] * finalStep);
+        float srcDepth = texture(depthMap, sampleCoord).x;
+        vec4 srcSample = texture(sourceMap, sampleCoord);
+
+
+        // If the difference in depth is huge, we lerp color back.
+        float s = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepth - sampleDepth), 0.0, 1.0);
+        srcSample = mix(srcSample, sampleCenter, s);
+
+        // Accumulate.
+        srcBlurred += gaussianDistributionCurve[i] * srcSample;
+    }
+
+    return srcBlurred;
+}
+
+<@endfunc@>
+

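Note: this function is the core of the new pass. The tap spacing shrinks as 1/depth (scale = distanceToProjectionWindow / sampleDepth), and any tap whose depth differs from the center is lerped back toward the center color, so the Gaussian does not bleed across silhouettes. The following is a hypothetical CPU reference of that per-tap logic on a 1-D row of pixels; the kernel offsets and weights are illustrative, not the engine's gaussianDistributionOffset/gaussianDistributionCurve tables.

    #include <algorithm>
    #include <array>
    #include <cmath>

    struct Pixel { float color; float depth; };

    // 1-D reference of the depth-aware blur loop in pixelShaderGaussianDepthAware.
    float depthAwareBlur1D(const std::array<Pixel, 64>& row, int center,
                           float filterScale, float depthThreshold,
                           float distanceToProjectionWindow) {
        const int NUM_TAPS = 7;
        const float offsets[NUM_TAPS] = { 0.0f, -3.0f, -2.0f, -1.0f, 1.0f, 2.0f, 3.0f };
        const float weights[NUM_TAPS] = { 0.20f, 0.10f, 0.15f, 0.15f, 0.15f, 0.15f, 0.10f };

        const float centerDepth = row[center].depth;
        const float centerColor = row[center].color;

        // The kernel footprint shrinks with depth: scale = distanceToProjectionWindow / sampleDepth.
        const float step = filterScale * (distanceToProjectionWindow / centerDepth);

        float blurred = 0.0f;
        for (int i = 1; i < NUM_TAPS; i++) {  // tap 0 (the center) is skipped, as in the shader
            int coord = std::clamp(center + int(std::round(offsets[i] * step)), 0, int(row.size()) - 1);
            float sampleDepth = row[coord].depth;
            float sampleColor = row[coord].color;

            // Large depth differences pull the sample back toward the center color,
            // so the blur does not bleed across depth discontinuities.
            float s = std::clamp(depthThreshold * distanceToProjectionWindow * filterScale *
                                 std::fabs(sampleDepth - centerDepth), 0.0f, 1.0f);
            sampleColor = sampleColor + (centerColor - sampleColor) * s;  // mix(sample, center, s)

            blurred += weights[i] * sampleColor;
        }
        return blurred;
    }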
@@ -10,8 +10,7 @@
 //
 
 <@include BlurTask.slh@>
-<$declareBlurGaussian()$>
-
+<$declareBlurGaussianDepthAware()$>
 
 in vec2 varTexCoord0;
 

@@ -10,8 +10,7 @@
 //
 
 <@include BlurTask.slh@>
-<$declareBlurGaussian()$>
-
+<$declareBlurGaussianDepthAware()$>
 
 in vec2 varTexCoord0;
 

@@ -32,7 +32,7 @@ Column {
 
     Column{
         Repeater {
-            model: [ "Blur Scale:filterScale:4.0" ]
+            model: [ "Blur Scale:filterScale:2.0", "Blur Depth Threshold:depthThreshold:100.0" ]
             ConfigSlider {
                 label: qsTr(modelData.split(":")[0])
                 integral: false