Save the current state of the work: adding a debug pass and evolving the ambient-occlusion (AO) job

This commit is contained in:
samcake 2016-07-31 16:05:25 -07:00
parent afb1ebf42e
commit beb42332ef
9 changed files with 506 additions and 172 deletions

View file

@ -30,6 +30,7 @@
#include "ssao_makePyramid_frag.h"
#include "ssao_makeOcclusion_frag.h"
#include "ssao_debugOcclusion_frag.h"
#include "ssao_makeHorizontalBlur_frag.h"
#include "ssao_makeVerticalBlur_frag.h"
@ -179,10 +180,6 @@ const int AmbientOcclusionEffect_LinearDepthMapSlot = 0;
const int AmbientOcclusionEffect_OcclusionMapSlot = 0;
AmbientOcclusionEffect::AmbientOcclusionEffect() {
FrameTransform frameTransform;
_frameTransformBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(FrameTransform), (const gpu::Byte*) &frameTransform));
Parameters parameters;
_parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Parameters), (const gpu::Byte*) &parameters));
}
void AmbientOcclusionEffect::configure(const Config& config) {
@ -192,41 +189,41 @@ void AmbientOcclusionEffect::configure(const Config& config) {
const double RADIUS_POWER = 6.0;
const auto& radius = config.radius;
if (radius != getRadius()) {
auto& current = _parametersBuffer.edit<Parameters>().radiusInfo;
if (radius != _parametersBuffer->getRadius()) {
auto& current = _parametersBuffer->radiusInfo;
current.x = radius;
current.y = radius * radius;
current.z = (float)(1.0 / pow((double)radius, RADIUS_POWER));
}
if (config.obscuranceLevel != getObscuranceLevel()) {
auto& current = _parametersBuffer.edit<Parameters>().radiusInfo;
if (config.obscuranceLevel != _parametersBuffer->getObscuranceLevel()) {
auto& current = _parametersBuffer->radiusInfo;
current.w = config.obscuranceLevel;
}
if (config.falloffBias != getFalloffBias()) {
auto& current = _parametersBuffer.edit<Parameters>().ditheringInfo;
if (config.falloffBias != _parametersBuffer->getFalloffBias()) {
auto& current = _parametersBuffer->ditheringInfo;
current.z = config.falloffBias;
}
if (config.edgeSharpness != getEdgeSharpness()) {
auto& current = _parametersBuffer.edit<Parameters>().blurInfo;
if (config.edgeSharpness != _parametersBuffer->getEdgeSharpness()) {
auto& current = _parametersBuffer->blurInfo;
current.x = config.edgeSharpness;
}
if (config.blurDeviation != getBlurDeviation()) {
auto& current = _parametersBuffer.edit<Parameters>().blurInfo;
if (config.blurDeviation != _parametersBuffer->getBlurDeviation()) {
auto& current = _parametersBuffer->blurInfo;
current.z = config.blurDeviation;
shouldUpdateGaussian = true;
}
if (config.numSpiralTurns != getNumSpiralTurns()) {
auto& current = _parametersBuffer.edit<Parameters>().sampleInfo;
if (config.numSpiralTurns != _parametersBuffer->getNumSpiralTurns()) {
auto& current = _parametersBuffer->sampleInfo;
current.z = config.numSpiralTurns;
}
if (config.numSamples != getNumSamples()) {
auto& current = _parametersBuffer.edit<Parameters>().sampleInfo;
if (config.numSamples != _parametersBuffer->getNumSamples()) {
auto& current = _parametersBuffer->sampleInfo;
current.x = config.numSamples;
current.y = 1.0f / config.numSamples;
}
@ -235,33 +232,36 @@ void AmbientOcclusionEffect::configure(const Config& config) {
_framebuffer = std::make_shared<AmbientOcclusionFramebuffer>();
}
if (config.perspectiveScale != _parametersBuffer->getPerspectiveScale()) {
_parametersBuffer->resolutionInfo.z = config.perspectiveScale;
}
_framebuffer->setResolutionLevel(config.resolutionLevel);
if (config.resolutionLevel != getResolutionLevel()) {
_parametersBuffer.edit<Parameters>().resolutionInfo.w = config.resolutionLevel;
if (config.resolutionLevel != _parametersBuffer->getResolutionLevel()) {
_parametersBuffer->resolutionInfo.w = config.resolutionLevel;
}
const auto& resolutionLevel = config.resolutionLevel;
if (resolutionLevel != getResolutionLevel()) {
auto& current = _parametersBuffer.edit<Parameters>().resolutionInfo;
if (resolutionLevel != _parametersBuffer->getResolutionLevel()) {
auto& current = _parametersBuffer->resolutionInfo;
current.x = (float)resolutionLevel;
// Communicate the change to the Framebuffer cache
// DependencyManager::get<FramebufferCache>()->setAmbientOcclusionResolutionLevel(resolutionLevel);
}
if (config.blurRadius != getBlurRadius()) {
auto& current = _parametersBuffer.edit<Parameters>().blurInfo;
if (config.blurRadius != _parametersBuffer->getBlurRadius()) {
auto& current = _parametersBuffer->blurInfo;
current.y = (float)config.blurRadius;
shouldUpdateGaussian = true;
}
if (config.ditheringEnabled != isDitheringEnabled()) {
auto& current = _parametersBuffer.edit<Parameters>().ditheringInfo;
if (config.ditheringEnabled != _parametersBuffer->isDitheringEnabled()) {
auto& current = _parametersBuffer->ditheringInfo;
current.x = (float)config.ditheringEnabled;
}
if (config.borderingEnabled != isBorderingEnabled()) {
auto& current = _parametersBuffer.edit<Parameters>().ditheringInfo;
if (config.borderingEnabled != _parametersBuffer->isBorderingEnabled()) {
auto& current = _parametersBuffer->ditheringInfo;
current.w = (float)config.borderingEnabled;
}
@ -277,7 +277,7 @@ const gpu::PipelinePointer& AmbientOcclusionEffect::getOcclusionPipeline() {
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionFrameTransformBuffer"), AmbientOcclusionEffect_FrameTransformSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("deferredFrameTransformBuffer"), AmbientOcclusionEffect_FrameTransformSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionParamsBuffer"), AmbientOcclusionEffect_ParamsSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("pyramidMap"), AmbientOcclusionEffect_LinearDepthMapSlot));
gpu::Shader::makeProgram(*program, slotBindings);
@ -339,14 +339,9 @@ const gpu::PipelinePointer& AmbientOcclusionEffect::getVBlurPipeline() {
return _vBlurPipeline;
}
void AmbientOcclusionEffect::setDepthInfo(float nearZ, float farZ) {
_frameTransformBuffer.edit<FrameTransform>().depthInfo = glm::vec4(nearZ*farZ, farZ -nearZ, -farZ, 0.0f);
}
void AmbientOcclusionEffect::updateGaussianDistribution() {
auto coefs = _parametersBuffer.edit<Parameters>()._gaussianCoefs;
GaussianDistribution::evalSampling(coefs, Parameters::GAUSSIAN_COEFS_LENGTH, getBlurRadius(), getBlurDeviation());
auto coefs = _parametersBuffer->_gaussianCoefs;
GaussianDistribution::evalSampling(coefs, Parameters::GAUSSIAN_COEFS_LENGTH, _parametersBuffer->getBlurRadius(), _parametersBuffer->getBlurDeviation());
}
void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs) {
@ -356,9 +351,9 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
RenderArgs* args = renderContext->args;
// FIXME: Different render modes should have different tasks
if (args->_renderMode != RenderArgs::DEFAULT_RENDER_MODE) {
return;
}
// if (args->_renderMode != RenderArgs::DEFAULT_RENDER_MODE) {
// return;
// }
const auto frameTransform = inputs.get0();
const auto deferredFramebuffer = inputs.get1();
@ -385,7 +380,8 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
auto occlusionFBO = _framebuffer->getOcclusionFramebuffer();
auto occlusionBlurredFBO = _framebuffer->getOcclusionBlurredFramebuffer();
outputs = _framebuffer;
outputs.edit0() = _framebuffer;
outputs.edit1() = _parametersBuffer;
auto framebufferSize = _framebuffer->getSourceFrameSize();
@ -394,39 +390,13 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
float tMin = args->_viewport.y / (float)framebufferSize.y;
float tHeight = args->_viewport.w / (float)framebufferSize.y;
auto resolutionLevel = getResolutionLevel();
auto resolutionLevel = _parametersBuffer->getResolutionLevel();
// Update the depth info with near and far (same for stereo)
setDepthInfo(args->getViewFrustum().getNearClip(), args->getViewFrustum().getFarClip());
_frameTransformBuffer.edit<FrameTransform>().pixelInfo = args->_viewport;
//_parametersBuffer.edit<Parameters>()._ditheringInfo.y += 0.25f;
// Running in stero ?
bool isStereo = args->_context->isStereo();
if (!isStereo) {
// Eval the mono projection
mat4 monoProjMat;
args->getViewFrustum().evalProjectionMatrix(monoProjMat);
_frameTransformBuffer.edit<FrameTransform>().projection[0] = monoProjMat;
_frameTransformBuffer.edit<FrameTransform>().stereoInfo = glm::vec4(0.0f, (float)args->_viewport.z, 0.0f, 0.0f);
} else {
mat4 projMats[2];
mat4 eyeViews[2];
args->_context->getStereoProjections(projMats);
args->_context->getStereoViews(eyeViews);
for (int i = 0; i < 2; i++) {
// Compose the mono Eye space to Stereo clip space Projection Matrix
auto sideViewMat = projMats[i] * eyeViews[i];
_frameTransformBuffer.edit<FrameTransform>().projection[i] = sideViewMat;
}
_frameTransformBuffer.edit<FrameTransform>().stereoInfo = glm::vec4(1.0f, (float)(args->_viewport.z >> 1), 0.0f, 1.0f);
}
auto occlusionPipeline = getOcclusionPipeline();
auto firstHBlurPipeline = getHBlurPipeline();
@ -446,11 +416,12 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
model.setScale(glm::vec3(sWidth, tHeight, 1.0f));
batch.setModelTransform(model);
batch.setUniformBuffer(AmbientOcclusionEffect_FrameTransformSlot, _frameTransformBuffer);
batch.setUniformBuffer(AmbientOcclusionEffect_FrameTransformSlot, frameTransform->getFrameTransformBuffer());
batch.setUniformBuffer(AmbientOcclusionEffect_ParamsSlot, _parametersBuffer);
// batch.generateTextureMips(linearDepthTexture);
// We need this with the mips levels
batch.generateTextureMips(linearDepthTexture);
// Occlusion pass
batch.setFramebuffer(occlusionFBO);
@ -460,7 +431,7 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
batch.draw(gpu::TRIANGLE_STRIP, 4);
/* if (getBlurRadius() > 0) {
if (_parametersBuffer->getBlurRadius() > 0) {
// Blur 1st pass
batch.setFramebuffer(occlusionBlurredFBO);
batch.setPipeline(firstHBlurPipeline);
@ -472,7 +443,7 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
batch.setPipeline(lastVBlurPipeline);
batch.setResourceTexture(AmbientOcclusionEffect_OcclusionMapSlot, occlusionBlurredFBO->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}*/
}
batch.setResourceTexture(AmbientOcclusionEffect_LinearDepthMapSlot, nullptr);
@ -484,3 +455,115 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
// Update the timer
std::static_pointer_cast<Config>(renderContext->jobConfig)->gpuTime = _gpuTimer.getAverage();
}
// Nothing to do at construction time: _parametersBuffer is set up by its own
// default constructor, and the debug pipeline is built lazily on first use
// (see getDebugPipeline()).
DebugAmbientOcclusion::DebugAmbientOcclusion() {
}
// Pull the latest UI/config values into this job.
// Copies the cursor-pixel toggle locally and pushes the debug cursor texcoord
// into the uniform buffer only when it actually changed, to avoid dirtying
// (and re-uploading) the GPU buffer every frame.
void DebugAmbientOcclusion::configure(const Config& config) {
    _showCursorPixel = config.showCursorPixel;

    // pixelInfo.xy holds the cursor texcoord; zw are currently unused (kept 0).
    auto cursorPos = glm::vec2(_parametersBuffer->pixelInfo);
    if (cursorPos != config.debugCursorTexcoord) {
        _parametersBuffer->pixelInfo = glm::vec4(config.debugCursorTexcoord, 0.0f, 0.0f);
    }
}
// Lazily build (on first call) and return the pipeline used to visualize the
// AO evaluation for the debug cursor pixel. Subsequent calls return the cached
// pipeline.
const gpu::PipelinePointer& DebugAmbientOcclusion::getDebugPipeline() {
    if (_debugPipeline) {
        return _debugPipeline;
    }

    // Fullscreen-quad vertex shader + dedicated debug-occlusion pixel shader.
    auto vertexShader = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
    auto pixelShader = gpu::Shader::createPixel(std::string(ssao_debugOcclusion_frag));
    gpu::ShaderPointer shaderProgram = gpu::Shader::createProgram(vertexShader, pixelShader);

    // Bind the same slots the main AO pass uses so the shared shader code lines up.
    gpu::Shader::BindingSet bindings;
    bindings.insert(gpu::Shader::Binding(std::string("deferredFrameTransformBuffer"), AmbientOcclusionEffect_FrameTransformSlot));
    bindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionParamsBuffer"), AmbientOcclusionEffect_ParamsSlot));
    bindings.insert(gpu::Shader::Binding(std::string("pyramidMap"), AmbientOcclusionEffect_LinearDepthMapSlot));
    gpu::Shader::makeProgram(*shaderProgram, bindings);

    // Write RGB only; leave destination alpha untouched.
    gpu::StatePointer pipelineState = gpu::StatePointer(new gpu::State());
    pipelineState->setColorWriteMask(true, true, true, false);

    // Good to go add the brand new pipeline
    _debugPipeline = gpu::Pipeline::create(shaderProgram, pipelineState);
    return _debugPipeline;
}
// Render-job entry point: draws a fullscreen debug visualization of the AO
// evaluation, reusing the frame transform, linear-depth target, and the AO
// parameters buffer produced by the main AmbientOcclusionEffect job (inputs 0-3).
void DebugAmbientOcclusion::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs) {
    assert(renderContext->args);
    assert(renderContext->args->hasViewFrustum());

    RenderArgs* args = renderContext->args;

    // FIXME: Different render modes should have different tasks
    // if (args->_renderMode != RenderArgs::DEFAULT_RENDER_MODE) {
    //     return;
    // }

    const auto frameTransform = inputs.get0();
    const auto deferredFramebuffer = inputs.get1();
    const auto linearDepthFramebuffer = inputs.get2();
    const auto ambientOcclusionUniforms = inputs.get3();

    // NOTE(review): normalTexture is fetched (and swapped for the half-res one
    // below) but never bound in the batch — presumably kept for an upcoming
    // debug mode; confirm before removing.
    auto linearDepthTexture = linearDepthFramebuffer->getLinearDepthTexture();
    auto normalTexture = deferredFramebuffer->getDeferredNormalTexture();
    auto sourceViewport = args->_viewport;
    auto occlusionViewport = sourceViewport;

    // Match the main AO pass: at reduced resolution levels, sample the half-res
    // depth/normal targets and shrink the occlusion viewport accordingly.
    auto resolutionLevel = ambientOcclusionUniforms->getResolutionLevel();

    if (resolutionLevel > 0) {
        linearDepthTexture = linearDepthFramebuffer->getHalfLinearDepthTexture();
        normalTexture = linearDepthFramebuffer->getHalfNormalTexture();
        occlusionViewport = occlusionViewport >> ambientOcclusionUniforms->getResolutionLevel();
    }

    auto framebufferSize = glm::ivec2(linearDepthTexture->getDimensions());

    // Texcoord sub-rect of the source viewport within the framebuffer,
    // used to scale/translate the fullscreen quad.
    float sMin = args->_viewport.x / (float)framebufferSize.x;
    float sWidth = args->_viewport.z / (float)framebufferSize.x;
    float tMin = args->_viewport.y / (float)framebufferSize.y;
    float tHeight = args->_viewport.w / (float)framebufferSize.y;

    // Running in stereo?
    // NOTE(review): isStereo is currently unused in this pass — confirm intent.
    bool isStereo = args->_context->isStereo();

    auto debugPipeline = getDebugPipeline();

    gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
        // Debug view is drawn mono even when the frame is stereo.
        batch.enableStereo(false);

        batch.setViewportTransform(sourceViewport);
        batch.setProjectionTransform(glm::mat4());
        batch.setViewTransform(Transform());

        // Place the quad over the viewport sub-rect in normalized coords.
        Transform model;
        model.setTranslation(glm::vec3(sMin, tMin, 0.0f));
        model.setScale(glm::vec3(sWidth, tHeight, 1.0f));
        batch.setModelTransform(model);

        batch.setUniformBuffer(AmbientOcclusionEffect_FrameTransformSlot, frameTransform->getFrameTransformBuffer());
        batch.setUniformBuffer(AmbientOcclusionEffect_ParamsSlot, ambientOcclusionUniforms);

        // We need this with the mips levels
        batch.generateTextureMips(linearDepthTexture);

        batch.setPipeline(debugPipeline);
        batch.setResourceTexture(AmbientOcclusionEffect_LinearDepthMapSlot, linearDepthTexture);
        batch.draw(gpu::TRIANGLE_STRIP, 4);

        // Unbind so later passes don't read stale resources.
        batch.setResourceTexture(AmbientOcclusionEffect_LinearDepthMapSlot, nullptr);
        batch.setResourceTexture(AmbientOcclusionEffect_OcclusionMapSlot, nullptr);
    });
}

View file

@ -63,6 +63,7 @@ class AmbientOcclusionEffectConfig : public render::Job::Config::Persistent {
Q_PROPERTY(bool ditheringEnabled MEMBER ditheringEnabled NOTIFY dirty)
Q_PROPERTY(bool borderingEnabled MEMBER borderingEnabled NOTIFY dirty)
Q_PROPERTY(float radius MEMBER radius WRITE setRadius)
Q_PROPERTY(float perspectiveScale MEMBER perspectiveScale WRITE setPerspectiveScale)
Q_PROPERTY(float obscuranceLevel MEMBER obscuranceLevel WRITE setObscuranceLevel)
Q_PROPERTY(float falloffBias MEMBER falloffBias WRITE setFalloffBias)
Q_PROPERTY(float edgeSharpness MEMBER edgeSharpness WRITE setEdgeSharpness)
@ -79,6 +80,7 @@ public:
const int MAX_BLUR_RADIUS = 6;
void setRadius(float newRadius) { radius = std::max(0.01f, newRadius); emit dirty(); }
void setPerspectiveScale(float scale) { perspectiveScale = scale; emit dirty(); }
void setObscuranceLevel(float level) { obscuranceLevel = std::max(0.01f, level); emit dirty(); }
void setFalloffBias(float bias) { falloffBias = std::max(0.0f, std::min(bias, 0.2f)); emit dirty(); }
void setEdgeSharpness(float sharpness) { edgeSharpness = std::max(0.0f, (float)sharpness); emit dirty(); }
@ -90,14 +92,15 @@ public:
double getGpuTime() { return gpuTime; }
float radius{ 0.5f };
float perspectiveScale{ 1.0f };
float obscuranceLevel{ 0.5f }; // intensify or dim down the obscurance effect
float falloffBias{ 0.01f };
float edgeSharpness{ 1.0f };
float blurDeviation{ 2.5f };
float numSpiralTurns{ 7.0f }; // defining an angle span to distribute the samples ray directions
int numSamples{ 11 };
int resolutionLevel{ 1 };
int blurRadius{ 4 }; // 0 means no blurring
int resolutionLevel{ 0 };
int blurRadius{ 0 }; // 0 means no blurring
bool ditheringEnabled{ true }; // randomize the distribution of rays per pixel, should always be true
bool borderingEnabled{ true }; // avoid evaluating information from non existing pixels out of the frame, should always be true
double gpuTime{ 0.0 };
@ -106,10 +109,25 @@ signals:
void dirty();
};
namespace gpu {
template <class T> class UniformBuffer : public gpu::BufferView {
public:
~UniformBuffer<T>() {};
UniformBuffer<T>() : gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(T), (const gpu::Byte*) &T())) {}
const T* operator ->() const { return &get<T>(); }
T* operator ->() { return &edit<T>(); }
};
}
class AmbientOcclusionEffect {
public:
using Inputs = render::VaryingSet3<DeferredFrameTransformPointer, DeferredFramebufferPointer, LinearDepthFramebufferPointer>;
using Outputs = AmbientOcclusionFramebufferPointer;
using Outputs = render::VaryingSet2<AmbientOcclusionFramebufferPointer, gpu::BufferView>;
using Config = AmbientOcclusionEffectConfig;
using JobModel = render::Job::ModelIO<AmbientOcclusionEffect, Inputs, Outputs, Config>;
@ -118,46 +136,12 @@ public:
void configure(const Config& config);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs);
float getRadius() const { return _parametersBuffer.get<Parameters>().radiusInfo.x; }
float getObscuranceLevel() const { return _parametersBuffer.get<Parameters>().radiusInfo.w; }
float getFalloffBias() const { return (float)_parametersBuffer.get<Parameters>().ditheringInfo.z; }
float getEdgeSharpness() const { return (float)_parametersBuffer.get<Parameters>().blurInfo.x; }
float getBlurDeviation() const { return _parametersBuffer.get<Parameters>().blurInfo.z; }
float getNumSpiralTurns() const { return _parametersBuffer.get<Parameters>().sampleInfo.z; }
int getNumSamples() const { return (int)_parametersBuffer.get<Parameters>().sampleInfo.x; }
int getResolutionLevel() const { return _parametersBuffer.get<Parameters>().resolutionInfo.x; }
int getBlurRadius() const { return (int)_parametersBuffer.get<Parameters>().blurInfo.y; }
bool isDitheringEnabled() const { return _parametersBuffer.get<Parameters>().ditheringInfo.x; }
bool isBorderingEnabled() const { return _parametersBuffer.get<Parameters>().ditheringInfo.w; }
private:
void updateGaussianDistribution();
void setDepthInfo(float nearZ, float farZ);
typedef gpu::BufferView UniformBufferView;
// Class describing the uniform buffer with the transform info common to the AO shaders
// It s changing every frame
class FrameTransform {
public:
// Pixel info is { viemport width height and stereo on off}
glm::vec4 pixelInfo;
// Depth info is { n.f, f - n, -f}
glm::vec4 depthInfo;
// Stereo info
glm::vec4 stereoInfo { 0.0 };
// Mono proj matrix or Left and Right proj matrix going from Mono Eye space to side clip space
glm::mat4 projection[2];
FrameTransform() {}
};
gpu::BufferView _frameTransformBuffer;
// Class describing the uniform buffer with all the parameters common to the AO shaders
class Parameters {
public:
// Resolution info
glm::vec4 resolutionInfo { -1.0f, 0.0f, 0.0f, 0.0f };
glm::vec4 resolutionInfo { -1.0f, 0.0f, 1.0f, 0.0f };
// radius info is { R, R^2, 1 / R^6, ObscuranceScale}
glm::vec4 radiusInfo{ 0.5f, 0.5f * 0.5f, 1.0f / (0.25f * 0.25f * 0.25f), 1.0f };
// Dithering info
@ -169,10 +153,28 @@ private:
// gaussian distribution coefficients first is the sampling radius (max is 6)
const static int GAUSSIAN_COEFS_LENGTH = 8;
float _gaussianCoefs[GAUSSIAN_COEFS_LENGTH];
Parameters() {}
int getResolutionLevel() const { return resolutionInfo.x; }
float getRadius() const { return radiusInfo.x; }
float getPerspectiveScale() const { return resolutionInfo.z; }
float getObscuranceLevel() const { return radiusInfo.w; }
float getFalloffBias() const { return (float)ditheringInfo.z; }
float getEdgeSharpness() const { return (float)blurInfo.x; }
float getBlurDeviation() const { return blurInfo.z; }
float getNumSpiralTurns() const { return sampleInfo.z; }
int getNumSamples() const { return (int)sampleInfo.x; }
int getBlurRadius() const { return (int)blurInfo.y; }
bool isDitheringEnabled() const { return ditheringInfo.x; }
bool isBorderingEnabled() const { return ditheringInfo.w; }
};
gpu::BufferView _parametersBuffer;
using ParametersBuffer = gpu::UniformBuffer<Parameters>;
private:
void updateGaussianDistribution();
ParametersBuffer _parametersBuffer;
const gpu::PipelinePointer& getOcclusionPipeline();
const gpu::PipelinePointer& getHBlurPipeline(); // first
@ -185,6 +187,55 @@ private:
AmbientOcclusionFramebufferPointer _framebuffer;
gpu::RangeTimer _gpuTimer;
friend class DebugAmbientOcclusion;
};
// QObject config for the DebugAmbientOcclusion job: exposes the cursor-pixel
// toggle and the normalized texcoord of the pixel to debug; emits dirty() when
// either property changes so the job gets reconfigured.
class DebugAmbientOcclusionConfig : public render::Job::Config {
    Q_OBJECT

    Q_PROPERTY(bool showCursorPixel MEMBER showCursorPixel NOTIFY dirty)
    Q_PROPERTY(glm::vec2 debugCursorTexcoord MEMBER debugCursorTexcoord NOTIFY dirty)
public:
    // Job enabled by default.
    DebugAmbientOcclusionConfig() : render::Job::Config(true) {}

    bool showCursorPixel{ false };
    // Normalized [0,1] texcoord; defaults to the screen center.
    glm::vec2 debugCursorTexcoord{ 0.5, 0.5 };

signals:
    void dirty();
};
// Render job that visualizes the ambient-occlusion evaluation for a chosen
// cursor pixel. Consumes the same frame transform / deferred / linear-depth
// inputs as the AO job, plus the AO job's parameters buffer (input 3), so the
// debug shader evaluates with identical settings.
class DebugAmbientOcclusion {
public:
    using Inputs = render::VaryingSet4<DeferredFrameTransformPointer, DeferredFramebufferPointer, LinearDepthFramebufferPointer, AmbientOcclusionEffect::ParametersBuffer>;
    using Config = DebugAmbientOcclusionConfig;
    using JobModel = render::Job::ModelI<DebugAmbientOcclusion, Inputs, Config>;

    DebugAmbientOcclusion();

    void configure(const Config& config);
    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs);

private:
    // Class describing the uniform buffer with all the parameters common to the debug AO shaders
    class Parameters {
    public:
        // Pixel info: xy = debug cursor texcoord, zw unused.
        glm::vec4 pixelInfo { 0.0f, 0.0f, 0.0f, 0.0f };

        Parameters() {}
    };
    gpu::UniformBuffer<Parameters> _parametersBuffer;

    // Lazily builds and caches _debugPipeline.
    const gpu::PipelinePointer& getDebugPipeline();

    gpu::PipelinePointer _debugPipeline;

    bool _showCursorPixel{ false };
};
#endif // hifi_AmbientOcclusionEffect_h

View file

@ -81,6 +81,9 @@ bool isStereo() {
float getStereoSideWidth(int resolutionLevel) {
return float(int(frameTransform._stereoInfo.y) >> resolutionLevel);
}
float getStereoSideHeight(int resolutionLevel) {
return float(int(frameTransform._pixelInfo.w) >> resolutionLevel);
}
ivec4 getStereoSideInfo(int xPos, int resolutionLevel) {
int sideWidth = int(getStereoSideWidth(resolutionLevel));

View file

@ -142,7 +142,9 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
// AO job
const auto ambientOcclusionInputs = AmbientOcclusionEffect::Inputs(deferredFrameTransform, deferredFramebuffer, linearDepthTarget).hasVarying();
const auto ambientOcclusionFramebuffer = addJob<AmbientOcclusionEffect>("AmbientOcclusion", ambientOcclusionInputs);
const auto ambientOcclusionOutputs = addJob<AmbientOcclusionEffect>("AmbientOcclusion", ambientOcclusionInputs);
const auto ambientOcclusionFramebuffer = ambientOcclusionOutputs.getN<AmbientOcclusionEffect::Outputs>(0);
const auto ambientOcclusionUniforms = ambientOcclusionOutputs.getN<AmbientOcclusionEffect::Outputs>(1);
// Draw Lights just add the lights to the current list of lights to deal with. NOt really gpu job for now.
addJob<DrawLight>("DrawLight", lights);
@ -178,6 +180,9 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
{
addJob<DebugSubsurfaceScattering>("DebugScattering", deferredLightingInputs);
const auto debugAmbientOcclusionInputs = DebugAmbientOcclusion::Inputs(deferredFrameTransform, deferredFramebuffer, linearDepthTarget, ambientOcclusionUniforms).hasVarying();
addJob<DebugAmbientOcclusion>("DebugAmbientOcclusion", debugAmbientOcclusionInputs);
// Debugging Deferred buffer job
const auto debugFramebuffers = render::Varying(DebugDeferredBuffer::Inputs(deferredFramebuffer, linearDepthTarget, surfaceGeometryFramebuffer, ambientOcclusionFramebuffer));
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", debugFramebuffers);

View file

@ -30,13 +30,8 @@ vec2 unpackOcclusionDepth(vec3 raw) {
<@endfunc@>
<@func declareAmbientOcclusion()@>
struct AmbientOcclusionFrameTransform {
vec4 _pixelInfo;
vec4 _depthInfo;
vec4 _stereoInfo;
mat4 _projection[2];
};
<@include DeferredTransform.slh@>
<$declareDeferredFrameTransform()$>
struct AmbientOcclusionParams {
vec4 _resolutionInfo;
@ -47,55 +42,20 @@ struct AmbientOcclusionParams {
float _gaussianCoefs[8];
};
uniform ambientOcclusionFrameTransformBuffer {
AmbientOcclusionFrameTransform frameTransform;
};
uniform ambientOcclusionParamsBuffer {
AmbientOcclusionParams params;
};
float getPerspectiveScale() {
return (params._resolutionInfo.z);
}
int getResolutionLevel() {
return int(params._resolutionInfo.x);
}
vec2 getWidthHeight() {
return vec2(ivec2(frameTransform._pixelInfo.zw) >> getResolutionLevel());
}
float getProjScale() {
return getWidthHeight().y * frameTransform._projection[0][1][1] * 0.5;
}
mat4 getProjection(int side) {
return frameTransform._projection[side];
}
bool isStereo() {
return frameTransform._stereoInfo.x > 0.0f;
}
float getStereoSideWidth() {
return float(int(frameTransform._stereoInfo.y) >> getResolutionLevel());
}
float getStereoSideHeight() {
return float(int(frameTransform._pixelInfo.w) >> getResolutionLevel());
}
ivec3 getStereoSideInfo(int xPos) {
int sideWidth = int(getStereoSideWidth());
return ivec3(xPos < sideWidth ? ivec2(0, 0) : ivec2(1, sideWidth), sideWidth);
}
float evalZeyeFromZdb(float depth) {
return frameTransform._depthInfo.x / (depth * frameTransform._depthInfo.y + frameTransform._depthInfo.z);
}
vec3 evalEyeNormal(vec3 C) {
//return normalize(cross(dFdy(C), dFdx(C)));
return normalize(cross(dFdx(C), dFdy(C)));
}
float getRadius() {
return params._radiusInfo.x;
}
@ -191,7 +151,7 @@ vec2 evalTapWeightedValue(ivec3 side, int r, ivec2 ssC, float key) {
ivec2 tapOffset = <$axis$> * (r * RADIUS_SCALE);
ivec2 ssP = (ssC + tapOffset);
if ((ssP.x < side.y || ssP.x >= side.z + side.y) || (ssP.y < 0 || ssP.y >= int(getWidthHeight().y))) {
if ((ssP.x < side.y || ssP.x >= side.z + side.y) || (ssP.y < 0 || ssP.y >= int(getWidthHeight(getResolutionLevel()).y))) {
return vec2(0.0);
}
vec2 tapOZ = fetchOcclusionDepth(ssC + tapOffset);
@ -209,7 +169,7 @@ vec3 getBlurredOcclusion(vec2 coord) {
ivec2 ssC = ivec2(coord);
// Stereo side info
ivec3 side = getStereoSideInfo(ssC.x);
ivec4 side = getStereoSideInfo(ssC.x, getResolutionLevel());
vec3 rawSample;
vec2 occlusionDepth = fetchOcclusionDepthRaw(ssC, rawSample);
@ -223,11 +183,11 @@ vec3 getBlurredOcclusion(vec2 coord) {
int blurRadius = getBlurRadius();
// negative side first
for (int r = -blurRadius; r <= -1; ++r) {
weightedSums += evalTapWeightedValue(side, r, ssC, key);
weightedSums += evalTapWeightedValue(side.xyz, r, ssC, key);
}
// then positive side
for (int r = 1; r <= blurRadius; ++r) {
weightedSums += evalTapWeightedValue(side, r, ssC, key);
weightedSums += evalTapWeightedValue(side.xyz, r, ssC, key);
}
// Final normalization

View file

@ -0,0 +1,162 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// Created by Sam Gateau on 1/1/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include ssao.slh@>
<$declareAmbientOcclusion()$>
<$declarePackOcclusionDepth()$>
const int LOG_MAX_OFFSET = 3;
const int MAX_MIP_LEVEL = 5;
// the depth pyramid texture
uniform sampler2D pyramidMap;
// Eye-space Z for a pixel: reads the linear-depth pyramid at the mip matching
// the current resolution level, negated so Z is negative in front of the camera.
float getZEye(ivec2 pixel) {
    return -texelFetch(pyramidMap, pixel, getResolutionLevel()).x;
}
/*
vec3 evalEyePositionFromZeye(int side, float Zeye, vec2 texcoord) {
// compute the view space position using the depth
// basically manually pick the proj matrix components to do the inverse
float Xe = (-Zeye * (texcoord.x * 2.0 - 1.0) - Zeye * frameTransform._projection[side][2][0] - frameTransform._projection[side][3][0]) / frameTransform._projection[side][0][0];
float Ye = (-Zeye * (texcoord.y * 2.0 - 1.0) - Zeye * frameTransform._projection[side][2][1] - frameTransform._projection[side][3][1]) / frameTransform._projection[side][1][1];
return vec3(Xe, Ye, Zeye);
}
*/
out vec4 outFragColor;
uniform sampler2D normalMap;
// Per-pixel pseudo-random rotation angle for the sampling spiral.
// isDitheringEnabled() acts as a 0/1 multiplier to switch the hash off;
// getFrameDithering() adds a per-frame offset on top.
float getAngleDithering(in ivec2 pixelPos) {
    // Hash function used in the AlchemyAO paper
    return isDitheringEnabled() * (3 * pixelPos.x ^ pixelPos.y + pixelPos.x * pixelPos.y) * 10 + getFrameDithering();
}
const float TWO_PI = 6.28;

// Returns the unit-disk direction of tap #sampleNumber along the sampling
// spiral (rotated by spinAngle), and outputs the normalized radius ssR in
// (0,1] so the caller can scale it to the screen-space disk radius.
vec2 tapLocation(int sampleNumber, float spinAngle, out float ssR){
    // Radius relative to ssR
    float alpha = float(sampleNumber + 0.5) * getInvNumSamples();
    float angle = alpha * (getNumSpiralTurns() * TWO_PI) + spinAngle;

    ssR = alpha;
    return vec2(cos(angle), sin(angle));
}
// Reconstructs the eye-space position of the tap at ssC + ssR * unitOffset,
// reading depth from a coarser pyramid mip for distant taps (cache-friendly).
// side = (eye index, side x-offset, side width) from getStereoSideInfo().
vec3 getOffsetPosition(ivec3 side, ivec2 ssC, vec2 unitOffset, float ssR) {
    // Derivation:
    //  mipLevel = floor(log(ssR / MAX_OFFSET));
    int mipLevel = clamp(findMSB(int(ssR)) - LOG_MAX_OFFSET, 0, MAX_MIP_LEVEL);

    ivec2 ssOffset = ivec2(ssR * unitOffset);
    ivec2 ssP = ssOffset + ssC;
    if (bool(isBorderingEnabled())) {
        // Mirror the tap back inside the side/frame instead of sampling outside.
        ssP.x = ((ssP.x < 0 || ssP.x >= side.z) ? ssC.x - ssOffset.x : ssP.x);
        ssP.y = ((ssP.y < 0 || ssP.y >= int(getWidthHeight(getResolutionLevel()).y)) ? ssC.y - ssOffset.y : ssP.y);
    }

    // Back to full-frame coordinates (re-add the stereo side x-offset).
    ivec2 ssPFull = ivec2(ssP.x + side.y, ssP.y);

    vec3 P;

    // We need to divide by 2^mipLevel to read the appropriately scaled coordinate from a MIP-map.
    // Manually clamp to the texture size because texelFetch bypasses the texture unit
    ivec2 mipP = clamp(ssPFull >> mipLevel, ivec2(0), textureSize(pyramidMap, getResolutionLevel() + mipLevel) - ivec2(1));
    P.z = -texelFetch(pyramidMap, mipP, getResolutionLevel() + mipLevel).r;

    // Offset to pixel center
    vec2 tapUV = (vec2(ssP) + vec2(0.5)) / float(side.z);
    P = evalEyePositionFromZeye(side.x, P.z, tapUV);
    return P;
}
// Obscurance contribution of one tap for the pixel at eye-space position C
// with normal n_C: finds the tap's eye-space position and applies the SAO
// falloff to the vector v from C to the occluder.
float sampleAO(in ivec3 side, in ivec2 ssC, in vec3 C, in vec3 n_C, in float ssDiskRadius, in int tapIndex, in float randomPatternRotationAngle) {
    // Offset on the unit disk, spun for this pixel
    float ssR;
    vec2 unitOffset = tapLocation(tapIndex, randomPatternRotationAngle, ssR);
    ssR *= ssDiskRadius;

    // The occluding point in camera space
    vec3 Q = getOffsetPosition(side, ssC, unitOffset, ssR);

    vec3 v = Q - C;

    float vv = dot(v, v);
    float vn = dot(v, n_C);

    // Fall off function as recommended in SAO paper
    // epsilon avoids division blow-up when the occluder coincides with C.
    const float epsilon = 0.01;
    float f = max(getRadius2() - vv, 0.0);
    return f * f * f * max((vn - getFalloffBias()) / (epsilon + vv), 0.0);
}
// Occlusion pass entry point: evaluates the obscurance A for this pixel by
// accumulating spiral taps, then packs (occlusion, depth key) into the output.
void main(void) {
    // Pixel being shaded
    ivec2 ssC = ivec2(gl_FragCoord.xy);

    // Fetch the z under the pixel (stereo or not)
    float Zeye = getZEye(ssC);

    // Stereo side info
    ivec4 side = getStereoSideInfo(ssC.x, getResolutionLevel());

    // From now on, ssC is the pixel pos in the side
    ssC.x -= side.y;
    vec2 fragPos = (vec2(ssC) + vec2(0.5)) / vec2(getStereoSideWidth(getResolutionLevel()), getStereoSideHeight(getResolutionLevel()));

    // The position and normal of the pixel fragment in Eye space
    vec3 Cp = evalEyePositionFromZeye(side.x, Zeye, fragPos);
    vec3 Cn = evalEyeNormal(Cp);

    // Choose the screen-space sample radius
    // proportional to the projected area of the sphere
    float ssDiskRadius = -( getProjScale(getResolutionLevel()) * getRadius() / Cp.z ) * getPerspectiveScale();

    // Let's make noise
    float randomPatternRotationAngle = getAngleDithering(ssC);

    // Accumulate the Obscurance for each samples
    float sum = 0.0;
    for (int i = 0; i < getNumSamples(); ++i) {
        sum += sampleAO(side.xyz, ssC, Cp, Cn, ssDiskRadius, i, randomPatternRotationAngle);
    }

    // Remap the accumulated obscurance to an ambient visibility term in [0,1].
    float A = max(0.0, 1.0 - sum * getObscuranceScaling() * 5.0 * getInvNumSamples());

<! // KEEP IT for Debugging
    // Bilateral box-filter over a quad for free, respecting depth edges
    // (the difference that this makes is subtle)
    if (abs(dFdx(Cp.z)) < 0.02) {
        A -= dFdx(A) * ((ssC.x & 1) - 0.5);
    }
    if (abs(dFdy(Cp.z)) < 0.02) {
        A -= dFdy(A) * ((ssC.y & 1) - 0.5);
    }
!>

    outFragColor = vec4(packOcclusionDepth(A, CSZToDephtKey(Cp.z)), 1.0);

    // KEEP IT for Debugging
    //  Debug Normal: outFragColor = vec4((Cn + vec3(1.0))* 0.5, 1.0);
    //  Debug Radius outFragColor = vec4(vec3(ssDiskRadius / 100.0), 1.0);
    //  Debug MaxMiplevel outFragColor = vec4(1.0 - vec3(float(clamp(findMSB(int(ssDiskRadius)) - LOG_MAX_OFFSET, 0, MAX_MIP_LEVEL))/ float(MAX_MIP_LEVEL)), 1.0);
    //  Debug OffsetPosition
    //  float ssR;
    //  vec2 unitOffset = tapLocation(int(getNumSamples() - 1), 0, ssR);
    //  vec3 Q = getOffsetPosition(side, ssC, unitOffset, ssR * ssDiskRadius);
    //  outFragColor = vec4(vec3(Q.x / 10.0, Q.y / 2.0, -Q.z/ 3.0), 1.0);
    //  vec3 v = normalize(Q - Cp);
    //  outFragColor = vec4((v + vec3(1.0))* 0.5, 1.0);
    //  outFragColor = vec4((Cn + vec3(1.0))* 0.5, 1.0);
    //  outFragColor = vec4(vec3(ssDiskRadius / 100.0), 1.0);
}

View file

@ -21,9 +21,9 @@ const int MAX_MIP_LEVEL = 5;
uniform sampler2D pyramidMap;
float getZEye(ivec2 pixel) {
return -texelFetch(pyramidMap, pixel, 0/*getResolutionLevel()*/).x;
return -texelFetch(pyramidMap, pixel, getResolutionLevel()).x;
}
/*
vec3 evalEyePositionFromZeye(int side, float Zeye, vec2 texcoord) {
// compute the view space position using the depth
// basically manually pick the proj matrix components to do the inverse
@ -31,7 +31,7 @@ vec3 evalEyePositionFromZeye(int side, float Zeye, vec2 texcoord) {
float Ye = (-Zeye * (texcoord.y * 2.0 - 1.0) - Zeye * frameTransform._projection[side][2][1] - frameTransform._projection[side][3][1]) / frameTransform._projection[side][1][1];
return vec3(Xe, Ye, Zeye);
}
*/
out vec4 outFragColor;
uniform sampler2D normalMap;
@ -61,7 +61,7 @@ vec3 getOffsetPosition(ivec3 side, ivec2 ssC, vec2 unitOffset, float ssR) {
ivec2 ssP = ssOffset + ssC;
if (bool(isBorderingEnabled())) {
ssP.x = ((ssP.x < 0 || ssP.x >= side.z) ? ssC.x - ssOffset.x : ssP.x);
ssP.y = ((ssP.y < 0 || ssP.y >= int(getWidthHeight().y)) ? ssC.y - ssOffset.y : ssP.y);
ssP.y = ((ssP.y < 0 || ssP.y >= int(getWidthHeight(getResolutionLevel()).y)) ? ssC.y - ssOffset.y : ssP.y);
}
ivec2 ssPFull = ivec2(ssP.x + side.y, ssP.y);
@ -108,11 +108,11 @@ void main(void) {
float Zeye = getZEye(ssC);
// Stereo side info
ivec3 side = getStereoSideInfo(ssC.x);
ivec4 side = getStereoSideInfo(ssC.x, getResolutionLevel());
// From now on, ssC is the pixel pos in the side
ssC.x -= side.y;
vec2 fragPos = (vec2(ssC) + 0.5) / vec2(getStereoSideWidth(), getStereoSideHeight());
vec2 fragPos = (vec2(ssC) + vec2(0.5)) / vec2(getStereoSideWidth(getResolutionLevel()), getStereoSideHeight(getResolutionLevel()));
// The position and normal of the pixel fragment in Eye space
vec3 Cp = evalEyePositionFromZeye(side.x, Zeye, fragPos);
@ -120,7 +120,7 @@ void main(void) {
// Choose the screen-space sample radius
// proportional to the projected area of the sphere
float ssDiskRadius = -getProjScale() * getRadius() / Cp.z;
float ssDiskRadius = -( getProjScale(getResolutionLevel()) * getRadius() / Cp.z ) * getPerspectiveScale();
// Let's make noise
float randomPatternRotationAngle = getAngleDithering(ssC);
@ -128,7 +128,7 @@ void main(void) {
// Accumulate the Obscurance for each samples
float sum = 0.0;
for (int i = 0; i < getNumSamples(); ++i) {
sum += sampleAO(side, ssC, Cp, Cn, ssDiskRadius, i, randomPatternRotationAngle);
sum += sampleAO(side.xyz, ssC, Cp, Cn, ssDiskRadius, i, randomPatternRotationAngle);
}
float A = max(0.0, 1.0 - sum * getObscuranceScaling() * 5.0 * getInvNumSamples());
@ -158,4 +158,5 @@ void main(void) {
// vec3 v = normalize(Q - Cp);
//outFragColor = vec4((v + vec3(1.0))* 0.5, 1.0);
// outFragColor = vec4((Cn + vec3(1.0))* 0.5, 1.0);
//outFragColor = vec4(vec3(ssDiskRadius / 100.0), 1.0);
}

View file

@ -0,0 +1,50 @@
//
// ambientOcclusionPass.qml
//
// Created by Sam Gateau on 6/6/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "configSlider"
Column {
    spacing: 8
    Column {
        // Debug panel for the AmbientOcclusion render job.
        // (id renamed from "surfaceGeometry", a leftover of the
        // surfaceGeometryPass.qml panel this file was copied from.)
        id: ambientOcclusion
        spacing: 10
        Column {
            // Scalar parameters, encoded as "label:property:max:integral"
            Repeater {
                model: [
                    "Radius:radius:2.0:false",
                    "Level:obscuranceLevel:1.0:false",
                    "Scale:perspectiveScale:2.0:false",
                ]
                ConfigSlider {
                    label: qsTr(modelData.split(":")[0])
                    integral: (modelData.split(":")[3] == 'true')
                    config: Render.getConfig("AmbientOcclusion")
                    property: modelData.split(":")[1]
                    max: modelData.split(":")[2]
                    min: 0.0
                }
            }
            // Boolean toggles, encoded as "label:property"
            Repeater {
                model: [
                    "resolutionLevel:resolutionLevel",
                    "ditheringEnabled:ditheringEnabled",
                    "borderingEnabled:borderingEnabled",
                ]
                CheckBox {
                    text: qsTr(modelData.split(":")[0])
                    checked: Render.getConfig("AmbientOcclusion")[modelData.split(":")[1]]
                    onCheckedChanged: { Render.getConfig("AmbientOcclusion")[modelData.split(":")[1]] = checked }
                }
            }
        }
    }
}

View file

@ -0,0 +1,19 @@
//
// debugAmbientOcclusionPass.js
//
// Created by Sam Gateau on 6/6/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
// Build the ambient occlusion debug panel from its QML description.
var qmlSource = Script.resolvePath('ambientOcclusionPass.qml');
var aoWindow = new OverlayWindow({
    title: 'Ambient Occlusion Pass',
    source: qmlSource,
    width: 400, height: 170,
});

// Dock the panel near the right edge, below the other debug windows.
aoWindow.setPosition(Window.innerWidth - 420, 50 + 550 + 50);

// Tear the script down when the user closes the window.
aoWindow.closed.connect(function() {
    Script.stop();
});