Good state

sam 2016-08-08 02:28:25 -07:00
parent e6c74b29c7
commit db8bf78dd1
9 changed files with 57 additions and 40 deletions

View file

@@ -375,7 +375,7 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
auto resolutionLevel = _parametersBuffer->getResolutionLevel();
//_parametersBuffer.edit<Parameters>()._ditheringInfo.y += 0.25f;
_parametersBuffer->ditheringInfo.y += 0.25f;
// Running in stereo?
bool isStereo = args->_context->isStereo();
@@ -403,7 +403,7 @@ void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext
// We need this with the mip levels
// batch.generateTextureMips(_framebuffer->getLinearDepthTexture());
batch.generateTextureMips(_framebuffer->getLinearDepthTexture());
// Occlusion pass
batch.setFramebuffer(occlusionFBO);

View file

@@ -94,8 +94,8 @@ public:
float blurDeviation{ 2.5f };
float numSpiralTurns{ 7.0f }; // defining an angle span to distribute the sample ray directions
int numSamples{ 11 };
int resolutionLevel{ 1 };
int blurRadius{ 3 }; // 0 means no blurring
int resolutionLevel{ 0 };
int blurRadius{ 4 }; // 0 means no blurring
bool ditheringEnabled{ true }; // randomize the distribution of taps per pixel, should always be true
bool borderingEnabled{ true }; // avoid evaluating information from non-existent pixels outside the frame, should always be true
bool fetchMipsEnabled{ true }; // fetch taps in sub mips to optimize cache, should always be true

View file

@@ -124,10 +124,6 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
const auto linearDepthPassInputs = LinearDepthPass::Inputs(deferredFrameTransform, deferredFramebuffer).hasVarying();
const auto linearDepthPassOutputs = addJob<LinearDepthPass>("LinearDepth", linearDepthPassInputs);
const auto linearDepthTarget = linearDepthPassOutputs.getN<LinearDepthPass::Outputs>(0);
// const auto linearDepthTexture = linearDepthPassOutputs.getN<LinearDepthPass::Outputs>(2);
// const auto halfLinearDepthTexture = linearDepthPassOutputs.getN<LinearDepthPass::Outputs>(3);
// const auto halfNormalTexture = linearDepthPassOutputs.getN<LinearDepthPass::Outputs>(4);
// Curvature pass
const auto surfaceGeometryPassInputs = SurfaceGeometryPass::Inputs(deferredFrameTransform, deferredFramebuffer, linearDepthTarget).hasVarying();
@@ -150,8 +146,8 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
addJob<DrawLight>("DrawLight", lights);
const auto deferredLightingInputs = RenderDeferred::Inputs(deferredFrameTransform, deferredFramebuffer, lightingModel,
surfaceGeometryFramebuffer, ambientOcclusionFramebuffer, scatteringResource).hasVarying();
surfaceGeometryFramebuffer, ambientOcclusionFramebuffer, scatteringResource).hasVarying();
// DeferredBuffer is complete, now let's shade it into the LightingBuffer
addJob<RenderDeferred>("RenderDeferred", deferredLightingInputs);
@@ -178,17 +174,16 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
// Debugging stages
{
// Debugging Deferred buffer job
const auto debugFramebuffers = render::Varying(DebugDeferredBuffer::Inputs(deferredFramebuffer, linearDepthTarget, surfaceGeometryFramebuffer, ambientOcclusionFramebuffer));
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", debugFramebuffers);
// Debugging Deferred buffer job
const auto debugFramebuffers = render::Varying(DebugDeferredBuffer::Inputs(deferredFramebuffer, linearDepthTarget, surfaceGeometryFramebuffer, ambientOcclusionFramebuffer));
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", debugFramebuffers);
addJob<DebugSubsurfaceScattering>("DebugScattering", deferredLightingInputs);
const auto debugAmbientOcclusionInputs = DebugAmbientOcclusion::Inputs(deferredFrameTransform, deferredFramebuffer, linearDepthTarget, ambientOcclusionUniforms).hasVarying();
addJob<DebugAmbientOcclusion>("DebugAmbientOcclusion", debugAmbientOcclusionInputs);
// Scene Octree Debugging job
{
addJob<DrawSceneOctree>("DrawSceneOctree", spatialSelection);

View file

@@ -74,7 +74,7 @@ void LinearDepthFramebuffer::allocate() {
// For Linear Depth:
_linearDepthTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::RGB), width, height,
gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT)));
_linearDepthTexture->autoGenerateMips(5);
// _linearDepthTexture->autoGenerateMips(5);
_linearDepthFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
_linearDepthFramebuffer->setRenderBuffer(0, _linearDepthTexture);
_linearDepthFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, _primaryDepthTexture->getTexelFormat());

View file

@@ -133,7 +133,7 @@ float getBlurCoef(int c) {
<@func declareSamplingDisk()@>
float getAngleDitheringWorldPos(in vec3 pixelWorldPos) {
vec3 worldPosFract = fract(pixelWorldPos * 0.4);
vec3 worldPosFract = fract(pixelWorldPos * 1.0);
ivec3 pixelPos = ivec3(worldPosFract * 256);
@@ -240,34 +240,49 @@ const int LOG_MAX_OFFSET = 3;
const int MAX_MIP_LEVEL = 5;
int evalMipFromRadius(float radius) {
// mipLevel = floor(log(ssR / MAX_OFFSET));
return doFetchMips() * clamp(findMSB(int(radius)) - LOG_MAX_OFFSET, 0, MAX_MIP_LEVEL);
return clamp(findMSB(int(radius)) - LOG_MAX_OFFSET, 0, MAX_MIP_LEVEL);
}
vec3 getOffsetPosition(ivec3 side, ivec2 ssC, vec3 tap, vec2 imageSize) {
int mipLevel = evalMipFromRadius(tap.z);
vec3 fetchTapUnfiltered(ivec3 side, ivec2 ssC, vec3 tap, vec2 imageSize) {
ivec2 ssP = ivec2(tap.xy) + ssC;
ivec2 ssPFull = ivec2(ssP.x + side.y, ssP.y);
vec2 tapUV = (vec2(ssP) + vec2(0.5)) / imageSize;
vec3 P;
P.xy = tapUV;
P.z = -texture(pyramidMap, tapUV).x;
return P;
}
vec3 fetchTap(ivec3 side, ivec2 ssC, vec3 tap, vec2 imageSize) {
int mipLevel = evalMipFromRadius(tap.z * doFetchMips());
ivec2 ssP = ivec2(tap.xy) + ssC;
ivec2 ssPFull = ivec2(ssP.x + side.y, ssP.y);
// We need to divide by 2^mipLevel to read the appropriately scaled coordinate from a MIP-map.
// Manually clamp to the texture size because texelFetch bypasses the texture unit
// ivec2 mipSize = textureSize(pyramidMap, mipLevel);
// ivec2 mipSize = max(ivec2(imageSize) >> mipLevel, ivec2(1));
// ivec2 mipSize = textureSize(pyramidMap, mipLevel);
ivec2 mipSize = max(ivec2(imageSize) >> mipLevel, ivec2(1));
// ivec2 mipP = clamp(ssPFull >> mipLevel, ivec2(0), mipSize - ivec2(1));
ivec2 mipP = clamp(ssPFull >> mipLevel, ivec2(0), mipSize - ivec2(1));
vec2 tapUV = (vec2(ssP) + vec2(0.5)) / imageSize;
// vec2 tapUV = (vec2(mipP) + vec2(0.5)) / vec2(mipSize);
// vec2 tapUV = (vec2(ssP) + vec2(0.5)) / imageSize;
vec2 tapUV = (vec2(mipP) + vec2(0.5)) / vec2(mipSize);
vec3 P;
// P.z = -texelFetch(pyramidMap, mipP, mipLevel).r;
P.z = -textureLod(pyramidMap, tapUV, float(mipLevel)).r;
P.xy = tapUV;
P.z = -texelFetch(pyramidMap, mipP, mipLevel).x;
// P.z = -textureLod(pyramidMap, tapUV, float(mipLevel)).x;
// Offset to pixel center
P = evalEyePositionFromZeye(side.x, P.z, tapUV);
return P;
return P;
}
<@endfunc@>
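A worked example of the mip selection above, as a sketch only; it assumes doFetchMips() returns 0.0 or 1.0 depending on the fetchMipsEnabled parameter, which is not shown in this hunk:
// Sketch, not part of the diff: tap radius of 32 pixels, LOG_MAX_OFFSET = 3, MAX_MIP_LEVEL = 5.
// evalMipFromRadius(32.0 * 1.0) = clamp(findMSB(32) - 3, 0, 5) = clamp(5 - 3, 0, 5) = 2,
// so fetchTap() reads mip 2 with texelFetch after clamping mipP against mipSize = imageSize >> 2.
// With mip fetching disabled, tap.z * doFetchMips() == 0.0 and findMSB(0) == -1,
// so the level clamps to 0 and the fetch effectively reads the full-resolution depth,
// matching what fetchTapUnfiltered() samples.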

View file

@@ -72,8 +72,9 @@ void main(void) {
}
// Let's make noise
// float randomPatternRotationAngle = getAngleDithering(ssC);
float randomPatternRotationAngle = getAngleDitheringWorldPos(Cp);
//float randomPatternRotationAngle = getAngleDithering(ssC);
vec3 wCp = (getViewInverse() * vec4(Cp, 1.0)).xyz;
float randomPatternRotationAngle = getAngleDitheringWorldPos(wCp);
// Accumulate the Obscurance for each sample
@@ -84,14 +85,17 @@ void main(void) {
for (int i = 0; i < getNumSamples(); ++i) {
vec3 tap = getTapLocationClamped(i, randomPatternRotationAngle, ssDiskRadius, cursorPixelPos, imageSize);
// The occluding point in camera space
vec2 fragToTap = vec2(ssC) + tap.xy - fragCoord.xy;
if (dot(fragToTap,fragToTap) < keepTapRadius) {
keep = true;
keepedMip = evalMipFromRadius(tap.z);
keepedMip = evalMipFromRadius(tap.z * doFetchMips());
}
vec3 Q = getOffsetPosition(side.xyz, ssC, tap, imageSize);
vec3 tapUVZ = fetchTap(side.xyz, ssC, tap, imageSize);
vec3 Q = evalEyePositionFromZeye(side.x, tapUVZ.z, tapUVZ.xy);
sum += float(tap.z > 0.0) * evalAO(Cp, Cn, Q);
}
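Both occlusion shaders in this commit seed the dither rotation from a world-space position instead of the eye-space position Cp, so the noise pattern follows the scene rather than the camera. A minimal sketch of that step, assuming getViewInverse() returns the eye-to-world transform as a mat4:
// Sketch: promote the eye-space position to a point (w = 1.0) and move it to world space,
// then derive the per-pixel rotation angle from the world position.
vec3 wCp = (getViewInverse() * vec4(Cp, 1.0)).xyz;
float randomPatternRotationAngle = getAngleDitheringWorldPos(wCp);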

View file

@@ -44,15 +44,18 @@ void main(void) {
float ssDiskRadius = evalDiskRadius(Cp.z, imageSize);
// Let's make noise
//float randomPatternRotationAngle = getAngleDithering(ssC);
float randomPatternRotationAngle = getAngleDitheringWorldPos(Cp);
// float randomPatternRotationAngle = getAngleDithering(ssC);
vec3 wCp = (getViewInverse() * vec4(Cp, 1.0)).xyz;
float randomPatternRotationAngle = getAngleDitheringWorldPos(wCp);
// Accumulate the Obscurance for each sample
float sum = 0.0;
for (int i = 0; i < getNumSamples(); ++i) {
vec3 tap = getTapLocationClamped(i, randomPatternRotationAngle, ssDiskRadius, ssC, imageSize);
vec3 Q = getOffsetPosition(side.xyz, ssC, tap, imageSize);
vec3 tapUVZ = fetchTap(side.xyz, ssC, tap, imageSize);
vec3 Q = evalEyePositionFromZeye(side.x, tapUVZ.z, tapUVZ.xy);
sum += float(tap.z > 0.0) * evalAO(Cp, Cn, Q);
}

View file

@@ -44,8 +44,8 @@ Column {
model: [
"resolutionLevel:resolutionLevel",
"ditheringEnabled:ditheringEnabled",
"borderingEnabled:borderingEnabled",
"fetchMipsEnabled:fetchMipsEnabled"
"fetchMipsEnabled:fetchMipsEnabled",
"borderingEnabled:borderingEnabled"
]
CheckBox {
text: qsTr(modelData.split(":")[0])

View file

@@ -13,7 +13,7 @@ var qml = Script.resolvePath('ambientOcclusionPass.qml');
var window = new OverlayWindow({
title: 'Ambient Occlusion Pass',
source: qml,
width: 400, height: 200,
width: 400, height: 250,
});
window.setPosition(Window.innerWidth - 420, 50 + 550 + 50);
window.closed.connect(function() { Script.stop(); });