Mirror of https://github.com/overte-org/overte.git (synced 2025-04-14 07:47:30 +02:00)
Many improvements

This commit is contained in: parent a307c826ae, commit 28886f6ec3
16 changed files with 206 additions and 44 deletions

@@ -48,6 +48,8 @@ enum Slot {
Shadow,
Pyramid,
Curvature,
DiffusedCurvature,
Scattering,
AmbientOcclusion,
AmbientOcclusionBlurred
};

@@ -147,6 +149,21 @@ static const std::string DEFAULT_CURVATURE_SHADER{
" }"
};

static const std::string DEFAULT_NORMAL_CURVATURE_SHADER{
"vec4 getFragmentColor() {"
//" return vec4(pow(vec3(texture(curvatureMap, uv).a), vec3(1.0 / 2.2)), 1.0);"
" return vec4(pow(vec3(texture(curvatureMap, uv).xyz), vec3(1.0 / 2.2)), 1.0);"
//" return vec4(vec3(1.0 - textureLod(pyramidMap, uv, 3).x * 0.01), 1.0);"
" }"
};

static const std::string DEFAULT_SCATTERING_SHADER{
"vec4 getFragmentColor() {"
// " return vec4(pow(vec3(texture(scatteringMap, uv).xyz), vec3(1.0 / 2.2)), 1.0);"
" return vec4(vec3(texture(scatteringMap, uv).xyz), 1.0);"
" }"
};

static const std::string DEFAULT_AMBIENT_OCCLUSION_SHADER{
"vec4 getFragmentColor() {"
" return vec4(vec3(texture(obscuranceMap, uv).x), 1.0);"

@@ -214,6 +231,10 @@ std::string DebugDeferredBuffer::getShaderSourceCode(Mode mode, std::string cust
return DEFAULT_PYRAMID_DEPTH_SHADER;
case CurvatureMode:
return DEFAULT_CURVATURE_SHADER;
case NormalCurvatureMode:
return DEFAULT_NORMAL_CURVATURE_SHADER;
case ScatteringMode:
return DEFAULT_SCATTERING_SHADER;
case AmbientOcclusionMode:
return DEFAULT_AMBIENT_OCCLUSION_SHADER;
case AmbientOcclusionBlurredMode:

@@ -269,6 +290,8 @@ const gpu::PipelinePointer& DebugDeferredBuffer::getPipeline(Mode mode, std::str
slotBindings.insert(gpu::Shader::Binding("shadowMap", Shadow));
slotBindings.insert(gpu::Shader::Binding("pyramidMap", Pyramid));
slotBindings.insert(gpu::Shader::Binding("curvatureMap", Curvature));
slotBindings.insert(gpu::Shader::Binding("diffusedCurvatureMap", DiffusedCurvature));
slotBindings.insert(gpu::Shader::Binding("scatteringMap", Scattering));
slotBindings.insert(gpu::Shader::Binding("occlusionBlurredMap", AmbientOcclusionBlurred));
gpu::Shader::makeProgram(*program, slotBindings);

@@ -294,11 +317,13 @@ void DebugDeferredBuffer::configure(const Config& config) {
_size = config.size;
}

void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const gpu::FramebufferPointer& inputBuffer) {
assert(renderContext->args);
assert(renderContext->args->hasViewFrustum());
RenderArgs* args = renderContext->args;

auto& scatteringFramebuffer = inputBuffer;

gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setViewportTransform(args->_viewport);

@@ -329,6 +354,8 @@ void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const Ren
batch.setResourceTexture(Shadow, lightStage.lights[0]->shadow.framebuffer->getDepthStencilBuffer());
batch.setResourceTexture(Pyramid, framebufferCache->getDepthPyramidTexture());
batch.setResourceTexture(Curvature, framebufferCache->getCurvatureTexture());
//batch.setResourceTexture(DiffusedCurvature, diffusedCurvatureBuffer);
batch.setResourceTexture(Scattering, scatteringFramebuffer->getRenderBuffer(0));
if (DependencyManager::get<DeferredLightingEffect>()->isAmbientOcclusionEnabled()) {
batch.setResourceTexture(AmbientOcclusion, framebufferCache->getOcclusionTexture());
} else {

@@ -35,12 +35,12 @@ signals:
class DebugDeferredBuffer {
public:
using Config = DebugDeferredBufferConfig;
using JobModel = render::Job::Model<DebugDeferredBuffer, Config>;
using JobModel = render::Job::ModelI<DebugDeferredBuffer, gpu::FramebufferPointer, Config>;

DebugDeferredBuffer();

void configure(const Config& config);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const gpu::FramebufferPointer& inputBuffer);

protected:
friend class DebugDeferredBufferConfig;

@@ -60,6 +60,8 @@ protected:
ShadowMode,
PyramidDepthMode,
CurvatureMode,
NormalCurvatureMode,
ScatteringMode,
AmbientOcclusionMode,
AmbientOcclusionBlurredMode,
CustomMode // Needs to stay last

@@ -50,6 +50,9 @@ public:
gpu::FramebufferPointer getCurvatureFramebuffer();
gpu::TexturePointer getCurvatureTexture();

gpu::FramebufferPointer getScatteringFramebuffer();
gpu::TexturePointer getScatteringTexture();

void setAmbientOcclusionResolutionLevel(int level);
gpu::FramebufferPointer getOcclusionFramebuffer();
gpu::TexturePointer getOcclusionTexture();

@@ -122,9 +122,12 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
// Draw Lights just add the lights to the current list of lights to deal with. NOt really gpu job for now.
addJob<DrawLight>("DrawLight", lights);

const auto scatteringFramebuffer = addJob<SubsurfaceScattering>("Scattering", deferredFrameTransform);

// DeferredBuffer is complete, now let's shade it into the LightingBuffer
addJob<RenderDeferred>("RenderDeferred");

// AA job to be revisited
addJob<Antialiasing>("Antialiasing");

@@ -138,12 +141,11 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
addJob<DrawOverlay3D>("DrawOverlay3DOpaque", overlayOpaques, true);
addJob<DrawOverlay3D>("DrawOverlay3DTransparent", overlayTransparents, false);

addJob<SubsurfaceScattering>("Scattering", deferredFrameTransform);

// Debugging stages
{
// Debugging Deferred buffer job
addJob<DebugDeferredBuffer>("DebugDeferredBuffer");
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", scatteringFramebuffer);

// Scene Octree Debuging job
{
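
The hunks above, together with the DebugDeferredBuffer header change earlier, wire one job's output into another job's input. A minimal illustrative sketch of that pattern, using the engine types shown in this diff (the job class below is hypothetical, not part of the commit):

    // Hypothetical consumer job (illustration only): ModelI declares one typed input,
    // the task passes another job's output Varying to addJob, and the value arrives as
    // the extra parameter of run().
    class DebugConsumer {
    public:
        using Config = render::Job::Config;
        using JobModel = render::Job::ModelI<DebugConsumer, gpu::FramebufferPointer, Config>;

        void configure(const Config&) {}
        void run(const render::SceneContextPointer& sceneContext,
                 const render::RenderContextPointer& renderContext,
                 const gpu::FramebufferPointer& inputBuffer) {
            // inputBuffer would be the scatteringFramebuffer produced by the
            // "Scattering" job, just as DebugDeferredBuffer receives it above.
        }
    };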

@@ -34,6 +34,8 @@ void SubsurfaceScattering::configure(const Config& config) {
if (config.depthThreshold != getCurvatureDepthThreshold()) {
_parametersBuffer.edit<Parameters>().curvatureInfo.x = config.depthThreshold;
}

_showLUT = config.showLUT;
}

@@ -66,7 +68,65 @@ gpu::PipelinePointer SubsurfaceScattering::getScatteringPipeline() {
}

void SubsurfaceScattering::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, gpu::FramebufferPointer& curvatureFramebuffer) {
gpu::PipelinePointer _showLUTPipeline;
gpu::PipelinePointer getShowLUTPipeline();
gpu::PipelinePointer SubsurfaceScattering::getShowLUTPipeline() {
if (!_showLUTPipeline) {
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
auto ps = gpu::StandardShaderLib::getDrawTextureOpaquePS();
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);

gpu::Shader::BindingSet slotBindings;
// slotBindings.insert(gpu::Shader::Binding(std::string("deferredFrameTransformBuffer"), SubsurfaceScattering_FrameTransformSlot));
// slotBindings.insert(gpu::Shader::Binding(std::string("sourceMap"), BlurTask_SourceSlot));
gpu::Shader::makeProgram(*program, slotBindings);

gpu::StatePointer state = gpu::StatePointer(new gpu::State());

// Stencil test the curvature pass for objects pixels only, not the background
// state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::NOT_EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));

_showLUTPipeline = gpu::Pipeline::create(program, state);
}

return _showLUTPipeline;
}

bool SubsurfaceScattering::updateScatteringFramebuffer(const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& scatteringFramebuffer) {
if (!sourceFramebuffer) {
return false;
}

if (!_scatteringFramebuffer) {
_scatteringFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());

// attach depthStencil if present in source
if (sourceFramebuffer->hasDepthStencil()) {
_scatteringFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat());
}
auto blurringSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT);
auto blurringTarget = gpu::TexturePointer(gpu::Texture::create2D(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), blurringSampler));
_scatteringFramebuffer->setRenderBuffer(0, blurringTarget);
} else {
// it would be easier to just call resize on the bluredFramebuffer and let it work if needed but the source might loose it's depth buffer when doing so
if ((_scatteringFramebuffer->getWidth() != sourceFramebuffer->getWidth()) || (_scatteringFramebuffer->getHeight() != sourceFramebuffer->getHeight())) {
_scatteringFramebuffer->resize(sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), sourceFramebuffer->getNumSamples());
if (sourceFramebuffer->hasDepthStencil()) {
_scatteringFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat());
}
}
}

if (!scatteringFramebuffer) {
scatteringFramebuffer = _scatteringFramebuffer;
}

return true;
}

void SubsurfaceScattering::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, gpu::FramebufferPointer& scatteringFramebuffer) {
assert(renderContext->args);
assert(renderContext->args->hasViewFrustum());

@@ -76,16 +136,21 @@ void SubsurfaceScattering::run(const render::SceneContextPointer& sceneContext,
_scatteringTable = SubsurfaceScattering::generatePreIntegratedScattering(args);
}

auto pipeline = getScatteringPipeline();

auto framebufferCache = DependencyManager::get<FramebufferCache>();

// if (curvatureFramebuffer->getRenderBuffer(0))

if (!updateScatteringFramebuffer(framebufferCache->getCurvatureFramebuffer(), scatteringFramebuffer)) {
return;
}

gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
batch.enableStereo(false);

batch.setViewportTransform(args->_viewport >> 1);
batch.setViewportTransform(args->_viewport);

batch.setFramebuffer(_scatteringFramebuffer);

batch.setUniformBuffer(SubsurfaceScattering_FrameTransformSlot, frameTransform->getFrameTransformBuffer());

@@ -94,6 +159,14 @@ void SubsurfaceScattering::run(const render::SceneContextPointer& sceneContext,
batch.setResourceTexture(SubsurfaceScattering_CurvatureMapSlot, framebufferCache->getCurvatureTexture());
batch.setResourceTexture(SubsurfaceScattering_ScatteringTableSlot, _scatteringTable);
batch.draw(gpu::TRIANGLE_STRIP, 4);

if (_showLUT) {
auto viewportSize = std::min(args->_viewport.z, args->_viewport.w) >> 1;
batch.setViewportTransform(glm::ivec4(0, 0, viewportSize, viewportSize));
batch.setPipeline(getShowLUTPipeline());
batch.setResourceTexture(0, _scatteringTable);
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
});
}

@@ -149,7 +222,7 @@ vec3 integrate(double cosTheta, double skinRadius) {

double a = -(_PI);

double inc = 0.01;
double inc = 0.005;

while (a <= (_PI)) {
double sampleAngle = theta + a;

@@ -306,7 +379,7 @@ gpu::TexturePointer SubsurfaceScattering::generatePreIntegratedScattering(Render

const int WIDTH = 128;
const int HEIGHT = 128;
auto scatteringLUT = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, WIDTH, HEIGHT));
auto scatteringLUT = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, WIDTH, HEIGHT, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
diffuseScatter(scatteringLUT);
//diffuseScatterGPU(profileMap, scatteringLUT, args);
return scatteringLUT;
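
For a sense of scale on the smaller integration step above, a standalone sketch (illustration only, independent of the engine code) of how many samples the [-pi, pi] loop takes per LUT texel at each step size:

    #include <cmath>
    #include <cstdio>

    // Count loop iterations for a step of size `inc` across [-PI, PI], mirroring the
    // bounds of integrate() above (the BRDF math itself is omitted here).
    int sampleCount(double inc) {
        const double PI = 3.14159265358979323846;
        return static_cast<int>(std::ceil(2.0 * PI / inc));
    }

    int main() {
        std::printf("inc = 0.01  -> %d samples\n", sampleCount(0.01));   // ~629
        std::printf("inc = 0.005 -> %d samples\n", sampleCount(0.005));  // ~1257
    }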

@@ -20,10 +20,12 @@
class SubsurfaceScatteringConfig : public render::Job::Config {
Q_OBJECT
Q_PROPERTY(float depthThreshold MEMBER depthThreshold NOTIFY dirty)
Q_PROPERTY(bool showLUT MEMBER showLUT NOTIFY dirty)
public:
SubsurfaceScatteringConfig() : render::Job::Config(true) {}

float depthThreshold{ 0.1f };
bool showLUT{ true };

signals:
void dirty();

@@ -37,7 +39,7 @@ public:
SubsurfaceScattering();

void configure(const Config& config);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, gpu::FramebufferPointer& curvatureFramebuffer);
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, gpu::FramebufferPointer& scatteringFramebuffer);

float getCurvatureDepthThreshold() const { return _parametersBuffer.get<Parameters>().curvatureInfo.x; }

@@ -63,9 +65,16 @@ private:
gpu::TexturePointer _scatteringTable;

gpu::PipelinePointer _scatteringPipeline;
bool updateScatteringFramebuffer(const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& scatteringFramebuffer);
gpu::FramebufferPointer _scatteringFramebuffer;

gpu::PipelinePointer _scatteringPipeline;
gpu::PipelinePointer getScatteringPipeline();

gpu::PipelinePointer _showLUTPipeline;
gpu::PipelinePointer getShowLUTPipeline();
bool _showLUT{ false };
};

#endif // hifi_SubsurfaceScattering_h

@@ -28,7 +28,7 @@ public:

float depthThreshold{ 0.1f };
float basisScale{ 1.0f };
float curvatureScale{ 100.0f };
float curvatureScale{ 1.0f }; // Mean curvature value scaling (SI SI Dimension is [1/meters])

double getGpuTime() { return gpuTime; }

@@ -18,6 +18,7 @@ uniform sampler2D pyramidMap;
uniform sampler2D occlusionMap;
uniform sampler2D occlusionBlurredMap;
uniform sampler2D curvatureMap;
uniform sampler2D scatteringMap;

in vec2 uv;
out vec4 outFragColor;

@@ -65,8 +65,8 @@ vec4 fetchCurvature(vec2 texcoord) {

uniform sampler2D scatteringLUT;

vec3 fetchBRDF(float curvature, float LdotN) {
return texture(scatteringLUT, vec2(curvature, LdotN)).xyz;
vec3 fetchBRDF(float LdotN, float curvature) {
return texture(scatteringLUT, vec2( LdotN * 0.5 + 0.5, curvature)).xyz;
}

// Scattering parameters

@@ -93,9 +93,10 @@ void main(void) {
float curvature = abs(diffusedCurvature.w * 2 - 1) * 0.5f * scatterCurve + scatterBase;

// --> Calculate the light vector.
vec3 lightVector = normalize(vec3(-1.0f, -1.0f, -1.0f)); //normalize(lightPos - sourcePos.xyz);
vec3 lightVector = normalize(vec3(1.0f, 1.0f, 1.0f)); //normalize(lightPos - sourcePos.xyz);

_fragColor = vec4(fetchBRDF(dot(bentNormalR, lightVector), abs(diffusedCurvature.w * 2 - 1)), 1.0);
/*
// --> Optimise for skin diffusion profile.
float diffuseBlendedR = dot(normalize(mix( bentNormalN.xyz, bentNormalN, normalBendR * normalBendFactor)), lightVector);
float diffuseBlendedG = dot(normalize(mix(normal.xyz, bentNormalN, normalBendG * normalBendFactor)), lightVector);

@@ -108,7 +109,7 @@ void main(void) {
vec3 bdrfB = fetchBRDF(diffuseBlendedB, curvature);
vec3 bdrf = vec3( bdrfR.x, bdrfG.y, bdrfB.z);
bdrf *= bdrf;
_fragColor = vec4(vec3(bdrf.xyz), 1.0);
_fragColor = vec4(vec3(bdrf.xyz), 1.0);*/
}

@@ -126,7 +126,6 @@ void main(void) {

// Calculate the perspective scale.
float perspectiveScale = (-getProjScaleEye() / Zeye);
float pixPerspectiveScaleInv = 1.0 / (perspectiveScale);

vec2 viewportScale = perspectiveScale * getInvWidthHeight();

@@ -150,8 +149,21 @@ void main(void) {
vec4 py = vec4(worldPos, 1.0) + vec4(0.0f, dist, 0.0f, 0.0f);
vec4 pz = vec4(worldPos, 1.0) + vec4(0.0f, 0.0f, dist, 0.0f);

px = frameTransform._view * px;
py = frameTransform._view * py;
pz = frameTransform._view * pz;

/*
if (texcoordPos.y > 0.5) {
outFragColor = vec4(fract(px.xyz), 1.0);
} else {
outFragColor = vec4(fract(eyePos.xyz), 1.0);
}
return;
*/

// Project px, py pz to homogeneous clip space
mat4 viewProj = getProjection(stereoSide.x) * frameTransform._view;
mat4 viewProj = getProjection(stereoSide.x);
px = viewProj * px;
py = viewProj * py;
pz = viewProj * pz;

@@ -162,7 +174,17 @@ void main(void) {
pz.xy /= pz.w;

vec2 nclipPos = (texcoordPos - 0.5) * 2.0;
/*
if (texcoordPos.y > 0.5) {
outFragColor = vec4(px.xy * 0.5 + 0.5, 0.0, 1.0);
} else {
outFragColor = vec4(nclipPos * 0.5 + 0.5, 0.0, 1.0);
}
return;
*/

float pixPerspectiveScaleInv = 1.0 / (perspectiveScale);
//vec2 pixPerspectiveScaleInv = 1.0 / viewportScale;
px.xy = (px.xy - nclipPos) * pixPerspectiveScaleInv;
py.xy = (py.xy - nclipPos) * pixPerspectiveScaleInv;
pz.xy = (pz.xy - nclipPos) * pixPerspectiveScaleInv;

@@ -174,6 +196,6 @@ void main(void) {

// Calculate the mean curvature
float meanCurvature = ((dFdx.x + dFdy.y + dFdz.z) * 0.33333333333333333) * params.curvatureInfo.w;

outFragColor = vec4(vec3(worldNormal + 1.0) * 0.5, (meanCurvature + 1.0) * 0.5);
// outFragColor = vec4((vec3(dFdx.x, dFdy.y, dFdz.z) * params.curvatureInfo.w + 1.0) * 0.5, (meanCurvature + 1.0) * 0.5);
}
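
The last hunk above estimates mean curvature by averaging the diagonal terms of the per-axis normal differences and applying the scale packed in params.curvatureInfo.w. A standalone sketch of that arithmetic (plain C++, names are stand-ins for the GLSL values):

    struct Vec3 { float x, y, z; };

    // Mean-curvature estimate as in the shader above: average (dN/dx).x, (dN/dy).y and
    // (dN/dz).z, then apply the scale stored in curvatureInfo.w.
    float meanCurvature(const Vec3& dNdx, const Vec3& dNdy, const Vec3& dNdz, float curvatureScaleW) {
        return (dNdx.x + dNdy.y + dNdz.z) * (1.0f / 3.0f) * curvatureScaleW;
    }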

@@ -279,8 +279,8 @@ void BlurGaussianDepthAware::run(const SceneContextPointer& sceneContext, const

RenderArgs* args = renderContext->args;

auto& sourceFramebuffer = SourceAndDepth.first.template get<gpu::FramebufferPointer>();
auto& depthTexture = SourceAndDepth.first.template get<gpu::TexturePointer>();
auto& sourceFramebuffer = SourceAndDepth.getFirst();
auto& depthTexture = SourceAndDepth.getSecond();

BlurringResources blurringResources;
if (!updateBlurringResources(sourceFramebuffer, blurringResources)) {

@@ -89,15 +89,6 @@ protected:
bool updateBlurringResources(const gpu::FramebufferPointer& sourceFramebuffer, BlurringResources& blurringResources);
};

template < class T0, class T1 >
class VaryingPair : public std::pair<Varying, Varying> {
public:
using Parent = std::pair<Varying, Varying>;

VaryingPair() : Parent(Varying(T0()), T1()) {}
};

class BlurGaussianDepthAware {
public:
using InputPair = VaryingPair<gpu::FramebufferPointer, gpu::TexturePointer>;

@@ -140,16 +140,6 @@ namespace render {
int getNumItems() { return numItems; }
};

template < class T, int NUM >
class VaryingArray : public std::array<Varying, NUM> {
public:
VaryingArray() {
for (size_t i = 0; i < NUM; i++) {
(*this)[i] = Varying(T());
}
}
};

template <int NUM_FILTERS>
class MultiFilterItem {
public:

@@ -57,6 +57,31 @@ protected:
std::shared_ptr<Concept> _concept;
};

template < class T0, class T1 >
class VaryingPair : public std::pair<Varying, Varying> {
public:
using Parent = std::pair<Varying, Varying>;

VaryingPair() : Parent(Varying(T0()), T1()) {}

const T0& getFirst() const { return first.get<T0>(); }
T0& editFirst() { return first.edit<T0>(); }

const T1& getSecond() const { return second.get<T1>(); }
T1& editSecond() { return second.edit<T1>(); }
};

template < class T, int NUM >
class VaryingArray : public std::array<Varying, NUM> {
public:
VaryingArray() {
for (size_t i = 0; i < NUM; i++) {
(*this)[i] = Varying(T());
}
}
};

class Job;
class Task;
class JobNoIO {};
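
This hunk moves VaryingPair and VaryingArray into the shared task header and adds typed accessors. A short usage sketch of the pair accessors, mirroring the BlurGaussianDepthAware change earlier in the diff (the function name is hypothetical, and the render namespace qualification is assumed from the surrounding hunk context):

    // Hypothetical helper showing the typed accessors added above; the element types
    // match the InputPair used by BlurGaussianDepthAware in this diff.
    using SourceAndDepthPair = render::VaryingPair<gpu::FramebufferPointer, gpu::TexturePointer>;

    void consumeSourceAndDepth(const SourceAndDepthPair& sourceAndDepth) {
        // getFirst()/getSecond() replace the verbose first.get<T>() / second.get<T>() calls.
        const gpu::FramebufferPointer& sourceFramebuffer = sourceAndDepth.getFirst();
        const gpu::TexturePointer& depthTexture = sourceAndDepth.getSecond();
        // ... blur sourceFramebuffer, using depthTexture for the depth-aware weights ...
        (void)sourceFramebuffer;
        (void)depthTexture;
    }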

@@ -48,6 +48,8 @@ Column {
"Shadow",
"Pyramid Depth",
"Curvature",
"NormalCurvature",
"Scattering",
"Ambient Occlusion",
"Ambient Occlusion Blurred",
"Custom Shader"

@@ -29,5 +29,19 @@ Column {
}
}
}

Column{
Repeater {
model: [ "Blur Scale:filterScale:4.0" ]
ConfigSlider {
label: qsTr(modelData.split(":")[0])
integral: false
config: Render.getConfig("DiffuseCurvature")
property: modelData.split(":")[1]
max: modelData.split(":")[2]
min: 0.0
}
}
}
}
}