Mirror of https://github.com/overte-org/overte.git

Commit 54d2f42fa4
11 changed files with 68 additions and 41 deletions

@@ -263,6 +263,12 @@ Item {
             }
             StatText {
                 text: "GPU: " + root.gpuFrameTime.toFixed(1) + " ms"
             }
+            StatText {
+                text: "GPU (Per pixel): " + root.gpuFrameTimePerPixel.toFixed(5) + " ns/pp"
+            }
+            StatText {
+                text: "GPU frame size: " + root.gpuFrameSize.x + " x " + root.gpuFrameSize.y
+            }
             StatText {
                 text: "Triangles: " + root.triangles +

@@ -333,7 +333,13 @@ void Stats::updateStats(bool force) {
     }

     auto gpuContext = qApp->getGPUContext();
-
+    auto displayPlugin = qApp->getActiveDisplayPlugin();
+    if (displayPlugin) {
+        QVector2D dims(displayPlugin->getRecommendedRenderSize().x, displayPlugin->getRecommendedRenderSize().y);
+        dims *= displayPlugin->getRenderResolutionScale();
+        STAT_UPDATE(gpuFrameSize, dims);
+        STAT_UPDATE(gpuFrameTimePerPixel, (float)(gpuContext->getFrameTimerGPUAverage()*1000000.0 / double(dims.x()*dims.y())));
+    }
     // Update Frame timing (in ms)
     STAT_UPDATE(gpuFrameTime, (float)gpuContext->getFrameTimerGPUAverage());
     STAT_UPDATE(batchFrameTime, (float)gpuContext->getFrameTimerBatchAverage());

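For reference, the "ns/pp" figure added to the stats overlay is just a unit conversion of the averaged GPU frame time: milliseconds scaled to nanoseconds, divided by the rendered pixel count, where dims is the recommended render size scaled by the render resolution scale as in the hunk above:

    gpuFrameTimePerPixel [ns/pixel] = (gpuFrameTime [ms] * 1e6) / (dims.x * dims.y)
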
@@ -276,7 +276,9 @@ class Stats : public QQuickItem {
     STATS_PROPERTY(int, gpuTextureExternalMemory, 0)
    STATS_PROPERTY(QString, gpuTextureMemoryPressureState, QString())
     STATS_PROPERTY(int, gpuFreeMemory, 0)
+    STATS_PROPERTY(QVector2D, gpuFrameSize, QVector2D(0,0))
     STATS_PROPERTY(float, gpuFrameTime, 0)
+    STATS_PROPERTY(float, gpuFrameTimePerPixel, 0)
     STATS_PROPERTY(float, batchFrameTime, 0)
     STATS_PROPERTY(float, engineFrameTime, 0)
     STATS_PROPERTY(float, avatarSimulationTime, 0)

@@ -962,6 +964,20 @@ signals:
      */
     void gpuFrameTimeChanged();

+    /**jsdoc
+     * Triggered when the value of the <code>gpuFrameSize</code> property changes.
+     * @function Stats.gpuFrameSizeChanged
+     * @returns {Signal}
+     */
+    void gpuFrameSizeChanged();
+
+    /**jsdoc
+     * Triggered when the value of the <code>gpuFrameTimePerPixel</code> property changes.
+     * @function Stats.gpuFrameTimePerPixelChanged
+     * @returns {Signal}
+     */
+    void gpuFrameTimePerPixelChanged();
+
     /**jsdoc
      * Triggered when the value of the <code>batchFrameTime</code> property changes.
      * @function Stats.batchFrameTimeChanged

@@ -177,9 +177,9 @@ void Haze::setHazeBaseReference(const float hazeBaseReference) {

 void Haze::setHazeBackgroundBlend(const float hazeBackgroundBlend) {
     auto& params = _hazeParametersBuffer.get<Parameters>();

-    if (params.hazeBackgroundBlend != hazeBackgroundBlend) {
-        _hazeParametersBuffer.edit<Parameters>().hazeBackgroundBlend = hazeBackgroundBlend;
+    auto newBlend = 1.0f - hazeBackgroundBlend;
+    if (params.hazeBackgroundBlend != newBlend) {
+        _hazeParametersBuffer.edit<Parameters>().hazeBackgroundBlend = newBlend;
     }
 }

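This inversion pairs with the shader change further down: computeHazeColor now writes the haze blend factor into its alpha channel, and the far-range path multiplies that alpha by hazeParams.backgroundBlend instead of mixing back toward the fragment colour. A small GLSL-style sketch of why storing the complement keeps the result unchanged (illustrative only; b is the value passed to the setter, a the computed haze amount):

    // Old far-range path: apply haze, then blend back toward the fragment colour.
    //   hazed  = mix(fragColor, hazeColor, a);          // haze weight: a
    //   result = mix(hazed, fragColor, b);              // haze weight: a * (1.0 - b)
    // New path: the shader only scales the haze alpha; compositing happens later.
    //   alpha  = a * hazeParams.backgroundBlend;        // setter stored (1.0 - b)
    //   result = mix(fragColor, hazeColor, alpha);      // haze weight: a * (1.0 - b)
    // Either way the haze contribution is weighted by a * (1.0 - b).
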
@@ -235,15 +235,14 @@ vec3 evalGlobalLightingAlphaBlendedWithHaze(

     // Haze
     if ((isHazeEnabled() > 0.0) && (hazeParams.hazeMode & HAZE_MODE_IS_ACTIVE) == HAZE_MODE_IS_ACTIVE) {
-        vec4 colorV4 = computeHazeColor(
-            vec4(color, 1.0), // fragment original color
+        vec4 hazeColor = computeHazeColor(
             positionES, // fragment position in eye coordinates
             fragPositionWS, // fragment position in world coordinates
             invViewMat[3].xyz, // eye position in world coordinates
             lightDirection // keylight direction vector in world coordinates
         );

-        color = colorV4.rgb;
+        color = mix(color.rgb, hazeColor.rgb, hazeColor.a);
     }

     return color;

@@ -273,15 +272,14 @@ vec3 evalGlobalLightingAlphaBlendedWithHaze(

     // Haze
     if ((isHazeEnabled() > 0.0) && (hazeParams.hazeMode & HAZE_MODE_IS_ACTIVE) == HAZE_MODE_IS_ACTIVE) {
-        vec4 colorV4 = computeHazeColor(
-            vec4(color, 1.0), // fragment original color
+        vec4 hazeColor = computeHazeColor(
             positionES, // fragment position in eye coordinates
             positionWS, // fragment position in world coordinates
             invViewMat[3].xyz, // eye position in world coordinates
             lightDirection // keylight direction vector
         );

-        color = colorV4.rgb;
+        color = mix(color.rgb, hazeColor.rgb, hazeColor.a);
     }

     return color;

@@ -107,11 +107,11 @@ void MakeHaze::run(const render::RenderContextPointer& renderContext, graphics::
     haze = _haze;
 }

+// Buffer slots
 const int HazeEffect_ParamsSlot = 0;
 const int HazeEffect_TransformBufferSlot = 1;
-const int HazeEffect_ColorMapSlot = 2;
-const int HazeEffect_LinearDepthMapSlot = 3;
-const int HazeEffect_LightingMapSlot = 4;
+// Texture slots
+const int HazeEffect_LinearDepthMapSlot = 0;

 void DrawHaze::configure(const Config& config) {
 }

@@ -122,11 +122,10 @@ void DrawHaze::run(const render::RenderContextPointer& renderContext, const Inpu
         return;
     }

-    const auto inputBuffer = inputs.get1()->getRenderBuffer(0);
+    const auto outputBuffer = inputs.get1();
     const auto framebuffer = inputs.get2();
     const auto transformBuffer = inputs.get3();
-
-    auto outputBuffer = inputs.get4();
+    const auto lightingModel = inputs.get4();

     auto depthBuffer = framebuffer->getLinearDepthTexture();

@@ -139,6 +138,10 @@ void DrawHaze::run(const render::RenderContextPointer& renderContext, const Inpu
         gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
         gpu::StatePointer state = gpu::StatePointer(new gpu::State());

+        state->setBlendFunction(true,
+            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
+            gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
+
         // Mask out haze on the tablet
         PrepareStencil::testMask(*state);

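The blend function added above is what turns the haze pass into a translucent overlay: Haze.slf now outputs the haze colour with the blend factor in alpha, and the fixed-function blend composites it over the lighting buffer. A rough GLSL-style sketch of the colour-channel arithmetic this state requests (illustrative only; the alpha-channel factors, FACTOR_ALPHA and ONE, are omitted):

    // src = haze fragment (rgb = haze colour, a = haze amount), dst = lighting buffer
    vec3 blended = src.rgb * src.a + dst.rgb * (1.0 - src.a);   // SRC_ALPHA, BLEND_OP_ADD, INV_SRC_ALPHA
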
@@ -148,15 +151,15 @@ void DrawHaze::run(const render::RenderContextPointer& renderContext, const Inpu
                 gpu::Shader::BindingSet slotBindings;
                 slotBindings.insert(gpu::Shader::Binding(std::string("hazeBuffer"), HazeEffect_ParamsSlot));
                 slotBindings.insert(gpu::Shader::Binding(std::string("deferredFrameTransformBuffer"), HazeEffect_TransformBufferSlot));
-                slotBindings.insert(gpu::Shader::Binding(std::string("colorMap"), HazeEffect_ColorMapSlot));
+                slotBindings.insert(gpu::Shader::Binding(std::string("lightingModelBuffer"), render::ShapePipeline::Slot::LIGHTING_MODEL));
                 slotBindings.insert(gpu::Shader::Binding(std::string("linearDepthMap"), HazeEffect_LinearDepthMapSlot));
-                slotBindings.insert(gpu::Shader::Binding(std::string("keyLightBuffer"), HazeEffect_LightingMapSlot));
+                slotBindings.insert(gpu::Shader::Binding(std::string("keyLightBuffer"), render::ShapePipeline::Slot::KEY_LIGHT));
                 gpu::Shader::makeProgram(*program, slotBindings);
             });
         });
     }

-    auto sourceFramebufferSize = glm::ivec2(inputBuffer->getDimensions());
+    auto outputFramebufferSize = glm::ivec2(outputBuffer->getSize());

     gpu::doInBatch("DrawHaze::run", args->_context, [&](gpu::Batch& batch) {
         batch.enableStereo(false);

@@ -165,7 +168,7 @@ void DrawHaze::run(const render::RenderContextPointer& renderContext, const Inpu
         batch.setViewportTransform(args->_viewport);
         batch.setProjectionTransform(glm::mat4());
         batch.resetViewTransform();
-        batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(sourceFramebufferSize, args->_viewport));
+        batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(outputFramebufferSize, args->_viewport));

         batch.setPipeline(_hazePipeline);

@@ -181,17 +184,17 @@ void DrawHaze::run(const render::RenderContextPointer& renderContext, const Inpu
         }

         batch.setUniformBuffer(HazeEffect_TransformBufferSlot, transformBuffer->getFrameTransformBuffer());
+        batch.setUniformBuffer(render::ShapePipeline::Slot::LIGHTING_MODEL, lightingModel->getParametersBuffer());

         auto lightStage = args->_scene->getStage<LightStage>();
         if (lightStage) {
             graphics::LightPointer keyLight;
             keyLight = lightStage->getCurrentKeyLight();
             if (keyLight) {
-                batch.setUniformBuffer(HazeEffect_LightingMapSlot, keyLight->getLightSchemaBuffer());
+                batch.setUniformBuffer(render::ShapePipeline::Slot::KEY_LIGHT, keyLight->getLightSchemaBuffer());
             }
         }

-        batch.setResourceTexture(HazeEffect_ColorMapSlot, inputBuffer);
         batch.setResourceTexture(HazeEffect_LinearDepthMapSlot, depthBuffer);

         batch.draw(gpu::TRIANGLE_STRIP, 4);

@@ -22,6 +22,7 @@
 #include <graphics/Haze.h>

 #include "SurfaceGeometryPass.h"
+#include "LightingModel.h"

 using LinearDepthFramebufferPointer = std::shared_ptr<LinearDepthFramebuffer>;

@@ -159,7 +160,7 @@ public:

 class DrawHaze {
 public:
-    using Inputs = render::VaryingSet5<graphics::HazePointer, gpu::FramebufferPointer, LinearDepthFramebufferPointer, DeferredFrameTransformPointer, gpu::FramebufferPointer>;
+    using Inputs = render::VaryingSet5<graphics::HazePointer, gpu::FramebufferPointer, LinearDepthFramebufferPointer, DeferredFrameTransformPointer, LightingModelPointer>;
     using Config = HazeConfig;
     using JobModel = render::Job::ModelI<DrawHaze, Inputs, Config>;

@@ -228,15 +228,14 @@ vec3 evalGlobalLightingAlphaBlendedWithHaze(
     // Haze
     // FIXME - temporarily removed until we support it for forward...
     /* if ((hazeParams.hazeMode & HAZE_MODE_IS_ACTIVE) == HAZE_MODE_IS_ACTIVE) {
-        vec4 colorV4 = computeHazeColor(
-            vec4(color, 1.0), // fragment original color
+        vec4 hazeColor = computeHazeColor(
             positionES, // fragment position in eye coordinates
             fragPositionWS, // fragment position in world coordinates
             invViewMat[3].xyz, // eye position in world coordinates
             lightDirection // keylight direction vector
         );

-        color = colorV4.rgb;
+        color = mix(color.rgb, hazeColor.rgb, hazeColor.a);
     }*/

     return color;

@@ -22,7 +22,6 @@

 <@include Haze.slh@>

-uniform sampler2D colorMap;
 uniform sampler2D linearDepthMap;

 vec4 unpackPositionFromZeye(vec2 texcoord) {

@@ -46,7 +45,6 @@ void main(void) {
         discard;
     }

-    vec4 fragColor = texture(colorMap, varTexCoord0);
     vec4 fragPositionES = unpackPositionFromZeye(varTexCoord0);

     mat4 viewInverse = getViewInverse();

@@ -56,5 +54,8 @@ void main(void) {
     Light light = getKeyLight();
     vec3 lightDirectionWS = getLightDirection(light);

-    outFragColor = computeHazeColor(fragColor, fragPositionES.xyz, fragPositionWS.xyz, eyePositionWS.xyz, lightDirectionWS);
+    outFragColor = computeHazeColor(fragPositionES.xyz, fragPositionWS.xyz, eyePositionWS.xyz, lightDirectionWS);
+    if (outFragColor.a < 1e-4) {
+        discard;
+    }
 }

@@ -92,22 +92,21 @@ vec3 computeHazeColorKeyLightAttenuation(vec3 color, vec3 lightDirectionWS, vec3
 }

 // Input:
-//   fragColor - fragment original color
 //   fragPositionES - fragment position in eye coordinates
 //   fragPositionWS - fragment position in world coordinates
 //   eyePositionWS - eye position in world coordinates
 // Output:
-//   fragment colour after haze effect
+//   haze colour and alpha contains haze blend factor
 //
 // General algorithm taken from http://www.iquilezles.org/www/articles/fog/fog.htm, with permission
 //
-vec4 computeHazeColor(vec4 fragColor, vec3 fragPositionES, vec3 fragPositionWS, vec3 eyePositionWS, vec3 lightDirectionWS) {
+vec4 computeHazeColor(vec3 fragPositionES, vec3 fragPositionWS, vec3 eyePositionWS, vec3 lightDirectionWS) {
     // Distance to fragment
     float distance = length(fragPositionES);
     float eyeWorldHeight = eyePositionWS.y;

     // Convert haze colour from uniform into a vec4
     vec4 hazeColor = vec4(hazeParams.hazeColor, 1.0);

     // Use the haze colour for the glare colour, if blend is not enabled
     vec4 blendedHazeColor;

@@ -149,13 +148,13 @@ vec4 computeHazeColor(vec4 fragColor, vec3 fragPositionES, vec3 fragPositionWS,
         vec3 hazeAmount = 1.0 - exp(-hazeIntegral);

         // Compute color after haze effect
-        potentialFragColor = mix(fragColor, vec4(1.0, 1.0, 1.0, 1.0), vec4(hazeAmount, 1.0));
+        potentialFragColor = vec4(1.0, 1.0, 1.0, hazeAmount);
     } else if ((hazeParams.hazeMode & HAZE_MODE_IS_ALTITUDE_BASED) != HAZE_MODE_IS_ALTITUDE_BASED) {
         // Haze is based only on range
         float hazeAmount = 1.0 - exp(-distance * hazeParams.hazeRangeFactor);

         // Compute color after haze effect
-        potentialFragColor = mix(fragColor, blendedHazeColor, hazeAmount);
+        potentialFragColor = vec4(blendedHazeColor.rgb, hazeAmount);
     } else {
         // Haze is based on both range and altitude
         // Taken from www.crytek.com/download/GDC2007_RealtimeAtmoFxInGamesRev.ppt

@@ -181,16 +180,14 @@ vec4 computeHazeColor(vec4 fragColor, vec3 fragPositionES, vec3 fragPositionWS,
         float hazeAmount = 1.0 - exp(-hazeIntegral);

         // Compute color after haze effect
-        potentialFragColor = mix(fragColor, blendedHazeColor, hazeAmount);
+        potentialFragColor = vec4(blendedHazeColor.rgb, hazeAmount);
     }

     // Mix with background at far range
     const float BLEND_DISTANCE = 27000.0f;
-    vec4 outFragColor;
+    vec4 outFragColor = potentialFragColor;
     if (distance > BLEND_DISTANCE) {
-        outFragColor = mix(potentialFragColor, fragColor, hazeParams.backgroundBlend);
-    } else {
-        outFragColor = potentialFragColor;
+        outFragColor.a *= hazeParams.backgroundBlend;
     }

     return outFragColor;

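Taken together, computeHazeColor now returns the haze colour with the blend factor in its alpha channel instead of a pre-composited fragment colour. A minimal GLSL usage sketch, mirroring the forward-rendering callers earlier in this diff (illustrative, not part of the commit):

    vec4 hazeColor = computeHazeColor(positionES, positionWS, invViewMat[3].xyz, lightDirection);
    color = mix(color.rgb, hazeColor.rgb, hazeColor.a);  // forward path composites in the shader; the deferred pass relies on the blend state
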
@@ -174,7 +174,7 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren
     // Similar to light stage, background stage has been filled by several potential render items and resolved for the frame in this job
     task.addJob<DrawBackgroundStage>("DrawBackgroundDeferred", lightingModel);

-    const auto drawHazeInputs = render::Varying(DrawHaze::Inputs(hazeModel, lightingFramebuffer, linearDepthTarget, deferredFrameTransform, lightingFramebuffer));
+    const auto drawHazeInputs = render::Varying(DrawHaze::Inputs(hazeModel, lightingFramebuffer, linearDepthTarget, deferredFrameTransform, lightingModel));
     task.addJob<DrawHaze>("DrawHazeDeferred", drawHazeInputs);

     // Render transparent objects forward in LightingBuffer