Mirror of https://github.com/lubosz/overte.git (synced 2025-04-19 16:44:04 +02:00)
Commit 70e068ddb7: 102 changed files with 5307 additions and 745 deletions
@@ -75,7 +75,7 @@ bool StereoDisplayPlugin::internalActivate() {
    _container->removeMenu(FRAMERATE);
    _screen = qApp->primaryScreen();
    _container->setFullscreen(_screen);
    // _container->setFullscreen(_screen);
    return Parent::internalActivate();
}

@@ -41,5 +41,5 @@ void main(void) {
    vec3 yzDiffuseScaled = yzDiffuse.rgb * abs(worldNormal.x);
    vec4 diffuse = vec4(xyDiffuseScaled + xzDiffuseScaled + yzDiffuseScaled, 1.0);

    packDeferredFragment(_normal, 1.0, vec3(diffuse), DEFAULT_ROUGHNESS, DEFAULT_METALLIC, DEFAULT_EMISSIVE, DEFAULT_OCCLUSION);
    packDeferredFragment(_normal, 1.0, vec3(diffuse), DEFAULT_ROUGHNESS, DEFAULT_METALLIC, DEFAULT_EMISSIVE, DEFAULT_OCCLUSION, DEFAULT_SCATTERING);
}
@@ -462,7 +462,7 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
    QVector<ExtractedBlendshape> blendshapes;

    QHash<QString, FBXModel> models;
    QHash<QString, Cluster> clusters;
    QHash<QString, AnimationCurve> animationCurves;

    QHash<QString, QString> typeFlags;

@@ -167,6 +167,7 @@ public:
    FBXTexture metallicTexture;
    FBXTexture emissiveTexture;
    FBXTexture occlusionTexture;
    FBXTexture scatteringTexture;
    FBXTexture lightmapTexture;
    glm::vec2 lightmapParams{ 0.0f, 1.0f };
@@ -253,6 +253,14 @@ void FBXReader::consolidateFBXMaterials() {
        }
    }

    if (material.name.contains("body_mat") || material.name.contains("skin")) {
        material._material->setScattering(1.0);
        if (!material.emissiveTexture.isNull()) {
            material.scatteringTexture = material.emissiveTexture;
            material.emissiveTexture = FBXTexture();
        }
    }

    if (material.opacity <= 0.0f) {
        material._material->setOpacity(1.0f);
    } else {
@@ -14,11 +14,16 @@
#include <QObject>
#include <QOpenGLDebugLogger>

void OpenGLDebug::log(const QOpenGLDebugMessage & debugMessage) {
    qDebug() << debugMessage;
}

void setupDebugLogger(QObject* window) {
    QOpenGLDebugLogger* logger = new QOpenGLDebugLogger(window);
    logger->initialize(); // initializes in the current context, i.e. ctx
    logger->enableMessages();
    QObject::connect(logger, &QOpenGLDebugLogger::messageLogged, window, [&](const QOpenGLDebugMessage & debugMessage) {
        qDebug() << debugMessage;
        OpenGLDebug::log(debugMessage);
    });
}

@@ -13,7 +13,13 @@
#define hifi_QOpenGLDebugLoggerWrapper_h

class QObject;
class QOpenGLDebugMessage;

void setupDebugLogger(QObject* window);

class OpenGLDebug {
public:
    static void log(const QOpenGLDebugMessage & debugMessage);
};

#endif // hifi_QOpenGLDebugLoggerWrapper_h
@@ -700,7 +700,7 @@ bool compileShader(GLenum shaderDomain, const std::string& shaderSource, const s
    }
    qCWarning(gpugllogging) << "GLShader::compileShader - errors:";
    qCWarning(gpugllogging) << temp;
    delete[] temp;

    glDeleteShader(glshader);
    return false;

@@ -91,6 +91,9 @@ const Backend::TransformCamera& Backend::TransformCamera::recomputeDerived(const
    Mat4 viewUntranslated = _view;
    viewUntranslated[3] = Vec4(0.0f, 0.0f, 0.0f, 1.0f);
    _projectionViewUntranslated = _projection * viewUntranslated;

    _stereoInfo = Vec4(0.0f);

    return *this;
}

@@ -104,7 +107,9 @@ Backend::TransformCamera Backend::TransformCamera::getEyeCamera(int eye, const S
    }
    result._projection = _stereo._eyeProjections[eye];
    result.recomputeDerived(offsetTransform);

    result._stereoInfo = Vec4(1.0f, (float) eye, 0.0f, 0.0f);

    return result;
}
@@ -104,6 +104,7 @@ public:
    Mat4 _projection;
    mutable Mat4 _projectionInverse;
    Vec4 _viewport; // Public value is int but float in the shader to stay in floats for all the transform computations.
    mutable Vec4 _stereoInfo;

    const Backend::TransformCamera& recomputeDerived(const Transform& xformView) const;
    TransformCamera getEyeCamera(int eye, const StereoState& stereo, const Transform& xformView) const;

@@ -18,6 +18,7 @@ struct TransformCamera {
    mat4 _projection;
    mat4 _projectionInverse;
    vec4 _viewport;
    vec4 _stereoInfo;
};

layout(std140) uniform transformCameraBuffer {

@@ -31,6 +32,16 @@ TransformCamera getTransformCamera() {
vec3 getEyeWorldPos() {
    return _camera._viewInverse[3].xyz;
}

bool cam_isStereo() {
    return _camera._stereoInfo.x > 0.0;
}

float cam_getStereoSide() {
    return _camera._stereoInfo.y;
}

<@endfunc@>
@@ -487,6 +487,11 @@ NetworkMaterial::NetworkMaterial(const FBXMaterial& material, const QUrl& textur
        setTextureMap(MapChannel::EMISSIVE_MAP, map);
    }

    if (!material.scatteringTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.scatteringTexture, NetworkTexture::SCATTERING_TEXTURE, MapChannel::SCATTERING_MAP);
        setTextureMap(MapChannel::SCATTERING_MAP, map);
    }

    if (!material.lightmapTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.lightmapTexture, NetworkTexture::LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
        _lightmapTransform = material.lightmapTexture.transform;

@@ -507,6 +512,7 @@ void NetworkMaterial::setTextures(const QVariantMap& textureMap) {
    const auto& occlusionName = getTextureName(MapChannel::OCCLUSION_MAP);
    const auto& emissiveName = getTextureName(MapChannel::EMISSIVE_MAP);
    const auto& lightmapName = getTextureName(MapChannel::LIGHTMAP_MAP);
    const auto& scatteringName = getTextureName(MapChannel::SCATTERING_MAP);

    if (!albedoName.isEmpty()) {
        auto url = textureMap.contains(albedoName) ? textureMap[albedoName].toUrl() : QUrl();

@@ -549,6 +555,12 @@ void NetworkMaterial::setTextures(const QVariantMap& textureMap) {
        setTextureMap(MapChannel::EMISSIVE_MAP, map);
    }

    if (!scatteringName.isEmpty()) {
        auto url = textureMap.contains(scatteringName) ? textureMap[scatteringName].toUrl() : QUrl();
        auto map = fetchTextureMap(url, NetworkTexture::SCATTERING_TEXTURE, MapChannel::SCATTERING_MAP);
        setTextureMap(MapChannel::SCATTERING_MAP, map);
    }

    if (!lightmapName.isEmpty()) {
        auto url = textureMap.contains(lightmapName) ? textureMap[lightmapName].toUrl() : QUrl();
        auto map = fetchTextureMap(url, NetworkTexture::LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);

@@ -51,6 +51,7 @@ public:
    EMISSIVE_TEXTURE,
    CUBE_TEXTURE,
    OCCLUSION_TEXTURE,
    SCATTERING_TEXTURE = OCCLUSION_TEXTURE,
    LIGHTMAP_TEXTURE,
    CUSTOM_TEXTURE
};
@@ -126,28 +126,48 @@ float getLightAmbientMapNumMips(Light l) {
}

<@if GPU_FEATURE_PROFILE == GPU_CORE @>
uniform lightBuffer {
    Light light;
};
Light getLight() {
    return light;
}
<@else@>
uniform vec4 lightBuffer[7];
Light getLight() {
    Light light;
    light._position = lightBuffer[0];
    light._direction = lightBuffer[1];
    light._color = lightBuffer[2];
    light._attenuation = lightBuffer[3];
    light._spot = lightBuffer[4];
    light._shadow = lightBuffer[5];
    light._control = lightBuffer[6];

    return light;

bool clipFragToLightVolumePoint(Light light, vec3 fragPos, out vec4 fragLightVecLen2) {
    fragLightVecLen2.xyz = getLightPosition(light) - fragPos.xyz;
    fragLightVecLen2.w = dot(fragLightVecLen2.xyz, fragLightVecLen2.xyz);

    // Kill if too far from the light center
    if (fragLightVecLen2.w > getLightCutoffSquareRadius(light)) {
        return false;
    }
    return true;
}

bool clipFragToLightVolumeSpot(Light light, vec3 fragPos, out vec4 fragLightVecLen2, out vec4 fragLightDirLen, out float cosSpotAngle) {
    fragLightVecLen2.xyz = getLightPosition(light) - fragPos.xyz;
    fragLightVecLen2.w = dot(fragLightVecLen2.xyz, fragLightVecLen2.xyz);

    // Kill if too far from the light center
    if (fragLightVecLen2.w > getLightCutoffSquareRadius(light)) {
        return false;
    }

    // Allright we re valid in the volume
    fragLightDirLen.w = length(fragLightVecLen2.xyz);
    fragLightDirLen.xyz = fragLightVecLen2.xyz / fragLightDirLen.w;

    // Kill if not in the spot light (ah ah !)
    cosSpotAngle = max(-dot(fragLightDirLen.xyz, getLightDirection(light)), 0.0);
    if (cosSpotAngle < getLightSpotAngleCos(light)) {
        return false;
    }

    return true;
}
<@endif@>
@@ -104,6 +104,13 @@ void Material::setMetallic(float metallic) {
    _schemaBuffer.edit<Schema>()._metallic = metallic;
}

void Material::setScattering(float scattering) {
    scattering = glm::clamp(scattering, 0.0f, 1.0f);
    _key.setMetallic(scattering > 0.0f);
    _schemaBuffer.edit<Schema>()._key = (uint32)_key._flags.to_ulong();
    _schemaBuffer.edit<Schema>()._scattering = scattering;
}

void Material::setTextureMap(MapChannel channel, const TextureMapPointer& textureMap) {
    if (textureMap) {
        _key.setMapChannel(channel, (true));

@@ -35,6 +35,7 @@ public:
    OPACITY_VAL_BIT,
    OPACITY_MASK_MAP_BIT, // Opacity Map and Opacity MASK map are mutually exclusive
    OPACITY_TRANSLUCENT_MAP_BIT,
    SCATTERING_VAL_BIT,

    // THe map bits must be in the same sequence as the enum names for the map channels
    EMISSIVE_MAP_BIT,

@@ -44,6 +45,7 @@ public:
    NORMAL_MAP_BIT,
    OCCLUSION_MAP_BIT,
    LIGHTMAP_MAP_BIT,
    SCATTERING_MAP_BIT,

    NUM_FLAGS,
};

@@ -57,6 +59,7 @@ public:
    NORMAL_MAP,
    OCCLUSION_MAP,
    LIGHTMAP_MAP,
    SCATTERING_MAP,

    NUM_MAP_CHANNELS,
};

@@ -83,6 +86,8 @@ public:
    Builder& withTranslucentFactor() { _flags.set(OPACITY_VAL_BIT); return (*this); }

    Builder& withScattering() { _flags.set(SCATTERING_VAL_BIT); return (*this); }

    Builder& withEmissiveMap() { _flags.set(EMISSIVE_MAP_BIT); return (*this); }
    Builder& withAlbedoMap() { _flags.set(ALBEDO_MAP_BIT); return (*this); }
    Builder& withMetallicMap() { _flags.set(METALLIC_MAP_BIT); return (*this); }

@@ -94,6 +99,7 @@ public:
    Builder& withNormalMap() { _flags.set(NORMAL_MAP_BIT); return (*this); }
    Builder& withOcclusionMap() { _flags.set(OCCLUSION_MAP_BIT); return (*this); }
    Builder& withLightmapMap() { _flags.set(LIGHTMAP_MAP_BIT); return (*this); }
    Builder& withScatteringMap() { _flags.set(SCATTERING_MAP_BIT); return (*this); }

    // Convenient standard keys that we will keep on using all over the place
    static MaterialKey opaqueAlbedo() { return Builder().withAlbedo().build(); }

@@ -135,7 +141,7 @@ public:
    void setOpacityMaskMap(bool value) { _flags.set(OPACITY_MASK_MAP_BIT, value); }
    bool isOpacityMaskMap() const { return _flags[OPACITY_MASK_MAP_BIT]; }

    void setNormalMap(bool value) { _flags.set(NORMAL_MAP_BIT, value); }
    bool isNormalMap() const { return _flags[NORMAL_MAP_BIT]; }

@@ -145,6 +151,12 @@ public:
    void setLightmapMap(bool value) { _flags.set(LIGHTMAP_MAP_BIT, value); }
    bool isLightmapMap() const { return _flags[LIGHTMAP_MAP_BIT]; }

    void setScattering(bool value) { _flags.set(SCATTERING_VAL_BIT, value); }
    bool isScattering() const { return _flags[SCATTERING_VAL_BIT]; }

    void setScatteringMap(bool value) { _flags.set(SCATTERING_MAP_BIT, value); }
    bool isScatteringMap() const { return _flags[SCATTERING_MAP_BIT]; }

    void setMapChannel(MapChannel channel, bool value) { _flags.set(EMISSIVE_MAP_BIT + channel, value); }
    bool isMapChannel(MapChannel channel) const { return _flags[EMISSIVE_MAP_BIT + channel]; }

@@ -218,6 +230,13 @@ public:
    Builder& withoutLightmapMap() { _value.reset(MaterialKey::LIGHTMAP_MAP_BIT); _mask.set(MaterialKey::LIGHTMAP_MAP_BIT); return (*this); }
    Builder& withLightmapMap() { _value.set(MaterialKey::LIGHTMAP_MAP_BIT); _mask.set(MaterialKey::LIGHTMAP_MAP_BIT); return (*this); }

    Builder& withoutScattering() { _value.reset(MaterialKey::SCATTERING_VAL_BIT); _mask.set(MaterialKey::SCATTERING_VAL_BIT); return (*this); }
    Builder& withScattering() { _value.set(MaterialKey::SCATTERING_VAL_BIT); _mask.set(MaterialKey::SCATTERING_VAL_BIT); return (*this); }

    Builder& withoutScatteringMap() { _value.reset(MaterialKey::SCATTERING_MAP_BIT); _mask.set(MaterialKey::SCATTERING_MAP_BIT); return (*this); }
    Builder& withScatteringMap() { _value.set(MaterialKey::SCATTERING_MAP_BIT); _mask.set(MaterialKey::SCATTERING_MAP_BIT); return (*this); }

    // Convenient standard keys that we will keep on using all over the place
    static MaterialFilter opaqueAlbedo() { return Builder().withAlbedo().withoutTranslucentFactor().build(); }
};

@@ -275,6 +294,8 @@ public:
    void setRoughness(float roughness);
    float getRoughness() const { return _schemaBuffer.get<Schema>()._roughness; }

    void setScattering(float scattering);
    float getScattering() const { return _schemaBuffer.get<Schema>()._scattering; }

    // Schema to access the attribute values of the material
    class Schema {

@@ -288,7 +309,9 @@ public:
    glm::vec3 _fresnel{ 0.03f }; // Fresnel value for a default non metallic
    float _metallic{ 0.0f }; // Not Metallic

    glm::vec3 _spare{ 0.0f };
    float _scattering{ 0.0f }; // Scattering info

    glm::vec2 _spare{ 0.0f };

    uint32_t _key{ 0 }; // a copy of the materialKey
@@ -15,7 +15,7 @@ struct Material {
    vec4 _emissiveOpacity;
    vec4 _albedoRoughness;
    vec4 _fresnelMetallic;
    vec4 _spareKey;
    vec4 _scatteringSpare2Key;
};

uniform materialBuffer {

@@ -37,7 +37,9 @@ float getMaterialMetallic(Material m) { return m._fresnelMetallic.a; }

float getMaterialShininess(Material m) { return 1.0 - getMaterialRoughness(m); }

int getMaterialKey(Material m) { return floatBitsToInt(m._spareKey.w); }
float getMaterialScattering(Material m) { return m._scatteringSpare2Key.x; }

int getMaterialKey(Material m) { return floatBitsToInt(m._scatteringSpare2Key.w); }

const int EMISSIVE_VAL_BIT = 0x00000001;
const int UNLIT_VAL_BIT = 0x00000002;

@@ -47,14 +49,17 @@ const int GLOSSY_VAL_BIT = 0x00000010;
const int OPACITY_VAL_BIT = 0x00000020;
const int OPACITY_MASK_MAP_BIT = 0x00000040;
const int OPACITY_TRANSLUCENT_MAP_BIT = 0x00000080;
const int SCATTERING_VAL_BIT = 0x00000100;

const int EMISSIVE_MAP_BIT = 0x00000100;
const int ALBEDO_MAP_BIT = 0x00000200;
const int METALLIC_MAP_BIT = 0x00000400;
const int ROUGHNESS_MAP_BIT = 0x00000800;
const int NORMAL_MAP_BIT = 0x00001000;
const int OCCLUSION_MAP_BIT = 0x00002000;
const int LIGHTMAP_MAP_BIT = 0x00004000;

const int EMISSIVE_MAP_BIT = 0x00000200;
const int ALBEDO_MAP_BIT = 0x00000400;
const int METALLIC_MAP_BIT = 0x00000800;
const int ROUGHNESS_MAP_BIT = 0x00001000;
const int NORMAL_MAP_BIT = 0x00002000;
const int OCCLUSION_MAP_BIT = 0x00004000;
const int LIGHTMAP_MAP_BIT = 0x00008000;
const int SCATTERING_MAP_BIT = 0x00010000;

<@endif@>
@@ -47,6 +47,9 @@ enum Slot {
    Lighting,
    Shadow,
    Pyramid,
    Curvature,
    DiffusedCurvature,
    Scattering,
    AmbientOcclusion,
    AmbientOcclusionBlurred
};

@@ -108,6 +111,13 @@ static const std::string DEFAULT_LIGHTMAP_SHADER{
    " }"
};

static const std::string DEFAULT_SCATTERING_SHADER{
    "vec4 getFragmentColor() {"
    "    DeferredFragment frag = unpackDeferredFragmentNoPosition(uv);"
    "    return (frag.mode == FRAG_MODE_SCATTERING ? vec4(vec3(pow(frag.scattering, 1.0 / 2.2)), 1.0) : vec4(vec3(0.0), 1.0));"
    " }"
};

static const std::string DEFAULT_DEPTH_SHADER {
    "vec4 getFragmentColor() {"
    "    return vec4(vec3(texture(depthMap, uv).x), 1.0);"

@@ -138,6 +148,45 @@ static const std::string DEFAULT_PYRAMID_DEPTH_SHADER {
    " }"
};

static const std::string DEFAULT_CURVATURE_SHADER{
    "vec4 getFragmentColor() {"
    "    return vec4(pow(vec3(texture(curvatureMap, uv).a), vec3(1.0 / 2.2)), 1.0);"
    // "    return vec4(pow(vec3(texture(curvatureMap, uv).xyz), vec3(1.0 / 2.2)), 1.0);"
    //"    return vec4(vec3(1.0 - textureLod(pyramidMap, uv, 3).x * 0.01), 1.0);"
    " }"
};

static const std::string DEFAULT_NORMAL_CURVATURE_SHADER{
    "vec4 getFragmentColor() {"
    //"    return vec4(pow(vec3(texture(curvatureMap, uv).a), vec3(1.0 / 2.2)), 1.0);"
    "    return vec4(vec3(texture(curvatureMap, uv).xyz), 1.0);"
    //"    return vec4(vec3(1.0 - textureLod(pyramidMap, uv, 3).x * 0.01), 1.0);"
    " }"
};

static const std::string DEFAULT_DIFFUSED_CURVATURE_SHADER{
    "vec4 getFragmentColor() {"
    "    return vec4(pow(vec3(texture(diffusedCurvatureMap, uv).a), vec3(1.0 / 2.2)), 1.0);"
    // "    return vec4(pow(vec3(texture(curvatureMap, uv).xyz), vec3(1.0 / 2.2)), 1.0);"
    //"    return vec4(vec3(1.0 - textureLod(pyramidMap, uv, 3).x * 0.01), 1.0);"
    " }"
};

static const std::string DEFAULT_DIFFUSED_NORMAL_CURVATURE_SHADER{
    "vec4 getFragmentColor() {"
    //"    return vec4(pow(vec3(texture(curvatureMap, uv).a), vec3(1.0 / 2.2)), 1.0);"
    "    return vec4(vec3(texture(diffusedCurvatureMap, uv).xyz), 1.0);"
    //"    return vec4(vec3(1.0 - textureLod(pyramidMap, uv, 3).x * 0.01), 1.0);"
    " }"
};

static const std::string DEFAULT_DEBUG_SCATTERING_SHADER{
    "vec4 getFragmentColor() {"
    "    return vec4(pow(vec3(texture(scatteringMap, uv).xyz), vec3(1.0 / 2.2)), 1.0);"
    // "    return vec4(vec3(texture(scatteringMap, uv).xyz), 1.0);"
    " }"
};

static const std::string DEFAULT_AMBIENT_OCCLUSION_SHADER{
    "vec4 getFragmentColor() {"
    "    return vec4(vec3(texture(obscuranceMap, uv).x), 1.0);"

@@ -197,12 +246,24 @@ std::string DebugDeferredBuffer::getShaderSourceCode(Mode mode, std::string cust
        return DEFAULT_OCCLUSION_SHADER;
    case LightmapMode:
        return DEFAULT_LIGHTMAP_SHADER;
    case ScatteringMode:
        return DEFAULT_SCATTERING_SHADER;
    case LightingMode:
        return DEFAULT_LIGHTING_SHADER;
    case ShadowMode:
        return DEFAULT_SHADOW_SHADER;
    case PyramidDepthMode:
        return DEFAULT_PYRAMID_DEPTH_SHADER;
    case CurvatureMode:
        return DEFAULT_CURVATURE_SHADER;
    case NormalCurvatureMode:
        return DEFAULT_NORMAL_CURVATURE_SHADER;
    case DiffusedCurvatureMode:
        return DEFAULT_DIFFUSED_CURVATURE_SHADER;
    case DiffusedNormalCurvatureMode:
        return DEFAULT_DIFFUSED_NORMAL_CURVATURE_SHADER;
    case ScatteringDebugMode:
        return DEFAULT_DEBUG_SCATTERING_SHADER;
    case AmbientOcclusionMode:
        return DEFAULT_AMBIENT_OCCLUSION_SHADER;
    case AmbientOcclusionBlurredMode:

@@ -257,6 +318,9 @@ const gpu::PipelinePointer& DebugDeferredBuffer::getPipeline(Mode mode, std::str
        slotBindings.insert(gpu::Shader::Binding("lightingMap", Lighting));
        slotBindings.insert(gpu::Shader::Binding("shadowMap", Shadow));
        slotBindings.insert(gpu::Shader::Binding("pyramidMap", Pyramid));
        slotBindings.insert(gpu::Shader::Binding("curvatureMap", Curvature));
        slotBindings.insert(gpu::Shader::Binding("diffusedCurvatureMap", DiffusedCurvature));
        slotBindings.insert(gpu::Shader::Binding("scatteringMap", Scattering));
        slotBindings.insert(gpu::Shader::Binding("occlusionBlurredMap", AmbientOcclusionBlurred));
        gpu::Shader::makeProgram(*program, slotBindings);

@@ -282,12 +346,18 @@ void DebugDeferredBuffer::configure(const Config& config) {
    _size = config.size;
}

void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& inputs) {
    assert(renderContext->args);
    assert(renderContext->args->hasViewFrustum());
    RenderArgs* args = renderContext->args;

    auto& diffusedCurvatureFramebuffer = inputs.get0();
    auto& scatteringFramebuffer = inputs.get1();

    gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
        batch.enableStereo(false);
        batch.setViewportTransform(args->_viewport);

        const auto geometryBuffer = DependencyManager::get<GeometryCache>();
        const auto framebufferCache = DependencyManager::get<FramebufferCache>();
        const auto textureCache = DependencyManager::get<TextureCache>();

@@ -313,6 +383,9 @@ void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const Ren
        batch.setResourceTexture(Lighting, framebufferCache->getLightingTexture());
        batch.setResourceTexture(Shadow, lightStage.lights[0]->shadow.framebuffer->getDepthStencilBuffer());
        batch.setResourceTexture(Pyramid, framebufferCache->getDepthPyramidTexture());
        batch.setResourceTexture(Curvature, framebufferCache->getCurvatureTexture());
        batch.setResourceTexture(DiffusedCurvature, diffusedCurvatureFramebuffer->getRenderBuffer(0));
        batch.setResourceTexture(Scattering, scatteringFramebuffer->getRenderBuffer(0));
        if (DependencyManager::get<DeferredLightingEffect>()->isAmbientOcclusionEnabled()) {
            batch.setResourceTexture(AmbientOcclusion, framebufferCache->getOcclusionTexture());
        } else {
@@ -34,13 +34,14 @@ signals:

class DebugDeferredBuffer {
public:
    using Inputs = render::VaryingSet2<gpu::FramebufferPointer, gpu::FramebufferPointer>;
    using Config = DebugDeferredBufferConfig;
    using JobModel = render::Job::Model<DebugDeferredBuffer, Config>;
    using JobModel = render::Job::ModelI<DebugDeferredBuffer, Inputs, Config>;

    DebugDeferredBuffer();

    void configure(const Config& config);
    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs);

protected:
    friend class DebugDeferredBufferConfig;

@@ -56,9 +57,15 @@ protected:
    UnlitMode,
    OcclusionMode,
    LightmapMode,
    ScatteringMode,
    LightingMode,
    ShadowMode,
    PyramidDepthMode,
    CurvatureMode,
    NormalCurvatureMode,
    DiffusedCurvatureMode,
    DiffusedNormalCurvatureMode,
    ScatteringDebugMode,
    AmbientOcclusionMode,
    AmbientOcclusionBlurredMode,
    CustomMode // Needs to stay last
@@ -20,11 +20,16 @@ const float FRAG_PACK_LIGHTMAPPED_NON_METALLIC = 0.2;
const float FRAG_PACK_LIGHTMAPPED_METALLIC = 0.3;
const float FRAG_PACK_LIGHTMAPPED_RANGE_INV = 1.0 / (FRAG_PACK_LIGHTMAPPED_METALLIC - FRAG_PACK_LIGHTMAPPED_NON_METALLIC);

const float FRAG_PACK_UNLIT = 0.5;
const float FRAG_PACK_SCATTERING_NON_METALLIC = 0.4;
const float FRAG_PACK_SCATTERING_METALLIC = 0.5;
const float FRAG_PACK_SCATTERING_RANGE_INV = 1.0 / (FRAG_PACK_SCATTERING_METALLIC - FRAG_PACK_SCATTERING_NON_METALLIC);

const float FRAG_PACK_UNLIT = 0.6;

const int FRAG_MODE_UNLIT = 0;
const int FRAG_MODE_SHADED = 1;
const int FRAG_MODE_LIGHTMAPPED = 2;
const int FRAG_MODE_SCATTERING = 3;

void unpackModeMetallic(float rawValue, out int mode, out float metallic) {
    if (rawValue <= FRAG_PACK_SHADED_METALLIC) {

@@ -32,7 +37,10 @@ void unpackModeMetallic(float rawValue, out int mode, out float metallic) {
        metallic = clamp((rawValue - FRAG_PACK_SHADED_NON_METALLIC) * FRAG_PACK_SHADED_RANGE_INV, 0.0, 1.0);
    } else if (rawValue <= FRAG_PACK_LIGHTMAPPED_METALLIC) {
        mode = FRAG_MODE_LIGHTMAPPED;
        metallic = clamp((rawValue - FRAG_PACK_LIGHTMAPPED_NON_METALLIC) * FRAG_PACK_SHADED_RANGE_INV, 0.0, 1.0);
        metallic = clamp((rawValue - FRAG_PACK_LIGHTMAPPED_NON_METALLIC) * FRAG_PACK_LIGHTMAPPED_RANGE_INV, 0.0, 1.0);
    } else if (rawValue <= FRAG_PACK_SCATTERING_METALLIC) {
        mode = FRAG_MODE_SCATTERING;
        metallic = clamp((rawValue - FRAG_PACK_SCATTERING_NON_METALLIC) * FRAG_PACK_SCATTERING_RANGE_INV, 0.0, 1.0);
    } else if (rawValue >= FRAG_PACK_UNLIT) {
        mode = FRAG_MODE_UNLIT;
        metallic = 0.0;

@@ -47,6 +55,10 @@ float packLightmappedMetallic(float metallic) {
    return mix(FRAG_PACK_LIGHTMAPPED_NON_METALLIC, FRAG_PACK_LIGHTMAPPED_METALLIC, metallic);
}

float packScatteringMetallic(float metallic) {
    return mix(FRAG_PACK_SCATTERING_NON_METALLIC, FRAG_PACK_SCATTERING_METALLIC, metallic);
}

float packUnlit() {
    return FRAG_PACK_UNLIT;
}
@@ -32,38 +32,6 @@ uniform sampler2D obscuranceMap;
uniform sampler2D lightingMap;

struct DeferredTransform {
    mat4 projection;
    mat4 viewInverse;
    float stereoSide;
    vec3 _spareABC;
};

layout(std140) uniform deferredTransformBuffer {
    DeferredTransform _deferredTransform;
};
DeferredTransform getDeferredTransform() {
    return _deferredTransform;
}

bool getStereoMode(DeferredTransform deferredTransform) {
    return (deferredTransform.stereoSide != 0.0);
}
float getStereoSide(DeferredTransform deferredTransform) {
    return (deferredTransform.stereoSide);
}

vec4 evalEyePositionFromZ(DeferredTransform deferredTransform, float depthVal, vec2 texcoord) {
    vec3 nPos = vec3(texcoord.xy * 2.0f - 1.0f, depthVal * 2.0f - 1.0f);

    // compute the view space position using the depth
    // basically manually pick the proj matrix components to do the inverse
    float Ze = -deferredTransform.projection[3][2] / (nPos.z + deferredTransform.projection[2][2]);
    float Xe = (-Ze * nPos.x - Ze * deferredTransform.projection[2][0] - deferredTransform.projection[3][0]) / deferredTransform.projection[0][0];
    float Ye = (-Ze * nPos.y - Ze * deferredTransform.projection[2][1] - deferredTransform.projection[3][1]) / deferredTransform.projection[1][1];
    return vec4(Xe, Ye, Ze, 1.0f);
}

struct DeferredFragment {
    vec4 normalVal;
    vec4 diffuseVal;

@@ -77,19 +45,10 @@ struct DeferredFragment {
    float roughness;
    vec3 emissive;
    int mode;
    float scattering;
    float depthVal;
};

vec4 unpackDeferredPosition(DeferredTransform deferredTransform, float depthValue, vec2 texcoord) {
    if (getStereoMode(deferredTransform)) {
        if (texcoord.x > 0.5) {
            texcoord.x -= 0.5;
        }
        texcoord.x *= 2.0;
    }
    return evalEyePositionFromZ(deferredTransform, depthValue, texcoord);
}

DeferredFragment unpackDeferredFragmentNoPosition(vec2 texcoord) {

    DeferredFragment frag;

@@ -105,8 +64,17 @@ DeferredFragment unpackDeferredFragmentNoPosition(vec2 texcoord) {

    // Diffuse color and unpack the mode and the metallicness
    frag.diffuse = frag.diffuseVal.xyz;
    frag.scattering = 0.0;
    unpackModeMetallic(frag.diffuseVal.w, frag.mode, frag.metallic);

    frag.emissive = frag.specularVal.xyz;
    frag.obscurance = min(frag.specularVal.w, frag.obscurance);

    if (frag.mode == FRAG_MODE_SCATTERING) {
        frag.scattering = frag.emissive.x;
        frag.emissive = vec3(0.0);
    }

    if (frag.metallic <= 0.5) {
        frag.metallic = 0.0;

@@ -116,13 +84,28 @@ DeferredFragment unpackDeferredFragmentNoPosition(vec2 texcoord) {
        frag.metallic = 1.0;
    }

    frag.emissive = frag.specularVal.xyz;
    frag.obscurance = min(frag.specularVal.w, frag.obscurance);

    return frag;
}

DeferredFragment unpackDeferredFragment(DeferredTransform deferredTransform, vec2 texcoord) {

<@include DeferredTransform.slh@>
<$declareDeferredFrameTransform()$>

vec4 unpackDeferredPosition(DeferredFrameTransform deferredTransform, float depthValue, vec2 texcoord) {
    int side = 0;
    if (isStereo()) {
        if (texcoord.x > 0.5) {
            texcoord.x -= 0.5;
            side = 1;
        }
        texcoord.x *= 2.0;
    }
    float Zeye = evalZeyeFromZdb(depthValue);

    return vec4(evalEyePositionFromZeye(side, Zeye, texcoord), 1.0);
}

DeferredFragment unpackDeferredFragment(DeferredFrameTransform deferredTransform, vec2 texcoord) {

    float depthValue = texture(depthMap, texcoord).r;

@@ -136,5 +119,23 @@ DeferredFragment unpackDeferredFragment(DeferredTransform deferredTransform, vec

<@func declareDeferredCurvature()@>

// the curvature texture
uniform sampler2D curvatureMap;

vec4 fetchCurvature(vec2 texcoord) {
    return texture(curvatureMap, texcoord);
}

// the curvature texture
uniform sampler2D diffusedCurvatureMap;

vec4 fetchDiffusedCurvature(vec2 texcoord) {
    return texture(diffusedCurvatureMap, texcoord);
}

<@endfunc@>

<@endif@>
@@ -30,15 +30,16 @@ const float DEFAULT_METALLIC = 0;
const vec3 DEFAULT_SPECULAR = vec3(0.1);
const vec3 DEFAULT_EMISSIVE = vec3(0.0);
const float DEFAULT_OCCLUSION = 1.0;
const float DEFAULT_SCATTERING = 0.0;
const vec3 DEFAULT_FRESNEL = DEFAULT_EMISSIVE;

void packDeferredFragment(vec3 normal, float alpha, vec3 albedo, float roughness, float metallic, vec3 emissive, float occlusion) {
void packDeferredFragment(vec3 normal, float alpha, vec3 albedo, float roughness, float metallic, vec3 emissive, float occlusion, float scattering) {
    if (alpha != 1.0) {
        discard;
    }
    _fragColor0 = vec4(albedo, packShadedMetallic(metallic));
    _fragColor0 = vec4(albedo, ((scattering > 0.0) ? packScatteringMetallic(metallic) : packShadedMetallic(metallic)));
    _fragColor1 = vec4(packNormal(normal), clamp(roughness, 0.0, 1.0));
    _fragColor2 = vec4(emissive, occlusion);
    _fragColor2 = vec4(((scattering > 0.0) ? vec3(scattering) : emissive), occlusion);
}
libraries/render-utils/src/DeferredFrameTransform.cpp (new file, 71 lines)
@@ -0,0 +1,71 @@
//
//  DeferredFrameTransform.cpp
//  libraries/render-utils/src/
//
//  Created by Sam Gateau 6/3/2016.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "DeferredFrameTransform.h"

#include "gpu/Context.h"
#include "render/Engine.h"

DeferredFrameTransform::DeferredFrameTransform() {
    FrameTransform frameTransform;
    _frameTransformBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(FrameTransform), (const gpu::Byte*) &frameTransform));
}

void DeferredFrameTransform::update(RenderArgs* args) {

    // Update the depth info with near and far (same for stereo)
    auto nearZ = args->getViewFrustum().getNearClip();
    auto farZ = args->getViewFrustum().getFarClip();

    auto& frameTransformBuffer = _frameTransformBuffer.edit<FrameTransform>();
    frameTransformBuffer.depthInfo = glm::vec4(nearZ*farZ, farZ - nearZ, -farZ, 0.0f);

    frameTransformBuffer.pixelInfo = args->_viewport;

    //_parametersBuffer.edit<Parameters>()._ditheringInfo.y += 0.25f;

    Transform cameraTransform;
    args->getViewFrustum().evalViewTransform(cameraTransform);
    cameraTransform.getMatrix(frameTransformBuffer.invView);
    cameraTransform.getInverseMatrix(frameTransformBuffer.view);

    args->getViewFrustum().evalProjectionMatrix(frameTransformBuffer.projectionMono);

    // Running in stero ?
    bool isStereo = args->_context->isStereo();
    if (!isStereo) {
        frameTransformBuffer.projection[0] = frameTransformBuffer.projectionMono;
        frameTransformBuffer.stereoInfo = glm::vec4(0.0f, (float)args->_viewport.z, 0.0f, 0.0f);
        frameTransformBuffer.invpixelInfo = glm::vec4(1.0f / args->_viewport.z, 1.0f / args->_viewport.w, 0.0f, 0.0f);
    } else {

        mat4 projMats[2];
        mat4 eyeViews[2];
        args->_context->getStereoProjections(projMats);
        args->_context->getStereoViews(eyeViews);

        for (int i = 0; i < 2; i++) {
            // Compose the mono Eye space to Stereo clip space Projection Matrix
            auto sideViewMat = projMats[i] * eyeViews[i];
            frameTransformBuffer.projection[i] = sideViewMat;
        }

        frameTransformBuffer.stereoInfo = glm::vec4(1.0f, (float)(args->_viewport.z >> 1), 0.0f, 1.0f);
        frameTransformBuffer.invpixelInfo = glm::vec4(1.0f / (float)(args->_viewport.z >> 1), 1.0f / args->_viewport.w, 0.0f, 0.0f);

    }
}

void GenerateDeferredFrameTransform::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, DeferredFrameTransformPointer& frameTransform) {
    if (!frameTransform) {
        frameTransform = std::make_shared<DeferredFrameTransform>();
    }
    frameTransform->update(renderContext->args);
}
libraries/render-utils/src/DeferredFrameTransform.h (new file, 78 lines)
@@ -0,0 +1,78 @@
//
//  DeferredFrameTransform.h
//  libraries/render-utils/src/
//
//  Created by Sam Gateau 6/3/2016.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_DeferredFrameTransform_h
#define hifi_DeferredFrameTransform_h

#include "gpu/Resource.h"
#include "render/DrawTask.h"

class RenderArgs;

// DeferredFrameTransform is a helper class gathering in one place the needed camera transform
// and frame resolution needed for all the deferred rendering passes taking advantage of the Deferred buffers
class DeferredFrameTransform {
public:
    using UniformBufferView = gpu::BufferView;

    DeferredFrameTransform();

    void update(RenderArgs* args);

    UniformBufferView getFrameTransformBuffer() const { return _frameTransformBuffer; }

protected:

    // Class describing the uniform buffer with the transform info common to the AO shaders
    // It s changing every frame
    class FrameTransform {
    public:
        // Pixel info is { viemport width height and stereo on off}
        glm::vec4 pixelInfo;
        glm::vec4 invpixelInfo;
        // Depth info is { n.f, f - n, -f}
        glm::vec4 depthInfo;
        // Stereo info
        glm::vec4 stereoInfo{ 0.0 };
        // Mono proj matrix or Left and Right proj matrix going from Mono Eye space to side clip space
        glm::mat4 projection[2];
        // THe mono projection for sure
        glm::mat4 projectionMono;
        // Inv View matrix from eye space (mono) to world space
        glm::mat4 invView;
        // View matrix from world space to eye space (mono)
        glm::mat4 view;

        FrameTransform() {}
    };
    UniformBufferView _frameTransformBuffer;

};

using DeferredFrameTransformPointer = std::shared_ptr<DeferredFrameTransform>;

class GenerateDeferredFrameTransform {
public:
    using JobModel = render::Job::ModelO<GenerateDeferredFrameTransform, DeferredFrameTransformPointer>;

    GenerateDeferredFrameTransform() {}

    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, DeferredFrameTransformPointer& frameTransform);

private:
};

#endif // hifi_SurfaceGeometryPass_h
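Editor's note (not part of this commit): GenerateDeferredFrameTransform follows the engine's output-job pattern (render::Job::ModelO), so a render task would normally instantiate it once per frame and pass its DeferredFrameTransformPointer output on to downstream deferred passes. A minimal sketch of that wiring, assuming the usual task.addJob builder API; the task variable and downstream job name are illustrative only:

    // Hypothetical wiring sketch; "task" is a render task builder, names are illustrative.
    const auto deferredFrameTransform = task.addJob<GenerateDeferredFrameTransform>("DeferredFrameTransform");
    // Downstream jobs that declare an Inputs varying could then consume the shared transform, e.g.:
    // task.addJob<SomeDeferredPass>("DeferredPass", deferredFrameTransform);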
@@ -12,75 +12,38 @@
<@def DEFERRED_GLOBAL_LIGHT_SLH@>

<@include model/Light.slh@>
<@include DeferredLighting.slh@>
<!<@include DeferredLighting.slh@>!>

<@func declareSkyboxMap()@>
// declareSkyboxMap
uniform samplerCube skyboxMap;
<@include LightingModel.slh@>
<$declareLightingModel()$>

vec4 evalSkyboxLight(vec3 direction, float lod) {
    // textureQueryLevels is not available until #430, so we require explicit lod
    // float mipmapLevel = lod * textureQueryLevels(skyboxMap);
    return textureLod(skyboxMap, direction, lod);
}
<@endfunc@>
<@include LightAmbient.slh@>
<@include LightDirectional.slh@>

<@func declareEvalGlobalSpecularIrradiance(supportAmbientSphere, supportAmbientMap, supportIfAmbientMapElseAmbientSphere)@>

vec3 fresnelSchlickAmbient(vec3 fresnelColor, vec3 lightDir, vec3 halfDir, float gloss) {
    return fresnelColor + (max(vec3(gloss), fresnelColor) - fresnelColor) * pow(1.0 - clamp(dot(lightDir, halfDir), 0.0, 1.0), 5);
}

<@if supportAmbientMap@>
<$declareSkyboxMap()$>
<@endif@>

vec3 evalGlobalSpecularIrradiance(Light light, vec3 fragEyeDir, vec3 fragNormal, float roughness, vec3 fresnel, float obscurance) {
    vec3 direction = -reflect(fragEyeDir, fragNormal);
    vec3 ambientFresnel = fresnelSchlickAmbient(fresnel, fragEyeDir, fragNormal, 1 - roughness);
    vec3 specularLight;
<@if supportIfAmbientMapElseAmbientSphere@>
    if (getLightHasAmbientMap(light))
<@endif@>
<@if supportAmbientMap@>
    {
        float levels = getLightAmbientMapNumMips(light);
        float lod = min(floor((roughness) * levels), levels);
        specularLight = evalSkyboxLight(direction, lod).xyz;
    }
<@endif@>
<@if supportIfAmbientMapElseAmbientSphere@>
    else
<@endif@>
<@if supportAmbientSphere@>
    {
        specularLight = evalSphericalLight(getLightAmbientSphere(light), direction).xyz;
    }
<@endif@>

    return specularLight * ambientFresnel * getLightAmbientIntensity(light);
}
<@endfunc@>

<@func prepareGlobalLight()@>
<@func prepareGlobalLight(isScattering)@>
    // prepareGlobalLight

    // Transform directions to worldspace
    // vec3 fragNormal = vec3(invViewMat * vec4(normal, 0.0));
    vec3 fragNormal = vec3((normal));
    vec3 fragEyeVector = vec3(invViewMat * vec4(-position, 0.0));
    vec3 fragEyeDir = normalize(fragEyeVector);

    // Get light
    Light light = getLight();

    vec3 color = vec3(0.0);

<@if isScattering@>
<@else@>
    color += emissive * isEmissiveEnabled();
<@endif@>

    vec3 fresnel = vec3(0.03); // Default Di-electric fresnel value
    if (metallic > 0.5) {
        fresnel = albedo;
        metallic = 1.0;
    }
    vec4 shading = evalFragShading(fragNormal, -getLightDirection(light), fragEyeDir, metallic, fresnel, roughness);
    vec3 color = vec3(albedo * shading.w + shading.rgb) * min(shadowAttenuation, obscurance) * getLightColor(light) * getLightIntensity(light);
    color += emissive;

<@endfunc@>

@@ -92,39 +55,138 @@ vec3 evalAmbientGlobalColor(mat4 invViewMat, float shadowAttenuation, float obsc
}
<@endfunc@>

<@func declareEvalAmbientSphereGlobalColor()@>
<$declareEvalGlobalSpecularIrradiance(1, 0, 0)$>
<@func declareEvalAmbientSphereGlobalColor(supportScattering)@>

<$declareLightingAmbient(1, 0, 0, supportScattering)$>
<$declareLightingDirectional(supportScattering)$>

vec3 evalAmbientSphereGlobalColor(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 albedo, float metallic, vec3 emissive, float roughness) {

    <$prepareGlobalLight()$>

    // Diffuse from ambient
    color += (1 - metallic) * albedo * evalSphericalLight(getLightAmbientSphere(light), fragNormal).xyz * obscurance * getLightAmbientIntensity(light);
    // Ambient
    vec3 ambientDiffuse;
    vec3 ambientSpecular;
    evalLightingAmbient(ambientDiffuse, ambientSpecular, light, fragEyeDir, fragNormal, roughness, metallic, fresnel, albedo, obscurance);
    color += ambientDiffuse * isDiffuseEnabled() * isAmbientEnabled();
    color += ambientSpecular * isSpecularEnabled() * isAmbientEnabled();

    // Specular highlight from ambient
    vec3 specularLighting = evalGlobalSpecularIrradiance(light, fragEyeDir, fragNormal, roughness, fresnel, obscurance);
    color += specularLighting;

    // Directional
    vec3 directionalDiffuse;
    vec3 directionalSpecular;
    evalLightingDirectional(directionalDiffuse, directionalSpecular, light, fragEyeDir, fragNormal, roughness, metallic, fresnel, albedo, shadowAttenuation);
    color += directionalDiffuse * isDiffuseEnabled() * isDirectionalEnabled();
    color += directionalSpecular * isSpecularEnabled() * isDirectionalEnabled();

    return color;
}

<@if supportScattering@>

<$declareDeferredCurvature()$>

vec3 evalAmbientSphereGlobalColorScattering(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 albedo, float metallic, vec3 emissive, float roughness, float scattering, vec4 blurredCurvature, vec4 diffusedCurvature) {

    <$prepareGlobalLight(1)$>

    vec3 midNormal = normalize((blurredCurvature.xyz - 0.5f) * 2.0f);
    vec3 lowNormal = normalize((diffusedCurvature.xyz - 0.5f) * 2.0f);
    float highCurvature = unpackCurvature(blurredCurvature.w);
    float lowCurvature = unpackCurvature(diffusedCurvature.w);

    // Ambient
    vec3 ambientDiffuse;
    vec3 ambientSpecular;
    evalLightingAmbientScattering(ambientDiffuse, ambientSpecular, light,
        fragEyeDir, fragNormal, roughness,
        metallic, fresnel, albedo, obscurance,
        isScatteringEnabled() * scattering, lowNormal, highCurvature, lowCurvature);
    color += ambientDiffuse * isDiffuseEnabled() * isAmbientEnabled();
    color += ambientSpecular * isSpecularEnabled() * isAmbientEnabled();

    // Directional
    vec3 directionalDiffuse;
    vec3 directionalSpecular;
    evalLightingDirectionalScattering(directionalDiffuse, directionalSpecular, light,
        fragEyeDir, fragNormal, roughness,
        metallic, fresnel, albedo, shadowAttenuation,
        isScatteringEnabled() * scattering, midNormal, lowNormal, lowCurvature);
    color += directionalDiffuse * isDiffuseEnabled() * isDirectionalEnabled();
    color += directionalSpecular * isSpecularEnabled() * isDirectionalEnabled();

    return color;
}

<@endif@>

<@endfunc@>

<@func declareEvalSkyboxGlobalColor()@>
<$declareEvalGlobalSpecularIrradiance(0, 1, 0)$>

<@func declareEvalSkyboxGlobalColor(supportScattering)@>

<$declareLightingAmbient(0, 1, 0, supportScattering)$>
<$declareLightingDirectional(supportScattering)$>

vec3 evalSkyboxGlobalColor(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 albedo, float metallic, vec3 emissive, float roughness) {
    <$prepareGlobalLight()$>

    // Diffuse from ambient
    color += (1 - metallic) * albedo * evalSphericalLight(getLightAmbientSphere(light), fragNormal).xyz * obscurance * getLightAmbientIntensity(light);
    // Ambient
    vec3 ambientDiffuse;
    vec3 ambientSpecular;
    evalLightingAmbient(ambientDiffuse, ambientSpecular, light, fragEyeDir, fragNormal, roughness, metallic, fresnel, albedo, obscurance);
    color += ambientDiffuse * isDiffuseEnabled() * isAmbientEnabled();
    color += ambientSpecular * isSpecularEnabled() * isAmbientEnabled();

    // Specular highlight from ambient
    vec3 specularLighting = evalGlobalSpecularIrradiance(light, fragEyeDir, fragNormal, roughness, fresnel, obscurance);
    color += specularLighting;

    // Directional
    vec3 directionalDiffuse;
    vec3 directionalSpecular;
    evalLightingDirectional(directionalDiffuse, directionalSpecular, light, fragEyeDir, fragNormal, roughness, metallic, fresnel, albedo, shadowAttenuation);
    color += directionalDiffuse * isDiffuseEnabled() * isDirectionalEnabled();
    color += directionalSpecular * isSpecularEnabled() * isDirectionalEnabled();

    return color;
}

<@if supportScattering@>

<$declareDeferredCurvature()$>

vec3 evalSkyboxGlobalColorScattering(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 albedo, float metallic, vec3 emissive, float roughness, float scattering, vec4 blurredCurvature, vec4 diffusedCurvature) {
    <$prepareGlobalLight(1)$>

    vec3 midNormal = normalize((blurredCurvature.xyz - 0.5f) * 2.0f);
    vec3 lowNormal = normalize((diffusedCurvature.xyz - 0.5f) * 2.0f);
    float highCurvature = unpackCurvature(blurredCurvature.w);
    float lowCurvature = unpackCurvature(diffusedCurvature.w);

    // Ambient
    vec3 ambientDiffuse;
    vec3 ambientSpecular;
    evalLightingAmbientScattering(ambientDiffuse, ambientSpecular, light,
        fragEyeDir, fragNormal, roughness,
        metallic, fresnel, albedo, obscurance,
        isScatteringEnabled() * scattering, lowNormal, highCurvature, lowCurvature);
    color += ambientDiffuse * isDiffuseEnabled() * isAmbientEnabled();
    color += ambientSpecular * isSpecularEnabled() * isAmbientEnabled();

    // Directional
    vec3 directionalDiffuse;
    vec3 directionalSpecular;
    evalLightingDirectionalScattering(directionalDiffuse, directionalSpecular, light,
        fragEyeDir, fragNormal, roughness,
        metallic, fresnel, albedo, shadowAttenuation,
        isScatteringEnabled() * scattering, midNormal, lowNormal, lowCurvature);
    color += directionalDiffuse * isDiffuseEnabled() * isDirectionalEnabled();
    color += directionalSpecular * isSpecularEnabled() * isDirectionalEnabled();

    return vec3(color);
}

<@endif@>

<@endfunc@>

<@func declareEvalLightmappedColor()@>

@@ -147,25 +209,35 @@ vec3 evalLightmappedColor(mat4 invViewMat, float shadowAttenuation, float obscur
    // Ambient light is the lightmap when in shadow
    vec3 ambientLight = (1 - lightAttenuation) * lightmap * getLightAmbientIntensity(light);

    return obscurance * albedo * (diffuseLight + ambientLight);
    return isLightmapEnabled() * obscurance * albedo * (diffuseLight + ambientLight);
}
<@endfunc@>

<@func declareEvalGlobalLightingAlphaBlended()@>

<$declareEvalGlobalSpecularIrradiance(1, 1, 1)$>
<$declareLightingAmbient(1, 1, 1)$>
<$declareLightingDirectional()$>

vec3 evalGlobalLightingAlphaBlended(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 albedo, float metallic, vec3 emissive, float roughness, float opacity) {
    <$prepareGlobalLight()$>

    // Diffuse from ambient
    color += (1 - metallic) * albedo * evalSphericalLight(getLightAmbientSphere(light), fragNormal).xyz * obscurance * getLightAmbientIntensity(light);
    // Ambient
    vec3 ambientDiffuse;
    vec3 ambientSpecular;
    evalLightingAmbient(ambientDiffuse, ambientSpecular, light, fragEyeDir, fragNormal, roughness, metallic, fresnel, albedo, obscurance);
    color += ambientDiffuse * isDiffuseEnabled() * isAmbientEnabled();
    color += ambientSpecular * isSpecularEnabled() * isAmbientEnabled() / opacity;

    // Specular highlight from ambient
    vec3 specularLighting = evalGlobalSpecularIrradiance(light, fragEyeDir, fragNormal, roughness, fresnel, obscurance);
    color += specularLighting / opacity;

    // Directional
    vec3 directionalDiffuse;
    vec3 directionalSpecular;
    evalLightingDirectional(directionalDiffuse, directionalSpecular, light, fragEyeDir, fragNormal, roughness, metallic, fresnel, albedo, shadowAttenuation);
    color += directionalDiffuse * isDiffuseEnabled() * isDirectionalEnabled();
    color += directionalSpecular * isSpecularEnabled() * isDirectionalEnabled() / opacity;

    return color;
}
@@ -8,6 +8,7 @@
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
!>
<!
<@if not DEFERRED_LIGHTING_SLH@>
<@def DEFERRED_LIGHTING_SLH@>

@@ -25,14 +26,15 @@ float specularDistribution(float roughness, vec3 normal, vec3 halfDir) {
    float power = gloss2 / (3.14159 * denom * denom);
    return power;
}
<! //NOTE: ANother implementation for specularDistribution
/* //NOTE: ANother implementation for specularDistribution
float specularDistribution(float roughness, vec3 normal, vec3 halfDir) {
    float gloss = exp2(10 * (1.0 - roughness) + 1);
    float power = pow(clamp(dot(halfDir, normal), 0.0, 1.0), gloss);
    power *= (gloss * 0.125 + 0.25);
    return power;
}
!>
*/

// Frag Shading returns the diffuse amount as W and the specular rgb as xyz
vec4 evalPBRShading(vec3 fragNormal, vec3 fragLightDir, vec3 fragEyeDir, float metallic, vec3 fresnel, float roughness) {
    // Diffuse Lighting

@@ -69,7 +71,6 @@ vec4 evalBlinnShading(vec3 fragNormal, vec3 fragLightDir, vec3 fragEyeDir, vec3

<@endfunc@>

<$declareEvalPBRShading()$>

// Return xyz the specular/reflection component and w the diffuse component

@@ -78,3 +79,4 @@ vec4 evalFragShading(vec3 fragNormal, vec3 fragLightDir, vec3 fragEyeDir, float
}

<@endif@>
!>
@ -38,17 +38,20 @@
|
|||
#include "point_light_frag.h"
|
||||
#include "spot_light_frag.h"
|
||||
|
||||
using namespace render;
|
||||
|
||||
struct LightLocations {
|
||||
int radius;
|
||||
int ambientSphere;
|
||||
int lightBufferUnit;
|
||||
int texcoordMat;
|
||||
int coneParam;
|
||||
int deferredTransformBuffer;
|
||||
int shadowTransformBuffer;
|
||||
int radius{ -1 };
|
||||
int ambientSphere{ -1 };
|
||||
int lightBufferUnit{ -1 };
|
||||
int sphereParam{ -1 };
|
||||
int coneParam{ -1 };
|
||||
int deferredFrameTransformBuffer{ -1 };
|
||||
int subsurfaceScatteringParametersBuffer{ -1 };
|
||||
int shadowTransformBuffer{ -1 };
|
||||
};
|
||||
|
||||
enum {
|
||||
enum DeferredShader_MapSlot {
|
||||
DEFERRED_BUFFER_COLOR_UNIT = 0,
|
||||
DEFERRED_BUFFER_NORMAL_UNIT = 1,
|
||||
DEFERRED_BUFFER_EMISSIVE_UNIT = 2,
|
||||
|
@ -56,7 +59,18 @@ enum {
|
|||
DEFERRED_BUFFER_OBSCURANCE_UNIT = 4,
|
||||
SHADOW_MAP_UNIT = 5,
|
||||
SKYBOX_MAP_UNIT = 6,
|
||||
DEFERRED_BUFFER_CURVATURE_UNIT,
|
||||
DEFERRED_BUFFER_DIFFUSED_CURVATURE_UNIT,
|
||||
SCATTERING_LUT_UNIT,
|
||||
SCATTERING_SPECULAR_UNIT,
|
||||
};
|
||||
enum DeferredShader_BufferSlot {
|
||||
DEFERRED_FRAME_TRANSFORM_BUFFER_SLOT = 0,
|
||||
LIGHTING_MODEL_BUFFER_SLOT,
|
||||
SCATTERING_PARAMETERS_BUFFER_SLOT,
|
||||
LIGHT_GPU_SLOT,
|
||||
};
|
||||
|
||||
static void loadLightProgram(const char* vertSource, const char* fragSource, bool lightVolume, gpu::PipelinePointer& program, LightLocationsPtr& locations);
|
||||
|
||||
void DeferredLightingEffect::init() {
|
||||
|
@ -132,368 +146,6 @@ void DeferredLightingEffect::addSpotLight(const glm::vec3& position, float radiu
|
|||
}
|
||||
}
|
||||
|
||||
void DeferredLightingEffect::prepare(RenderArgs* args) {
|
||||
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
batch.setStateScissorRect(args->_viewport);
|
||||
|
||||
// Clear Lighting buffer
|
||||
auto lightingFbo = DependencyManager::get<FramebufferCache>()->getLightingFramebuffer();
|
||||
|
||||
batch.setFramebuffer(lightingFbo);
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(vec3(0), 0), true);
|
||||
|
||||
// Clear deferred
|
||||
auto deferredFbo = DependencyManager::get<FramebufferCache>()->getDeferredFramebuffer();
|
||||
|
||||
batch.setFramebuffer(deferredFbo);
|
||||
|
||||
// Clear Color, Depth and Stencil for deferred buffer
|
||||
batch.clearFramebuffer(
|
||||
gpu::Framebuffer::BUFFER_COLOR0 | gpu::Framebuffer::BUFFER_COLOR1 | gpu::Framebuffer::BUFFER_COLOR2 |
|
||||
gpu::Framebuffer::BUFFER_DEPTH |
|
||||
gpu::Framebuffer::BUFFER_STENCIL,
|
||||
vec4(vec3(0), 0), 1.0, 0.0, true);
|
||||
});
|
||||
}
|
||||
|
||||
void DeferredLightingEffect::render(const render::RenderContextPointer& renderContext) {
|
||||
auto args = renderContext->args;
|
||||
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
|
||||
|
||||
// Allocate the parameters buffer used by all the deferred shaders
|
||||
if (!_deferredTransformBuffer[0]._buffer) {
|
||||
DeferredTransform parameters;
|
||||
_deferredTransformBuffer[0] = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(DeferredTransform), (const gpu::Byte*) ¶meters));
|
||||
_deferredTransformBuffer[1] = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(DeferredTransform), (const gpu::Byte*) ¶meters));
|
||||
}
|
||||
|
||||
// Framebuffer copy operations cannot function as multipass stereo operations.
|
||||
batch.enableStereo(false);
|
||||
|
||||
// perform deferred lighting, rendering to free fbo
|
||||
auto framebufferCache = DependencyManager::get<FramebufferCache>();
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
|
||||
QSize framebufferSize = framebufferCache->getFrameBufferSize();
|
||||
|
||||
// binding the first framebuffer
|
||||
auto lightingFBO = framebufferCache->getLightingFramebuffer();
|
||||
batch.setFramebuffer(lightingFBO);
|
||||
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
batch.setStateScissorRect(args->_viewport);
|
||||
|
||||
// Bind the G-Buffer surfaces
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_COLOR_UNIT, framebufferCache->getDeferredColorTexture());
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_NORMAL_UNIT, framebufferCache->getDeferredNormalTexture());
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_EMISSIVE_UNIT, framebufferCache->getDeferredSpecularTexture());
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_DEPTH_UNIT, framebufferCache->getPrimaryDepthTexture());
|
||||
|
||||
// FIXME: Different render modes should have different tasks
|
||||
if (args->_renderMode == RenderArgs::DEFAULT_RENDER_MODE && _ambientOcclusionEnabled) {
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, framebufferCache->getOcclusionTexture());
|
||||
} else {
|
||||
// need to assign the white texture if ao is off
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, textureCache->getWhiteTexture());
|
||||
}
|
||||
|
||||
assert(_lightStage.lights.size() > 0);
|
||||
const auto& globalShadow = _lightStage.lights[0]->shadow;
|
||||
|
||||
// Bind the shadow buffer
|
||||
batch.setResourceTexture(SHADOW_MAP_UNIT, globalShadow.map);
|
||||
|
||||
// The main viewport is assumed to be the mono viewport (or the two stereo faces laid out side by side within that viewport)
|
||||
auto monoViewport = args->_viewport;
|
||||
float sMin = args->_viewport.x / (float)framebufferSize.width();
|
||||
float sWidth = args->_viewport.z / (float)framebufferSize.width();
|
||||
float tMin = args->_viewport.y / (float)framebufferSize.height();
|
||||
float tHeight = args->_viewport.w / (float)framebufferSize.height();
|
||||
|
||||
// The view frustum is the mono frustum base
|
||||
auto viewFrustum = args->getViewFrustum();
|
||||
|
||||
// Eval the mono projection
|
||||
mat4 monoProjMat;
|
||||
viewFrustum.evalProjectionMatrix(monoProjMat);
|
||||
|
||||
// The mono view transform
|
||||
Transform monoViewTransform;
|
||||
viewFrustum.evalViewTransform(monoViewTransform);
|
||||
|
||||
// The mono view matrix coming from the mono view transform
|
||||
glm::mat4 monoViewMat;
|
||||
monoViewTransform.getMatrix(monoViewMat);
|
||||
|
||||
// Running in stereo?
|
||||
bool isStereo = args->_context->isStereo();
|
||||
int numPasses = 1;
|
||||
|
||||
mat4 projMats[2];
|
||||
Transform viewTransforms[2];
|
||||
ivec4 viewports[2];
|
||||
vec4 clipQuad[2];
|
||||
vec2 screenBottomLeftCorners[2];
|
||||
vec2 screenTopRightCorners[2];
|
||||
vec4 fetchTexcoordRects[2];
|
||||
|
||||
DeferredTransform deferredTransforms[2];
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
if (isStereo) {
|
||||
numPasses = 2;
|
||||
|
||||
mat4 eyeViews[2];
|
||||
args->_context->getStereoProjections(projMats);
|
||||
args->_context->getStereoViews(eyeViews);
|
||||
|
||||
float halfWidth = 0.5f * sWidth;
|
||||
|
||||
for (int i = 0; i < numPasses; i++) {
|
||||
// In stereo, the two sides are laid out side by side in the mono viewport and each side is half the width
|
||||
int sideWidth = monoViewport.z >> 1;
|
||||
viewports[i] = ivec4(monoViewport.x + (i * sideWidth), monoViewport.y, sideWidth, monoViewport.w);
|
||||
|
||||
deferredTransforms[i].projection = projMats[i];
|
||||
|
||||
auto sideViewMat = monoViewMat * glm::inverse(eyeViews[i]);
|
||||
// viewTransforms[i].evalFromRawMatrix(sideViewMat);
|
||||
viewTransforms[i] = monoViewTransform;
|
||||
viewTransforms[i].postTranslate(-glm::vec3((eyeViews[i][3])));// evalFromRawMatrix(sideViewMat);
|
||||
deferredTransforms[i].viewInverse = sideViewMat;
|
||||
|
||||
deferredTransforms[i].stereoSide = (i == 0 ? -1.0f : 1.0f);
|
||||
|
||||
clipQuad[i] = glm::vec4(sMin + i * halfWidth, tMin, halfWidth, tHeight);
|
||||
screenBottomLeftCorners[i] = glm::vec2(-1.0f + i * 1.0f, -1.0f);
|
||||
screenTopRightCorners[i] = glm::vec2(i * 1.0f, 1.0f);
|
||||
|
||||
fetchTexcoordRects[i] = glm::vec4(sMin + i * halfWidth, tMin, halfWidth, tHeight);
|
||||
}
|
||||
} else {
|
||||
|
||||
viewports[0] = monoViewport;
|
||||
projMats[0] = monoProjMat;
|
||||
|
||||
deferredTransforms[0].projection = monoProjMat;
|
||||
|
||||
deferredTransforms[0].viewInverse = monoViewMat;
|
||||
viewTransforms[0] = monoViewTransform;
|
||||
|
||||
deferredTransforms[0].stereoSide = 0.0f;
|
||||
|
||||
clipQuad[0] = glm::vec4(sMin, tMin, sWidth, tHeight);
|
||||
screenBottomLeftCorners[0] = glm::vec2(-1.0f, -1.0f);
|
||||
screenTopRightCorners[0] = glm::vec2(1.0f, 1.0f);
|
||||
|
||||
fetchTexcoordRects[0] = glm::vec4(sMin, tMin, sWidth, tHeight);
|
||||
}
|
||||
|
||||
auto eyePoint = viewFrustum.getPosition();
|
||||
float nearRadius = glm::distance(eyePoint, viewFrustum.getNearTopLeft());
|
||||
|
||||
|
||||
for (int side = 0; side < numPasses; side++) {
|
||||
// Render in this side's viewport
|
||||
batch.setViewportTransform(viewports[side]);
|
||||
batch.setStateScissorRect(viewports[side]);
|
||||
|
||||
// Sync and Bind the correct DeferredTransform ubo
|
||||
_deferredTransformBuffer[side]._buffer->setSubData(0, sizeof(DeferredTransform), (const gpu::Byte*) &deferredTransforms[side]);
|
||||
batch.setUniformBuffer(_directionalLightLocations->deferredTransformBuffer, _deferredTransformBuffer[side]);
|
||||
|
||||
glm::vec2 topLeft(-1.0f, -1.0f);
|
||||
glm::vec2 bottomRight(1.0f, 1.0f);
|
||||
glm::vec2 texCoordTopLeft(clipQuad[side].x, clipQuad[side].y);
|
||||
glm::vec2 texCoordBottomRight(clipQuad[side].x + clipQuad[side].z, clipQuad[side].y + clipQuad[side].w);
|
||||
|
||||
// First Global directional light and ambient pass
|
||||
{
|
||||
auto& program = _shadowMapEnabled ? _directionalLightShadow : _directionalLight;
|
||||
LightLocationsPtr locations = _shadowMapEnabled ? _directionalLightShadowLocations : _directionalLightLocations;
|
||||
const auto& keyLight = _allocatedLights[_globalLights.front()];
|
||||
|
||||
// Setup the global directional pass pipeline
|
||||
{
|
||||
if (_shadowMapEnabled) {
|
||||
if (keyLight->getAmbientMap()) {
|
||||
program = _directionalSkyboxLightShadow;
|
||||
locations = _directionalSkyboxLightShadowLocations;
|
||||
} else {
|
||||
program = _directionalAmbientSphereLightShadow;
|
||||
locations = _directionalAmbientSphereLightShadowLocations;
|
||||
}
|
||||
} else {
|
||||
if (keyLight->getAmbientMap()) {
|
||||
program = _directionalSkyboxLight;
|
||||
locations = _directionalSkyboxLightLocations;
|
||||
} else {
|
||||
program = _directionalAmbientSphereLight;
|
||||
locations = _directionalAmbientSphereLightLocations;
|
||||
}
|
||||
}
|
||||
|
||||
if (locations->shadowTransformBuffer >= 0) {
|
||||
batch.setUniformBuffer(locations->shadowTransformBuffer, globalShadow.getBuffer());
|
||||
}
|
||||
batch.setPipeline(program);
|
||||
}
|
||||
|
||||
{ // Setup the global lighting
|
||||
setupKeyLightBatch(batch, locations->lightBufferUnit, SKYBOX_MAP_UNIT);
|
||||
}
|
||||
|
||||
{
|
||||
batch.setModelTransform(Transform());
|
||||
batch.setProjectionTransform(glm::mat4());
|
||||
batch.setViewTransform(Transform());
|
||||
|
||||
glm::vec4 color(1.0f, 1.0f, 1.0f, 1.0f);
|
||||
geometryCache->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, color);
|
||||
}
|
||||
|
||||
if (keyLight->getAmbientMap()) {
|
||||
batch.setResourceTexture(SKYBOX_MAP_UNIT, nullptr);
|
||||
}
|
||||
}
|
||||
|
||||
auto texcoordMat = glm::mat4();
|
||||
/* texcoordMat[0] = glm::vec4(sWidth / 2.0f, 0.0f, 0.0f, sMin + sWidth / 2.0f);
|
||||
texcoordMat[1] = glm::vec4(0.0f, tHeight / 2.0f, 0.0f, tMin + tHeight / 2.0f);
|
||||
*/ texcoordMat[0] = glm::vec4(fetchTexcoordRects[side].z / 2.0f, 0.0f, 0.0f, fetchTexcoordRects[side].x + fetchTexcoordRects[side].z / 2.0f);
|
||||
texcoordMat[1] = glm::vec4(0.0f, fetchTexcoordRects[side].w / 2.0f, 0.0f, fetchTexcoordRects[side].y + fetchTexcoordRects[side].w / 2.0f);
|
||||
texcoordMat[2] = glm::vec4(0.0f, 0.0f, 1.0f, 0.0f);
|
||||
texcoordMat[3] = glm::vec4(0.0f, 0.0f, 0.0f, 1.0f);
|
||||
|
||||
// Enlarge the scales slightly to account for tessellation
|
||||
const float SCALE_EXPANSION = 0.05f;
|
||||
|
||||
|
||||
batch.setProjectionTransform(projMats[side]);
|
||||
batch.setViewTransform(viewTransforms[side]);
|
||||
|
||||
// Splat Point lights
|
||||
if (!_pointLights.empty()) {
|
||||
batch.setPipeline(_pointLight);
|
||||
|
||||
batch._glUniformMatrix4fv(_pointLightLocations->texcoordMat, 1, false, reinterpret_cast< const float* >(&texcoordMat));
|
||||
|
||||
for (auto lightID : _pointLights) {
|
||||
auto& light = _allocatedLights[lightID];
|
||||
// IN DEBUG: light->setShowContour(true);
|
||||
batch.setUniformBuffer(_pointLightLocations->lightBufferUnit, light->getSchemaBuffer());
|
||||
|
||||
float expandedRadius = light->getMaximumRadius() * (1.0f + SCALE_EXPANSION);
|
||||
// TODO: We shouldn't have to do this test and use a different volume geometry when we are inside the light volume;
// we should be able to draw the same geometry using DepthClamp, but for an unknown reason it's not working...
|
||||
if (glm::distance(eyePoint, glm::vec3(light->getPosition())) < expandedRadius + nearRadius) {
|
||||
Transform model;
|
||||
model.setTranslation(glm::vec3(0.0f, 0.0f, -1.0f));
|
||||
batch.setModelTransform(model);
|
||||
batch.setViewTransform(Transform());
|
||||
batch.setProjectionTransform(glm::mat4());
|
||||
|
||||
glm::vec4 color(1.0f, 1.0f, 1.0f, 1.0f);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, color);
|
||||
|
||||
batch.setProjectionTransform(projMats[side]);
|
||||
batch.setViewTransform(viewTransforms[side]);
|
||||
} else {
|
||||
Transform model;
|
||||
model.setTranslation(glm::vec3(light->getPosition().x, light->getPosition().y, light->getPosition().z));
|
||||
batch.setModelTransform(model.postScale(expandedRadius));
|
||||
batch._glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
|
||||
geometryCache->renderSphere(batch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Splat spot lights
|
||||
if (!_spotLights.empty()) {
|
||||
batch.setPipeline(_spotLight);
|
||||
|
||||
batch._glUniformMatrix4fv(_spotLightLocations->texcoordMat, 1, false, reinterpret_cast< const float* >(&texcoordMat));
|
||||
|
||||
for (auto lightID : _spotLights) {
|
||||
auto light = _allocatedLights[lightID];
|
||||
// IN DEBUG: light->setShowContour(true);
|
||||
batch.setUniformBuffer(_spotLightLocations->lightBufferUnit, light->getSchemaBuffer());
|
||||
|
||||
auto eyeLightPos = eyePoint - light->getPosition();
|
||||
auto eyeHalfPlaneDistance = glm::dot(eyeLightPos, light->getDirection());
|
||||
|
||||
const float TANGENT_LENGTH_SCALE = 0.666f;
|
||||
glm::vec4 coneParam(light->getSpotAngleCosSin(), TANGENT_LENGTH_SCALE * tanf(0.5f * light->getSpotAngle()), 1.0f);
|
||||
|
||||
float expandedRadius = light->getMaximumRadius() * (1.0f + SCALE_EXPANSION);
|
||||
// TODO: We shouldn't have to do this test and use a different volume geometry when we are inside the light volume;
// we should be able to draw the same geometry using DepthClamp, but for an unknown reason it's not working...
|
||||
const float OVER_CONSERVATIVE_SCALE = 1.1f;
|
||||
if ((eyeHalfPlaneDistance > -nearRadius) &&
|
||||
(glm::distance(eyePoint, glm::vec3(light->getPosition())) < (expandedRadius * OVER_CONSERVATIVE_SCALE) + nearRadius)) {
|
||||
coneParam.w = 0.0f;
|
||||
batch._glUniform4fv(_spotLightLocations->coneParam, 1, reinterpret_cast< const float* >(&coneParam));
|
||||
|
||||
Transform model;
|
||||
model.setTranslation(glm::vec3(0.0f, 0.0f, -1.0f));
|
||||
batch.setModelTransform(model);
|
||||
batch.setViewTransform(Transform());
|
||||
batch.setProjectionTransform(glm::mat4());
|
||||
|
||||
glm::vec4 color(1.0f, 1.0f, 1.0f, 1.0f);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, color);
|
||||
|
||||
batch.setProjectionTransform( projMats[side]);
|
||||
batch.setViewTransform(viewTransforms[side]);
|
||||
} else {
|
||||
light->setShowContour(false);
|
||||
coneParam.w = 1.0f;
|
||||
batch._glUniform4fv(_spotLightLocations->coneParam, 1, reinterpret_cast< const float* >(&coneParam));
|
||||
|
||||
Transform model;
|
||||
model.setTranslation(light->getPosition());
|
||||
model.postRotate(light->getOrientation());
|
||||
model.postScale(glm::vec3(expandedRadius, expandedRadius, expandedRadius));
|
||||
|
||||
batch.setModelTransform(model);
|
||||
auto mesh = getSpotLightMesh();
|
||||
|
||||
batch.setIndexBuffer(mesh->getIndexBuffer());
|
||||
batch.setInputBuffer(0, mesh->getVertexBuffer());
|
||||
batch.setInputFormat(mesh->getVertexFormat());
|
||||
|
||||
{
|
||||
auto& part = mesh->getPartBuffer().get<model::Mesh::Part>(0);
|
||||
batch.drawIndexed(model::Mesh::topologyToPrimitive(part._topology), part._numIndices, part._startIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Probably not necessary in the long run because the gpu layer would unbind this texture if it were used as a render target
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_COLOR_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_NORMAL_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_EMISSIVE_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_DEPTH_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, nullptr);
|
||||
batch.setResourceTexture(SHADOW_MAP_UNIT, nullptr);
|
||||
batch.setResourceTexture(SKYBOX_MAP_UNIT, nullptr);
|
||||
|
||||
batch.setUniformBuffer(_directionalLightLocations->deferredTransformBuffer, nullptr);
|
||||
});
|
||||
|
||||
// End of the Lighting pass
|
||||
if (!_pointLights.empty()) {
|
||||
_pointLights.clear();
|
||||
}
|
||||
if (!_spotLights.empty()) {
|
||||
_spotLights.clear();
|
||||
}
|
||||
}
|
||||
|
||||
void DeferredLightingEffect::setupKeyLightBatch(gpu::Batch& batch, int lightBufferUnit, int skyboxCubemapUnit) {
|
||||
PerformanceTimer perfTimer("DLE->setupBatch()");
|
||||
auto keyLight = _allocatedLights[_globalLights.front()];
|
||||
|
@ -522,21 +174,28 @@ static void loadLightProgram(const char* vertSource, const char* fragSource, boo
|
|||
slotBindings.insert(gpu::Shader::Binding(std::string("shadowMap"), SHADOW_MAP_UNIT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), SKYBOX_MAP_UNIT));
|
||||
|
||||
static const int LIGHT_GPU_SLOT = 3;
|
||||
static const int DEFERRED_TRANSFORM_BUFFER_SLOT = 2;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("curvatureMap"), DEFERRED_BUFFER_CURVATURE_UNIT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("diffusedCurvatureMap"), DEFERRED_BUFFER_DIFFUSED_CURVATURE_UNIT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("scatteringLUT"), SCATTERING_LUT_UNIT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("scatteringSpecularBeckmann"), SCATTERING_SPECULAR_UNIT));
|
||||
|
||||
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("deferredFrameTransformBuffer"), DEFERRED_FRAME_TRANSFORM_BUFFER_SLOT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("lightingModelBuffer"), LIGHTING_MODEL_BUFFER_SLOT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("subsurfaceScatteringParametersBuffer"), SCATTERING_PARAMETERS_BUFFER_SLOT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), LIGHT_GPU_SLOT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("deferredTransformBuffer"), DEFERRED_TRANSFORM_BUFFER_SLOT));
|
||||
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
locations->radius = program->getUniforms().findLocation("radius");
|
||||
locations->ambientSphere = program->getUniforms().findLocation("ambientSphere.L00");
|
||||
|
||||
locations->texcoordMat = program->getUniforms().findLocation("texcoordMat");
|
||||
locations->sphereParam = program->getUniforms().findLocation("sphereParam");
|
||||
locations->coneParam = program->getUniforms().findLocation("coneParam");
|
||||
|
||||
locations->lightBufferUnit = program->getBuffers().findLocation("lightBuffer");
|
||||
locations->deferredTransformBuffer = program->getBuffers().findLocation("deferredTransformBuffer");
|
||||
locations->deferredFrameTransformBuffer = program->getBuffers().findLocation("deferredFrameTransformBuffer");
|
||||
locations->subsurfaceScatteringParametersBuffer = program->getBuffers().findLocation("subsurfaceScatteringParametersBuffer");
|
||||
locations->shadowTransformBuffer = program->getBuffers().findLocation("shadowTransformBuffer");
|
||||
|
||||
auto state = std::make_shared<gpu::State>();
|
||||
|
@ -667,3 +326,316 @@ model::MeshPointer DeferredLightingEffect::getSpotLightMesh() {
|
|||
return _spotLightMesh;
|
||||
}
|
||||
|
||||
void PrepareDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
|
||||
auto args = renderContext->args;
|
||||
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
batch.setStateScissorRect(args->_viewport);
|
||||
|
||||
// Clear Lighting buffer
|
||||
auto lightingFbo = DependencyManager::get<FramebufferCache>()->getLightingFramebuffer();
|
||||
|
||||
batch.setFramebuffer(lightingFbo);
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(vec3(0), 0), true);
|
||||
|
||||
// Clear deferred
|
||||
auto deferredFbo = DependencyManager::get<FramebufferCache>()->getDeferredFramebuffer();
|
||||
|
||||
batch.setFramebuffer(deferredFbo);
|
||||
|
||||
// Clear Color, Depth and Stencil for deferred buffer
|
||||
batch.clearFramebuffer(
|
||||
gpu::Framebuffer::BUFFER_COLOR0 | gpu::Framebuffer::BUFFER_COLOR1 | gpu::Framebuffer::BUFFER_COLOR2 |
|
||||
gpu::Framebuffer::BUFFER_DEPTH |
|
||||
gpu::Framebuffer::BUFFER_STENCIL,
|
||||
vec4(vec3(0), 0), 1.0, 0.0, true);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
void RenderDeferredSetup::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
|
||||
const DeferredFrameTransformPointer& frameTransform,
|
||||
const LightingModelPointer& lightingModel,
|
||||
const gpu::TexturePointer& diffusedCurvature2,
|
||||
const SubsurfaceScatteringResourcePointer& subsurfaceScatteringResource) {
|
||||
|
||||
auto args = renderContext->args;
|
||||
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
|
||||
|
||||
// Framebuffer copy operations cannot function as multipass stereo operations.
|
||||
batch.enableStereo(false);
|
||||
|
||||
// perform deferred lighting, rendering to free fbo
|
||||
auto framebufferCache = DependencyManager::get<FramebufferCache>();
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
auto deferredLightingEffect = DependencyManager::get<DeferredLightingEffect>();
|
||||
|
||||
// binding the first framebuffer
|
||||
auto lightingFBO = framebufferCache->getLightingFramebuffer();
|
||||
batch.setFramebuffer(lightingFBO);
|
||||
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
batch.setStateScissorRect(args->_viewport);
|
||||
|
||||
|
||||
// Bind the G-Buffer surfaces
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_COLOR_UNIT, framebufferCache->getDeferredColorTexture());
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_NORMAL_UNIT, framebufferCache->getDeferredNormalTexture());
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_EMISSIVE_UNIT, framebufferCache->getDeferredSpecularTexture());
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_DEPTH_UNIT, framebufferCache->getPrimaryDepthTexture());
|
||||
|
||||
// FIXME: Different render modes should have different tasks
|
||||
if (args->_renderMode == RenderArgs::DEFAULT_RENDER_MODE && deferredLightingEffect->isAmbientOcclusionEnabled()) {
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, framebufferCache->getOcclusionTexture());
|
||||
} else {
|
||||
// need to assign the white texture if ao is off
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, textureCache->getWhiteTexture());
|
||||
}
|
||||
|
||||
// The Deferred Frame Transform buffer
|
||||
batch.setUniformBuffer(DEFERRED_FRAME_TRANSFORM_BUFFER_SLOT, frameTransform->getFrameTransformBuffer());
|
||||
|
||||
// The lighting model
|
||||
batch.setUniformBuffer(LIGHTING_MODEL_BUFFER_SLOT, lightingModel->getParametersBuffer());
|
||||
|
||||
// Subsurface scattering specific
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_CURVATURE_UNIT, framebufferCache->getCurvatureTexture());
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_DIFFUSED_CURVATURE_UNIT, diffusedCurvature2);
|
||||
|
||||
batch.setUniformBuffer(SCATTERING_PARAMETERS_BUFFER_SLOT, subsurfaceScatteringResource->getParametersBuffer());
|
||||
|
||||
|
||||
batch.setResourceTexture(SCATTERING_LUT_UNIT, subsurfaceScatteringResource->getScatteringTable());
|
||||
batch.setResourceTexture(SCATTERING_SPECULAR_UNIT, subsurfaceScatteringResource->getScatteringSpecular());
|
||||
|
||||
|
||||
// Global directional light and ambient pass
|
||||
|
||||
assert(deferredLightingEffect->getLightStage().lights.size() > 0);
|
||||
const auto& globalShadow = deferredLightingEffect->getLightStage().lights[0]->shadow;
|
||||
|
||||
// Bind the shadow buffer
|
||||
batch.setResourceTexture(SHADOW_MAP_UNIT, globalShadow.map);
|
||||
|
||||
auto& program = deferredLightingEffect->_shadowMapEnabled ? deferredLightingEffect->_directionalLightShadow : deferredLightingEffect->_directionalLight;
|
||||
LightLocationsPtr locations = deferredLightingEffect->_shadowMapEnabled ? deferredLightingEffect->_directionalLightShadowLocations : deferredLightingEffect->_directionalLightLocations;
|
||||
const auto& keyLight = deferredLightingEffect->_allocatedLights[deferredLightingEffect->_globalLights.front()];
|
||||
|
||||
// Setup the global directional pass pipeline
|
||||
{
|
||||
if (deferredLightingEffect->_shadowMapEnabled) {
|
||||
if (keyLight->getAmbientMap()) {
|
||||
program = deferredLightingEffect->_directionalSkyboxLightShadow;
|
||||
locations = deferredLightingEffect->_directionalSkyboxLightShadowLocations;
|
||||
} else {
|
||||
program = deferredLightingEffect->_directionalAmbientSphereLightShadow;
|
||||
locations = deferredLightingEffect->_directionalAmbientSphereLightShadowLocations;
|
||||
}
|
||||
} else {
|
||||
if (keyLight->getAmbientMap()) {
|
||||
program = deferredLightingEffect->_directionalSkyboxLight;
|
||||
locations = deferredLightingEffect->_directionalSkyboxLightLocations;
|
||||
} else {
|
||||
program = deferredLightingEffect->_directionalAmbientSphereLight;
|
||||
locations = deferredLightingEffect->_directionalAmbientSphereLightLocations;
|
||||
}
|
||||
}
|
||||
|
||||
if (locations->shadowTransformBuffer >= 0) {
|
||||
batch.setUniformBuffer(locations->shadowTransformBuffer, globalShadow.getBuffer());
|
||||
}
|
||||
batch.setPipeline(program);
|
||||
}
|
||||
|
||||
{ // Setup the global lighting
|
||||
deferredLightingEffect->setupKeyLightBatch(batch, locations->lightBufferUnit, SKYBOX_MAP_UNIT);
|
||||
}
|
||||
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
if (keyLight->getAmbientMap()) {
|
||||
batch.setResourceTexture(SKYBOX_MAP_UNIT, nullptr);
|
||||
}
|
||||
batch.setResourceTexture(SHADOW_MAP_UNIT, nullptr);
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
void RenderDeferredLocals::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, bool points, bool spots) {
|
||||
if (!points && !spots) {
|
||||
return;
|
||||
}
|
||||
auto args = renderContext->args;
|
||||
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
|
||||
|
||||
// The main viewport is assumed to be the mono viewport (or the two stereo faces laid out side by side within that viewport)
|
||||
auto monoViewport = args->_viewport;
|
||||
|
||||
// The view frustum is the mono frustum base
|
||||
auto viewFrustum = args->getViewFrustum();
|
||||
|
||||
// Eval the mono projection
|
||||
mat4 monoProjMat;
|
||||
viewFrustum.evalProjectionMatrix(monoProjMat);
|
||||
|
||||
// The mono view transform
|
||||
Transform monoViewTransform;
|
||||
viewFrustum.evalViewTransform(monoViewTransform);
|
||||
|
||||
// The mono view matrix coming from the mono view transform
|
||||
glm::mat4 monoViewMat;
|
||||
monoViewTransform.getMatrix(monoViewMat);
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
auto eyePoint = viewFrustum.getPosition();
|
||||
float nearRadius = glm::distance(eyePoint, viewFrustum.getNearTopLeft());
|
||||
|
||||
auto deferredLightingEffect = DependencyManager::get<DeferredLightingEffect>();
|
||||
|
||||
// Render in this side's viewport
|
||||
batch.setViewportTransform(monoViewport);
|
||||
batch.setStateScissorRect(monoViewport);
|
||||
|
||||
// Enlarge the scales slightly to account for tessellation
|
||||
const float SCALE_EXPANSION = 0.05f;
|
||||
|
||||
|
||||
batch.setProjectionTransform(monoProjMat);
|
||||
batch.setViewTransform(monoViewTransform);
|
||||
|
||||
// Splat Point lights
|
||||
if (points && !deferredLightingEffect->_pointLights.empty()) {
|
||||
// Point light pipeline
|
||||
batch.setPipeline(deferredLightingEffect->_pointLight);
|
||||
|
||||
for (auto lightID : deferredLightingEffect->_pointLights) {
|
||||
auto& light = deferredLightingEffect->_allocatedLights[lightID];
|
||||
// IN DEBUG: light->setShowContour(true);
|
||||
batch.setUniformBuffer(deferredLightingEffect->_pointLightLocations->lightBufferUnit, light->getSchemaBuffer());
|
||||
|
||||
float expandedRadius = light->getMaximumRadius() * (1.0f + SCALE_EXPANSION);
|
||||
glm::vec4 sphereParam(expandedRadius, 0.0f, 0.0f, 1.0f);
|
||||
|
||||
// TODO: We shouldn't have to do this test and use a different volume geometry when we are inside the light volume;
// we should be able to draw the same geometry using DepthClamp, but for an unknown reason it's not working...
|
||||
if (glm::distance(eyePoint, glm::vec3(light->getPosition())) < expandedRadius + nearRadius) {
|
||||
sphereParam.w = 0.0f;
|
||||
batch._glUniform4fv(deferredLightingEffect->_pointLightLocations->sphereParam, 1, reinterpret_cast< const float* >(&sphereParam));
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
} else {
|
||||
sphereParam.w = 1.0f;
|
||||
batch._glUniform4fv(deferredLightingEffect->_pointLightLocations->sphereParam, 1, reinterpret_cast< const float* >(&sphereParam));
|
||||
|
||||
Transform model;
|
||||
model.setTranslation(glm::vec3(light->getPosition().x, light->getPosition().y, light->getPosition().z));
|
||||
batch.setModelTransform(model.postScale(expandedRadius));
|
||||
batch._glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
|
||||
geometryCache->renderSphere(batch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Splat spot lights
|
||||
if (spots && !deferredLightingEffect->_spotLights.empty()) {
|
||||
// Spot light pipeline
|
||||
batch.setPipeline(deferredLightingEffect->_spotLight);
|
||||
|
||||
// Spot mesh
|
||||
auto mesh = deferredLightingEffect->getSpotLightMesh();
|
||||
batch.setIndexBuffer(mesh->getIndexBuffer());
|
||||
batch.setInputBuffer(0, mesh->getVertexBuffer());
|
||||
batch.setInputFormat(mesh->getVertexFormat());
|
||||
auto& conePart = mesh->getPartBuffer().get<model::Mesh::Part>(0);
|
||||
|
||||
for (auto lightID : deferredLightingEffect->_spotLights) {
|
||||
auto light = deferredLightingEffect->_allocatedLights[lightID];
|
||||
// IN DEBUG:
|
||||
light->setShowContour(true);
|
||||
batch.setUniformBuffer(deferredLightingEffect->_spotLightLocations->lightBufferUnit, light->getSchemaBuffer());
|
||||
|
||||
auto eyeLightPos = eyePoint - light->getPosition();
|
||||
auto eyeHalfPlaneDistance = glm::dot(eyeLightPos, light->getDirection());
|
||||
|
||||
const float TANGENT_LENGTH_SCALE = 0.666f;
|
||||
glm::vec4 coneParam(light->getSpotAngleCosSin(), TANGENT_LENGTH_SCALE * tanf(0.5f * light->getSpotAngle()), 1.0f);
|
||||
|
||||
float expandedRadius = light->getMaximumRadius() * (1.0f + SCALE_EXPANSION);
|
||||
// TODO: We shouldn't have to do this test and use a different volume geometry when we are inside the light volume;
// we should be able to draw the same geometry using DepthClamp, but for an unknown reason it's not working...
|
||||
const float OVER_CONSERVATIVE_SCALE = 1.1f;
|
||||
if ((eyeHalfPlaneDistance > -nearRadius) &&
|
||||
(glm::distance(eyePoint, glm::vec3(light->getPosition())) < (expandedRadius * OVER_CONSERVATIVE_SCALE) + nearRadius)) {
|
||||
coneParam.w = 0.0f;
|
||||
batch._glUniform4fv(deferredLightingEffect->_spotLightLocations->coneParam, 1, reinterpret_cast< const float* >(&coneParam));
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
} else {
|
||||
coneParam.w = 1.0f;
|
||||
batch._glUniform4fv(deferredLightingEffect->_spotLightLocations->coneParam, 1, reinterpret_cast< const float* >(&coneParam));
|
||||
|
||||
Transform model;
|
||||
model.setTranslation(light->getPosition());
|
||||
model.postRotate(light->getOrientation());
|
||||
model.postScale(glm::vec3(expandedRadius, expandedRadius, expandedRadius));
|
||||
|
||||
batch.setModelTransform(model);
|
||||
|
||||
batch.drawIndexed(model::Mesh::topologyToPrimitive(conePart._topology), conePart._numIndices, conePart._startIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
void RenderDeferredCleanup::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
|
||||
auto args = renderContext->args;
|
||||
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
|
||||
// Probably not necessary in the long run because the gpu layer would unbind this texture if it were used as a render target
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_COLOR_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_NORMAL_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_EMISSIVE_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_DEPTH_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, nullptr);
|
||||
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_CURVATURE_UNIT, nullptr);
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_DIFFUSED_CURVATURE_UNIT, nullptr);
|
||||
batch.setResourceTexture(SCATTERING_LUT_UNIT, nullptr);
|
||||
batch.setResourceTexture(SCATTERING_SPECULAR_UNIT, nullptr);
|
||||
|
||||
batch.setUniformBuffer(SCATTERING_PARAMETERS_BUFFER_SLOT, nullptr);
|
||||
batch.setUniformBuffer(LIGHTING_MODEL_BUFFER_SLOT, nullptr);
|
||||
batch.setUniformBuffer(DEFERRED_FRAME_TRANSFORM_BUFFER_SLOT, nullptr);
|
||||
});
|
||||
|
||||
auto deferredLightingEffect = DependencyManager::get<DeferredLightingEffect>();
|
||||
|
||||
// End of the Lighting pass
|
||||
if (!deferredLightingEffect->_pointLights.empty()) {
|
||||
deferredLightingEffect->_pointLights.clear();
|
||||
}
|
||||
if (!deferredLightingEffect->_spotLights.empty()) {
|
||||
deferredLightingEffect->_spotLights.clear();
|
||||
}
|
||||
}
|
||||
|
||||
RenderDeferred::RenderDeferred() {
|
||||
|
||||
}
|
||||
|
||||
|
||||
void RenderDeferred::configure(const Config& config) {
|
||||
}
|
||||
|
||||
void RenderDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& inputs) {
|
||||
auto deferredTransform = inputs.get0();
|
||||
auto lightingModel = inputs.get1();
|
||||
auto diffusedCurvature2 = inputs.get2()->getRenderBuffer(0);
|
||||
auto subsurfaceScatteringResource = inputs.get4();
|
||||
|
||||
setupJob.run(sceneContext, renderContext, deferredTransform, lightingModel, diffusedCurvature2, subsurfaceScatteringResource);
|
||||
|
||||
lightsJob.run(sceneContext, renderContext, deferredTransform, lightingModel->isPointLightEnabled(), lightingModel->isSpotLightEnabled());
|
||||
|
||||
cleanupJob.run(sceneContext, renderContext);
|
||||
}
|
||||
|
|
|
@ -21,13 +21,20 @@
|
|||
#include "model/Geometry.h"
|
||||
|
||||
#include "render/Context.h"
|
||||
#include <render/CullTask.h>
|
||||
|
||||
#include "DeferredFrameTransform.h"
|
||||
#include "LightingModel.h"
|
||||
|
||||
#include "LightStage.h"
|
||||
|
||||
#include "SubsurfaceScattering.h"
|
||||
|
||||
class RenderArgs;
|
||||
struct LightLocations;
|
||||
using LightLocationsPtr = std::shared_ptr<LightLocations>;
|
||||
/// Handles deferred lighting for the bits that require it (voxels...)
|
||||
|
||||
// This is where we currently accumulate the local lights; let's change that sooner rather than later
|
||||
class DeferredLightingEffect : public Dependency {
|
||||
SINGLETON_DEPENDENCY
|
||||
|
||||
|
@ -42,9 +49,6 @@ public:
|
|||
void addSpotLight(const glm::vec3& position, float radius, const glm::vec3& color = glm::vec3(1.0f, 1.0f, 1.0f),
|
||||
float intensity = 0.5f, float falloffRadius = 0.01f,
|
||||
const glm::quat& orientation = glm::quat(), float exponent = 0.0f, float cutoff = PI);
|
||||
|
||||
void prepare(RenderArgs* args);
|
||||
void render(const render::RenderContextPointer& renderContext);
|
||||
|
||||
void setupKeyLightBatch(gpu::Batch& batch, int lightBufferUnit, int skyboxCubemapUnit);
|
||||
|
||||
|
@ -95,19 +99,73 @@ private:
|
|||
std::vector<int> _globalLights;
|
||||
std::vector<int> _pointLights;
|
||||
std::vector<int> _spotLights;
|
||||
|
||||
friend class RenderDeferredSetup;
|
||||
friend class RenderDeferredLocals;
|
||||
friend class RenderDeferredCleanup;
|
||||
};
|
||||
|
||||
// Class describing the uniform buffer with all the parameters common to the deferred shaders
|
||||
class DeferredTransform {
|
||||
public:
|
||||
glm::mat4 projection;
|
||||
glm::mat4 viewInverse;
|
||||
float stereoSide { 0.f };
|
||||
float spareA, spareB, spareC;
|
||||
class PrepareDeferred {
|
||||
public:
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
|
||||
|
||||
DeferredTransform() {}
|
||||
};
|
||||
typedef gpu::BufferView UniformBufferView;
|
||||
UniformBufferView _deferredTransformBuffer[2];
|
||||
using JobModel = render::Job::Model<PrepareDeferred>;
|
||||
};
|
||||
|
||||
class RenderDeferredSetup {
|
||||
public:
|
||||
// using JobModel = render::Job::ModelI<RenderDeferredSetup, DeferredFrameTransformPointer>;
|
||||
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext,
|
||||
const DeferredFrameTransformPointer& frameTransform,
|
||||
const LightingModelPointer& lightingModel,
|
||||
const gpu::TexturePointer& diffusedCurvature2,
|
||||
const SubsurfaceScatteringResourcePointer& subsurfaceScatteringResource);
|
||||
};
|
||||
|
||||
class RenderDeferredLocals {
|
||||
public:
|
||||
using JobModel = render::Job::ModelI<RenderDeferredLocals, DeferredFrameTransformPointer>;
|
||||
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, bool points, bool spots);
|
||||
};
|
||||
|
||||
|
||||
class RenderDeferredCleanup {
|
||||
public:
|
||||
using JobModel = render::Job::Model<RenderDeferredCleanup>;
|
||||
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
|
||||
};
|
||||
|
||||
|
||||
class RenderDeferredConfig : public render::Job::Config {
|
||||
Q_OBJECT
|
||||
public:
|
||||
RenderDeferredConfig() : render::Job::Config(true) {}
|
||||
|
||||
signals:
|
||||
void dirty();
|
||||
};
|
||||
|
||||
|
||||
class RenderDeferred {
|
||||
public:
|
||||
using Inputs = render::VaryingSet5 < DeferredFrameTransformPointer, LightingModelPointer, gpu::FramebufferPointer, gpu::FramebufferPointer, SubsurfaceScatteringResourcePointer>;
|
||||
using Config = RenderDeferredConfig;
|
||||
using JobModel = render::Job::ModelI<RenderDeferred, Inputs, Config>;
|
||||
|
||||
RenderDeferred();
|
||||
|
||||
void configure(const Config& config);
|
||||
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs);
|
||||
|
||||
RenderDeferredSetup setupJob;
|
||||
RenderDeferredLocals lightsJob;
|
||||
RenderDeferredCleanup cleanupJob;
|
||||
|
||||
protected:
|
||||
};
|
||||
|
||||
#endif // hifi_DeferredLightingEffect_h
|
||||
|
|
118
libraries/render-utils/src/DeferredTransform.slh
Normal file
|
@ -0,0 +1,118 @@
|
|||
<!
|
||||
// DeferredTransform.slh
|
||||
// libraries/render-utils/src
|
||||
//
|
||||
// Created by Sam Gateau on 6/2/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
!>
|
||||
<@if not DEFERRED_TRANSFORM_SLH@>
|
||||
<@def DEFERRED_TRANSFORM_SLH@>
|
||||
|
||||
<@func declareDeferredFrameTransform()@>
|
||||
|
||||
struct DeferredFrameTransform {
|
||||
vec4 _pixelInfo;
|
||||
vec4 _invPixelInfo;
|
||||
vec4 _depthInfo;
|
||||
vec4 _stereoInfo;
|
||||
mat4 _projection[2];
|
||||
mat4 _projectionMono;
|
||||
mat4 _viewInverse;
|
||||
mat4 _view;
|
||||
};
|
||||
|
||||
uniform deferredFrameTransformBuffer {
|
||||
DeferredFrameTransform frameTransform;
|
||||
};
|
||||
|
||||
DeferredFrameTransform getDeferredFrameTransform() {
|
||||
return frameTransform;
|
||||
}
|
||||
|
||||
vec2 getWidthHeight(int resolutionLevel) {
|
||||
return vec2(ivec2(frameTransform._pixelInfo.zw) >> resolutionLevel);
|
||||
}
|
||||
|
||||
vec2 getInvWidthHeight() {
|
||||
return frameTransform._invPixelInfo.xy;
|
||||
}
|
||||
|
||||
float getProjScaleEye() {
|
||||
return frameTransform._projection[0][1][1];
|
||||
}
|
||||
|
||||
float getProjScale(int resolutionLevel) {
|
||||
return getWidthHeight(resolutionLevel).y * frameTransform._projection[0][1][1] * 0.5;
|
||||
}
|
||||
mat4 getProjection(int side) {
|
||||
return frameTransform._projection[side];
|
||||
}
|
||||
mat4 getProjectionMono() {
|
||||
return frameTransform._projectionMono;
|
||||
}
|
||||
|
||||
// positive near distance of the projection
|
||||
float getProjectionNear() {
|
||||
float planeC = frameTransform._projection[0][2][3] + frameTransform._projection[0][2][2];
|
||||
float planeD = frameTransform._projection[0][3][2];
|
||||
return planeD / planeC;
|
||||
}
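For reference, this recovers the positive near distance assuming a standard OpenGL-style perspective projection (an assumption; GLSL's P[c][r] indexing is column-major and the camera looks down -Z):

P[2][2] = -\frac{f+n}{f-n}, \qquad P[2][3] = -1, \qquad P[3][2] = -\frac{2fn}{f-n}

\frac{planeD}{planeC} = \frac{-2fn/(f-n)}{-1 - (f+n)/(f-n)} = \frac{-2fn}{-2f} = n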
|
||||
|
||||
mat4 getViewInverse() {
|
||||
return frameTransform._viewInverse;
|
||||
}
|
||||
|
||||
mat4 getView() {
|
||||
return frameTransform._view;
|
||||
}
|
||||
|
||||
bool isStereo() {
|
||||
return frameTransform._stereoInfo.x > 0.0f;
|
||||
}
|
||||
|
||||
float getStereoSideWidth(int resolutionLevel) {
|
||||
return float(int(frameTransform._stereoInfo.y) >> resolutionLevel);
|
||||
}
|
||||
|
||||
ivec4 getStereoSideInfo(int xPos, int resolutionLevel) {
|
||||
int sideWidth = int(getStereoSideWidth(resolutionLevel));
|
||||
return ivec4(xPos < sideWidth ? ivec2(0, 0) : ivec2(1, sideWidth), sideWidth, isStereo());
|
||||
}
|
||||
|
||||
float evalZeyeFromZdb(float depth) {
|
||||
return frameTransform._depthInfo.x / (depth * frameTransform._depthInfo.y + frameTransform._depthInfo.z);
|
||||
}
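The C++ code that packs _depthInfo is not shown in this diff; under the same standard projection assumed above (an assumption, not confirmed by this commit), a packing consistent with this line would be

z_{eye} = \frac{-P[3][2]}{2\,z_{db} - 1 + P[2][2]} \quad\Rightarrow\quad \_depthInfo = \big(-P[3][2],\; 2,\; P[2][2] - 1\big)

which gives z_eye = -n at z_db = 0 and z_eye = -f at z_db = 1 (negative because eye space looks down -Z).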
|
||||
|
||||
vec3 evalEyeNormal(vec3 C) {
|
||||
//return normalize(cross(dFdy(C), dFdx(C)));
|
||||
return normalize(cross(dFdx(C), dFdy(C)));
|
||||
}
|
||||
|
||||
vec3 evalEyePositionFromZeye(int side, float Zeye, vec2 texcoord) {
|
||||
// compute the view space position using the depth
|
||||
// basically manually pick the proj matrix components to do the inverse
|
||||
float Xe = (-Zeye * (texcoord.x * 2.0 - 1.0) - Zeye * frameTransform._projection[side][2][0] - frameTransform._projection[side][3][0]) / frameTransform._projection[side][0][0];
|
||||
float Ye = (-Zeye * (texcoord.y * 2.0 - 1.0) - Zeye * frameTransform._projection[side][2][1] - frameTransform._projection[side][3][1]) / frameTransform._projection[side][1][1];
|
||||
return vec3(Xe, Ye, Zeye);
|
||||
}
|
||||
|
||||
ivec2 getPixelPosTexcoordPosAndSide(in vec2 glFragCoord, out ivec2 pixelPos, out vec2 texcoordPos, out ivec4 stereoSide) {
|
||||
ivec2 fragPos = ivec2(glFragCoord.xy);
|
||||
|
||||
stereoSide = getStereoSideInfo(fragPos.x, 0);
|
||||
|
||||
pixelPos = fragPos;
|
||||
pixelPos.x -= stereoSide.y;
|
||||
|
||||
texcoordPos = (vec2(pixelPos) + 0.5) * getInvWidthHeight();
|
||||
|
||||
return fragPos;
|
||||
}
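The texcoord computation above is the usual pixel-center mapping, applied per stereo side once the side offset has been subtracted from pixelPos.x:

texcoord = (pixelPos + 0.5) \cdot \big(\tfrac{1}{W}, \tfrac{1}{H}\big)

which places each integer pixel at the center of its texel in [0, 1]^2.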
|
||||
|
||||
<@endfunc@>
|
||||
|
||||
|
||||
<@endif@>
|
|
@ -47,6 +47,8 @@ void FramebufferCache::setFrameBufferSize(QSize frameBufferSize) {
|
|||
_lightingFramebuffer.reset();
|
||||
_depthPyramidFramebuffer.reset();
|
||||
_depthPyramidTexture.reset();
|
||||
_curvatureFramebuffer.reset();
|
||||
_curvatureTexture.reset();
|
||||
_occlusionFramebuffer.reset();
|
||||
_occlusionTexture.reset();
|
||||
_occlusionBlurredFramebuffer.reset();
|
||||
|
@ -105,12 +107,16 @@ void FramebufferCache::createPrimaryFramebuffer() {
|
|||
|
||||
// For AO:
|
||||
auto pointMipSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_POINT);
|
||||
_depthPyramidTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::RGB), width, height, pointMipSampler));
|
||||
_depthPyramidTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::RGB), width, height, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT)));
|
||||
_depthPyramidFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
_depthPyramidFramebuffer->setRenderBuffer(0, _depthPyramidTexture);
|
||||
_depthPyramidFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
|
||||
|
||||
|
||||
|
||||
_curvatureTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, width, height, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT)));
|
||||
_curvatureFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
_curvatureFramebuffer->setRenderBuffer(0, _curvatureTexture);
|
||||
_curvatureFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
|
||||
|
||||
resizeAmbientOcclusionBuffers();
|
||||
}
|
||||
|
||||
|
@ -245,6 +251,20 @@ gpu::TexturePointer FramebufferCache::getDepthPyramidTexture() {
|
|||
return _depthPyramidTexture;
|
||||
}
|
||||
|
||||
gpu::FramebufferPointer FramebufferCache::getCurvatureFramebuffer() {
|
||||
if (!_curvatureFramebuffer) {
|
||||
createPrimaryFramebuffer();
|
||||
}
|
||||
return _curvatureFramebuffer;
|
||||
}
|
||||
|
||||
gpu::TexturePointer FramebufferCache::getCurvatureTexture() {
|
||||
if (!_curvatureTexture) {
|
||||
createPrimaryFramebuffer();
|
||||
}
|
||||
return _curvatureTexture;
|
||||
}
|
||||
|
||||
void FramebufferCache::setAmbientOcclusionResolutionLevel(int level) {
|
||||
const int MAX_AO_RESOLUTION_LEVEL = 4;
|
||||
level = std::max(0, std::min(level, MAX_AO_RESOLUTION_LEVEL));
|
||||
|
|
|
@ -47,6 +47,12 @@ public:
|
|||
gpu::FramebufferPointer getDepthPyramidFramebuffer();
|
||||
gpu::TexturePointer getDepthPyramidTexture();
|
||||
|
||||
gpu::FramebufferPointer getCurvatureFramebuffer();
|
||||
gpu::TexturePointer getCurvatureTexture();
|
||||
|
||||
gpu::FramebufferPointer getScatteringFramebuffer();
|
||||
gpu::TexturePointer getScatteringTexture();
|
||||
|
||||
void setAmbientOcclusionResolutionLevel(int level);
|
||||
gpu::FramebufferPointer getOcclusionFramebuffer();
|
||||
gpu::TexturePointer getOcclusionTexture();
|
||||
|
@ -95,7 +101,10 @@ private:
|
|||
gpu::FramebufferPointer _depthPyramidFramebuffer;
|
||||
gpu::TexturePointer _depthPyramidTexture;
|
||||
|
||||
|
||||
|
||||
gpu::FramebufferPointer _curvatureFramebuffer;
|
||||
gpu::TexturePointer _curvatureTexture;
|
||||
|
||||
gpu::FramebufferPointer _occlusionFramebuffer;
|
||||
gpu::TexturePointer _occlusionTexture;
|
||||
|
||||
|
|
111
libraries/render-utils/src/LightAmbient.slh
Normal file
|
@ -0,0 +1,111 @@
|
|||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 7/5/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
|
||||
<@func declareSkyboxMap()@>
|
||||
// declareSkyboxMap
|
||||
uniform samplerCube skyboxMap;
|
||||
|
||||
vec4 evalSkyboxLight(vec3 direction, float lod) {
|
||||
// textureQueryLevels is not available until GLSL #version 430, so we require an explicit lod
|
||||
// float mipmapLevel = lod * textureQueryLevels(skyboxMap);
|
||||
return textureLod(skyboxMap, direction, lod);
|
||||
}
|
||||
<@endfunc@>
|
||||
|
||||
<@func declareEvalAmbientSpecularIrradiance(supportAmbientSphere, supportAmbientMap, supportIfAmbientMapElseAmbientSphere)@>
|
||||
|
||||
vec3 fresnelSchlickAmbient(vec3 fresnelColor, vec3 lightDir, vec3 halfDir, float gloss) {
|
||||
return fresnelColor + (max(vec3(gloss), fresnelColor) - fresnelColor) * pow(1.0 - clamp(dot(lightDir, halfDir), 0.0, 1.0), 5);
|
||||
}
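This is Schlick's Fresnel approximation with the usual grazing-angle reflectance of 1 replaced by a gloss-driven upper bound; the call site below evaluates it with lightDir = eyeDir, halfDir = normal and gloss = 1 - roughness:

F(l, h) = F_0 + \big(\max(g, F_0) - F_0\big)\,\big(1 - \mathrm{clamp}(l \cdot h,\, 0,\, 1)\big)^{5}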
|
||||
|
||||
<@if supportAmbientMap@>
|
||||
<$declareSkyboxMap()$>
|
||||
<@endif@>
|
||||
|
||||
vec3 evalAmbientSpecularIrradiance(Light light, vec3 fragEyeDir, vec3 fragNormal, float roughness, vec3 fresnel) {
|
||||
vec3 direction = -reflect(fragEyeDir, fragNormal);
|
||||
vec3 ambientFresnel = fresnelSchlickAmbient(fresnel, fragEyeDir, fragNormal, 1 - roughness);
|
||||
vec3 specularLight;
|
||||
<@if supportIfAmbientMapElseAmbientSphere@>
|
||||
if (getLightHasAmbientMap(light))
|
||||
<@endif@>
|
||||
<@if supportAmbientMap@>
|
||||
{
|
||||
float levels = getLightAmbientMapNumMips(light);
|
||||
float lod = min(floor((roughness)* levels), levels);
|
||||
specularLight = evalSkyboxLight(direction, lod).xyz;
|
||||
}
|
||||
<@endif@>
|
||||
<@if supportIfAmbientMapElseAmbientSphere@>
|
||||
else
|
||||
<@endif@>
|
||||
<@if supportAmbientSphere@>
|
||||
{
|
||||
specularLight = evalSphericalLight(getLightAmbientSphere(light), direction).xyz;
|
||||
}
|
||||
<@endif@>
|
||||
|
||||
return specularLight * ambientFresnel;
|
||||
}
|
||||
<@endfunc@>
|
||||
|
||||
<@func declareLightingAmbient(supportAmbientSphere, supportAmbientMap, supportIfAmbientMapElseAmbientSphere, supportScattering)@>
|
||||
|
||||
<$declareEvalAmbientSpecularIrradiance(supportAmbientSphere, supportAmbientMap, supportIfAmbientMapElseAmbientSphere)$>
|
||||
|
||||
void evalLightingAmbient(out vec3 diffuse, out vec3 specular, Light light, vec3 eyeDir, vec3 normal, float roughness, float metallic, vec3 fresnel, vec3 albedo, float obscurance) {
|
||||
// Diffuse from ambient
|
||||
diffuse = (1 - metallic) * albedo * evalSphericalLight(getLightAmbientSphere(light), normal).xyz * obscurance * getLightAmbientIntensity(light);
|
||||
|
||||
// Specular highlight from ambient
|
||||
specular = evalAmbientSpecularIrradiance(light, eyeDir, normal, roughness, fresnel) * obscurance * getLightAmbientIntensity(light);
|
||||
}
|
||||
|
||||
<@if supportScattering@>
|
||||
|
||||
<!<@include SubsurfaceScattering.slh@>!>
|
||||
|
||||
float curvatureAO(in float k) {
|
||||
return 1.0f - (0.0022f * k * k) + (0.0776f * k) + 0.7369;
|
||||
}
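Expanded, the quadratic evaluated here is

AO(k) = 1 - 0.0022\,k^2 + 0.0776\,k + 0.7369 = -0.0022\,k^2 + 0.0776\,k + 1.7369

which the caller below evaluates at k = 20 * lowCurvature and k = 8 * highCurvature, scales by 0.5, and then takes the min of.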
|
||||
|
||||
void evalLightingAmbientScattering(out vec3 diffuse, out vec3 specular, Light light,
|
||||
vec3 eyeDir, vec3 normal, float roughness,
|
||||
float metallic, vec3 fresnel, vec3 albedo, float obscurance,
|
||||
float scattering, vec3 lowNormal, float highCurvature, float lowCurvature) {
|
||||
|
||||
float ambientOcclusion = curvatureAO(lowCurvature * 20.0f) * 0.5f;
|
||||
float ambientOcclusionHF = curvatureAO(highCurvature * 8.0f) * 0.5f;
|
||||
ambientOcclusion = min(ambientOcclusion, ambientOcclusionHF);
|
||||
|
||||
/* if (showCurvature()) {
|
||||
diffuse = vec3(ambientOcclusion);
|
||||
specular = vec3(0.0);
|
||||
return;
|
||||
}*/
|
||||
|
||||
|
||||
// Diffuse from ambient
|
||||
diffuse = ambientOcclusion * albedo * evalSphericalLight(getLightAmbientSphere(light), lowNormal).xyz * getLightAmbientIntensity(light);
|
||||
|
||||
// Specular highlight from ambient
|
||||
// vec3 specularLighting = evalGlobalSpecularIrradiance(light, fragEyeDir, fragNormal, roughness, fresnel);
|
||||
// color += specularLighting;
|
||||
|
||||
|
||||
// Specular highlight from ambient
|
||||
// specular = evalAmbientSpecularIrradiance(light, eyeDir, normal, roughness, fresnel) * obscurance * getLightAmbientIntensity(light);
|
||||
specular = vec3(0.0);
|
||||
}
|
||||
|
||||
<@endif@>
|
||||
|
||||
<@endfunc@>
|
50
libraries/render-utils/src/LightDirectional.slh
Normal file
|
@ -0,0 +1,50 @@
|
|||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 7/5/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
|
||||
<@func declareLightingDirectional(supportScattering)@>
|
||||
<@include DeferredLighting.slh@>
|
||||
|
||||
void evalLightingDirectional(out vec3 diffuse, out vec3 specular, Light light,
|
||||
vec3 eyeDir, vec3 normal, float roughness,
|
||||
float metallic, vec3 fresnel, vec3 albedo, float shadow) {
|
||||
|
||||
vec4 shading = evalFragShading(normal, -getLightDirection(light), eyeDir, metallic, fresnel, roughness);
|
||||
|
||||
diffuse = albedo * shading.w * shadow * getLightColor(light) * getLightIntensity(light);
|
||||
|
||||
specular = shading.rgb * shadow * getLightColor(light) * getLightIntensity(light);
|
||||
}
|
||||
|
||||
<@if supportScattering@>
|
||||
|
||||
void evalLightingDirectionalScattering(out vec3 diffuse, out vec3 specular, Light light,
|
||||
vec3 eyeDir, vec3 normal, float roughness,
|
||||
float metallic, vec3 fresnel, vec3 albedo, float shadow,
|
||||
float scattering, vec3 midNormal, vec3 lowNormal, float curvature) {
|
||||
|
||||
vec3 fragLightDir = -normalize(getLightDirection(light));
|
||||
|
||||
evalFragShading(diffuse, specular,
|
||||
normal, fragLightDir, eyeDir,
|
||||
metallic, fresnel, roughness,
|
||||
scattering, vec4(midNormal, curvature), vec4(lowNormal, curvature));
|
||||
|
||||
vec3 lightEnergy = shadow * getLightColor(light) * getLightIntensity(light);
|
||||
|
||||
diffuse *= albedo * lightEnergy;
|
||||
|
||||
specular *= lightEnergy;
|
||||
}
|
||||
|
||||
<@endif@>
|
||||
|
||||
<@endfunc@>
|
||||
|
87
libraries/render-utils/src/LightPoint.slh
Normal file
|
@ -0,0 +1,87 @@
|
|||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 7/5/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
|
||||
<@func declareLightingPoint(supportScattering)@>
|
||||
|
||||
|
||||
<@include DeferredLighting.slh@>
|
||||
|
||||
void evalLightingPoint(out vec3 diffuse, out vec3 specular, Light light,
|
||||
vec3 fragLightVec, vec3 fragEyeDir, vec3 normal, float roughness,
|
||||
float metallic, vec3 fresnel, vec3 albedo, float shadow) {
|
||||
|
||||
// Alright, we're valid in the volume
|
||||
float fragLightDistance = length(fragLightVec);
|
||||
vec3 fragLightDir = fragLightVec / fragLightDistance;
|
||||
|
||||
// Eval attenuation
|
||||
float radialAttenuation = evalLightAttenuation(light, fragLightDistance);
|
||||
|
||||
vec3 lightEnergy = radialAttenuation * shadow * getLightColor(light) * getLightIntensity(light);
|
||||
|
||||
// Eval shading
|
||||
vec4 shading = evalFragShading(normal, fragLightDir, fragEyeDir, metallic, fresnel, roughness);
|
||||
|
||||
diffuse = albedo * shading.w * lightEnergy;
|
||||
|
||||
specular = shading.rgb * lightEnergy;
|
||||
|
||||
if (getLightShowContour(light) > 0.0) {
|
||||
// Show edge
|
||||
float edge = abs(2.0 * ((getLightRadius(light) - fragLightDistance) / (0.1)) - 1.0);
|
||||
if (edge < 1) {
|
||||
float edgeCoord = exp2(-8.0*edge*edge);
|
||||
diffuse = vec3(edgeCoord * edgeCoord * getLightShowContour(light) * getLightColor(light));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
<@if supportScattering@>
|
||||
|
||||
void evalLightingPointScattering(out vec3 diffuse, out vec3 specular, Light light,
|
||||
vec3 fragLightVec, vec3 fragEyeDir, vec3 normal, float roughness,
|
||||
float metallic, vec3 fresnel, vec3 albedo, float shadow,
|
||||
float scattering, vec3 midNormal, vec3 lowNormal, float curvature) {
|
||||
|
||||
// Alright, we're valid in the volume
|
||||
float fragLightDistance = length(fragLightVec);
|
||||
vec3 fragLightDir = fragLightVec / fragLightDistance;
|
||||
|
||||
// Eval attenuation
|
||||
float radialAttenuation = evalLightAttenuation(light, fragLightDistance);
|
||||
|
||||
vec3 lightEnergy = radialAttenuation * shadow * getLightColor(light) * getLightIntensity(light);
|
||||
|
||||
// Eval shading
|
||||
evalFragShading(diffuse, specular,
|
||||
normal, fragLightDir, fragEyeDir,
|
||||
metallic, fresnel, roughness,
|
||||
scattering, vec4(midNormal, curvature), vec4(lowNormal, curvature));
|
||||
|
||||
diffuse *= albedo * lightEnergy;
|
||||
|
||||
specular *= lightEnergy;
|
||||
|
||||
if (getLightShowContour(light) > 0.0) {
|
||||
// Show edge
|
||||
float edge = abs(2.0 * ((getLightRadius(light) - fragLightDistance) / (0.1)) - 1.0);
|
||||
if (edge < 1) {
|
||||
float edgeCoord = exp2(-8.0*edge*edge);
|
||||
diffuse = vec3(edgeCoord * edgeCoord * getLightShowContour(light) * getLightColor(light));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
<@endif@>
|
||||
|
||||
<@endfunc@>
|
||||
|
||||
|
90
libraries/render-utils/src/LightSpot.slh
Normal file
|
@ -0,0 +1,90 @@
|
|||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 7/5/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
|
||||
<@func declareLightingSpot(supportScattering)@>
|
||||
|
||||
void evalLightingSpot(out vec3 diffuse, out vec3 specular, Light light,
|
||||
vec4 fragLightDirLen, float cosSpotAngle, vec3 fragEyeDir, vec3 normal, float roughness,
|
||||
float metallic, vec3 fresnel, vec3 albedo, float shadow) {
|
||||
|
||||
// Alright, we're valid in the volume
|
||||
float fragLightDistance = fragLightDirLen.w;
|
||||
vec3 fragLightDir = fragLightDirLen.xyz;
|
||||
|
||||
// Eval attenuation
|
||||
float radialAttenuation = evalLightAttenuation(light, fragLightDistance);
|
||||
float angularAttenuation = evalLightSpotAttenuation(light, cosSpotAngle);
|
||||
vec3 lightEnergy = angularAttenuation * radialAttenuation * shadow * getLightColor(light) * getLightIntensity(light);
|
||||
|
||||
// Eval shading
|
||||
vec4 shading = evalFragShading(normal, fragLightDir, fragEyeDir, metallic, fresnel, roughness);
|
||||
|
||||
diffuse = albedo * shading.w * lightEnergy;
|
||||
|
||||
specular = shading.rgb * lightEnergy;
|
||||
|
||||
if (getLightShowContour(light) > 0.0) {
|
||||
// Show edges
|
||||
float edgeDistR = (getLightRadius(light) - fragLightDistance);
|
||||
float edgeDistS = dot(fragLightDistance * vec2(cosSpotAngle, sqrt(1.0 - cosSpotAngle * cosSpotAngle)), -getLightSpotOutsideNormal2(light));
|
||||
float edgeDist = min(edgeDistR, edgeDistS);
|
||||
float edge = abs(2.0 * (edgeDist / (0.1)) - 1.0);
|
||||
if (edge < 1) {
|
||||
float edgeCoord = exp2(-8.0*edge*edge);
|
||||
diffuse = vec3(edgeCoord * edgeCoord * getLightColor(light));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
<@if supportScattering@>
|
||||
|
||||
void evalLightingSpotScattering(out vec3 diffuse, out vec3 specular, Light light,
|
||||
vec4 fragLightDirLen, float cosSpotAngle, vec3 fragEyeDir, vec3 normal, float roughness,
|
||||
float metallic, vec3 fresnel, vec3 albedo, float shadow,
|
||||
float scattering, vec3 midNormal, vec3 lowNormal, float curvature) {
|
||||
|
||||
// Alright, we're valid in the volume
|
||||
float fragLightDistance = fragLightDirLen.w;
|
||||
vec3 fragLightDir = fragLightDirLen.xyz;
|
||||
|
||||
// Eval attenuation
|
||||
float radialAttenuation = evalLightAttenuation(light, fragLightDistance);
|
||||
float angularAttenuation = evalLightSpotAttenuation(light, cosSpotAngle);
|
||||
vec3 lightEnergy = angularAttenuation * radialAttenuation * shadow * getLightColor(light) * getLightIntensity(light);
|
||||
|
||||
// Eval shading
|
||||
evalFragShading(diffuse, specular,
|
||||
normal, fragLightDir, fragEyeDir,
|
||||
metallic, fresnel, roughness,
|
||||
scattering, vec4(midNormal, curvature), vec4(lowNormal, curvature));
|
||||
|
||||
diffuse *= albedo * lightEnergy;
|
||||
|
||||
specular *= lightEnergy;
|
||||
|
||||
if (getLightShowContour(light) > 0.0) {
|
||||
// Show edges
|
||||
float edgeDistR = (getLightRadius(light) - fragLightDistance);
|
||||
float edgeDistS = dot(fragLightDistance * vec2(cosSpotAngle, sqrt(1.0 - cosSpotAngle * cosSpotAngle)), -getLightSpotOutsideNormal2(light));
|
||||
float edgeDist = min(edgeDistR, edgeDistS);
|
||||
float edge = abs(2.0 * (edgeDist / (0.1)) - 1.0);
|
||||
if (edge < 1) {
|
||||
float edgeCoord = exp2(-8.0*edge*edge);
|
||||
diffuse = vec3(edgeCoord * edgeCoord * getLightColor(light));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
<@endif@>
|
||||
|
||||
<@endfunc@>
|
||||
|
||||
|
|
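For reference, the spot evaluation above multiplies a radial falloff by an angular falloff and by the shadow term before scaling the shading result, and optionally overlays a 0.1-unit-wide contour band at the volume boundary. Below is a minimal CPU-side sketch of that combination; the falloff formulas are simple stand-ins, not the engine's evalLightAttenuation / evalLightSpotAttenuation, which live in the Light schema.

#include <algorithm>
#include <cmath>
#include <glm/glm.hpp>

// Stand-in falloffs; the real ones are driven by the Light uniform data.
static float radialFalloff(float distance, float radius) {
    float t = glm::clamp(1.0f - distance / radius, 0.0f, 1.0f);
    return t * t;
}

static float angularFalloff(float cosSpotAngle, float cosCutoff, float exponent) {
    float t = glm::clamp((cosSpotAngle - cosCutoff) / (1.0f - cosCutoff), 0.0f, 1.0f);
    return std::pow(t, exponent);
}

// Mirrors the structure of evalLightingSpot: energy = angular * radial * shadow * color * intensity.
glm::vec3 spotLightEnergy(float distance, float cosSpotAngle, float shadow,
                          glm::vec3 lightColor, float intensity,
                          float radius, float cosCutoff, float exponent) {
    float radial = radialFalloff(distance, radius);
    float angular = angularFalloff(cosSpotAngle, cosCutoff, exponent);
    return angular * radial * shadow * lightColor * intensity;
}

// Contour highlight used when getLightShowContour(light) > 0: edgeDist is the distance to the
// nearest volume boundary (radius or cone side); the band is 0.1 units wide.
float contourBand(float edgeDist) {
    float edge = std::fabs(2.0f * (edgeDist / 0.1f) - 1.0f);
    if (edge >= 1.0f) {
        return 0.0f;
    }
    float edgeCoord = std::exp2(-8.0f * edge * edge);
    return edgeCoord * edgeCoord;
}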
@@ -88,8 +88,9 @@ const glm::mat4& LightStage::Shadow::getProjection() const {
|
|||
}
|
||||
|
||||
const LightStage::LightPointer LightStage::addLight(model::LightPointer light) {
|
||||
Shadow stageShadow{light};
|
||||
LightPointer stageLight = std::make_shared<Light>(std::move(stageShadow));
|
||||
// Shadow stageShadow{light};
|
||||
LightPointer stageLight = std::make_shared<Light>(Shadow(light));
|
||||
stageLight->light = light;
|
||||
lights.push_back(stageLight);
|
||||
return stageLight;
|
||||
}
|
||||
|
|
|
@@ -52,6 +52,8 @@ public:
|
|||
glm::float32 scale = 1 / MAP_SIZE;
|
||||
};
|
||||
UniformBufferView _schemaBuffer = nullptr;
|
||||
|
||||
friend class Light;
|
||||
};
|
||||
using ShadowPointer = std::shared_ptr<Shadow>;
|
||||
|
||||
|
|
libraries/render-utils/src/LightingModel.cpp (new file, 142 lines)
@@ -0,0 +1,142 @@
|
|||
//
|
||||
// LightingModel.cpp
|
||||
// libraries/render-utils/src/
|
||||
//
|
||||
// Created by Sam Gateau 7/1/2016.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "LightingModel.h"
|
||||
|
||||
LightingModel::LightingModel() {
|
||||
Parameters parameters;
|
||||
_parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Parameters), (const gpu::Byte*) ¶meters));
|
||||
}
|
||||
|
||||
void LightingModel::setUnlit(bool enable) {
|
||||
if (enable != isUnlitEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableUnlit = (float) enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isUnlitEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableUnlit;
|
||||
}
|
||||
|
||||
void LightingModel::setShaded(bool enable) {
|
||||
if (enable != isShadedEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableShaded = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isShadedEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableShaded;
|
||||
}
|
||||
|
||||
void LightingModel::setEmissive(bool enable) {
|
||||
if (enable != isEmissiveEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableEmissive = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isEmissiveEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableEmissive;
|
||||
}
|
||||
void LightingModel::setLightmap(bool enable) {
|
||||
if (enable != isLightmapEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableLightmap = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isLightmapEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableLightmap;
|
||||
}
|
||||
|
||||
void LightingModel::setScattering(bool enable) {
|
||||
if (enable != isScatteringEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableScattering = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isScatteringEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableScattering;
|
||||
}
|
||||
|
||||
void LightingModel::setDiffuse(bool enable) {
|
||||
if (enable != isDiffuseEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableDiffuse = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isDiffuseEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableDiffuse;
|
||||
}
|
||||
void LightingModel::setSpecular(bool enable) {
|
||||
if (enable != isSpecularEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableSpecular = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isSpecularEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableSpecular;
|
||||
}
|
||||
|
||||
void LightingModel::setAmbientLight(bool enable) {
|
||||
if (enable != isAmbientLightEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableAmbientLight = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isAmbientLightEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableAmbientLight;
|
||||
}
|
||||
void LightingModel::setDirectionalLight(bool enable) {
|
||||
if (enable != isDirectionalLightEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableDirectionalLight = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isDirectionalLightEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableDirectionalLight;
|
||||
}
|
||||
void LightingModel::setPointLight(bool enable) {
|
||||
if (enable != isPointLightEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enablePointLight = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isPointLightEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enablePointLight;
|
||||
}
|
||||
void LightingModel::setSpotLight(bool enable) {
|
||||
if (enable != isSpotLightEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().enableSpotLight = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isSpotLightEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().enableSpotLight;
|
||||
}
|
||||
void LightingModel::setShowLightContour(bool enable) {
|
||||
if (enable != isShowLightContourEnabled()) {
|
||||
_parametersBuffer.edit<Parameters>().showLightContour = (float)enable;
|
||||
}
|
||||
}
|
||||
bool LightingModel::isShowLightContourEnabled() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().showLightContour;
|
||||
}
|
||||
|
||||
MakeLightingModel::MakeLightingModel() {
|
||||
_lightingModel = std::make_shared<LightingModel>();
|
||||
}
|
||||
|
||||
void MakeLightingModel::configure(const Config& config) {
|
||||
_lightingModel->setUnlit(config.enableUnlit);
|
||||
_lightingModel->setShaded(config.enableShaded);
|
||||
_lightingModel->setEmissive(config.enableEmissive);
|
||||
_lightingModel->setLightmap(config.enableLightmap);
|
||||
_lightingModel->setScattering(config.enableScattering);
|
||||
_lightingModel->setDiffuse(config.enableDiffuse);
|
||||
_lightingModel->setSpecular(config.enableSpecular);
|
||||
_lightingModel->setAmbientLight(config.enableAmbientLight);
|
||||
_lightingModel->setDirectionalLight(config.enableDirectionalLight);
|
||||
_lightingModel->setPointLight(config.enablePointLight);
|
||||
_lightingModel->setSpotLight(config.enableSpotLight);
|
||||
_lightingModel->setShowLightContour(config.showLightContour);
|
||||
}
|
||||
|
||||
void MakeLightingModel::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, LightingModelPointer& lightingModel) {
|
||||
|
||||
lightingModel = _lightingModel;
|
||||
}
|
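The toggles above are stored as floats rather than bools so the whole block can be uploaded as a single uniform buffer and read by the shader as four vec4s (see the LightingModel struct in LightingModel.slh below). A plain-C++ analogue of that packing, without the gpu::BufferView plumbing; the field names mirror Parameters, while the std140 assumption and the pad names are mine:

#include <array>
#include <cstring>

// 13 toggles padded out to four float4s so the layout lines up with a uniform block of four vec4s.
struct LightingToggles {
    float enableUnlit{1.0f}, enableShaded{1.0f}, enableEmissive{1.0f}, enableLightmap{1.0f};
    float enableScattering{1.0f}, enableDiffuse{1.0f}, enableSpecular{1.0f}, pad0{0.0f};
    float enableAmbientLight{1.0f}, enableDirectionalLight{1.0f}, enablePointLight{1.0f}, enableSpotLight{1.0f};
    float showLightContour{1.0f}, pad1{0.0f}, pad2{0.0f}, pad3{0.0f};
};

// Flatten to the raw 16-float block that would back the lightingModelBuffer uniform.
std::array<float, 16> packLightingToggles(const LightingToggles& t) {
    std::array<float, 16> block{};
    static_assert(sizeof(LightingToggles) == sizeof(block), "expected a tightly packed 4 x vec4 layout");
    std::memcpy(block.data(), &t, sizeof(block));
    return block;
}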
libraries/render-utils/src/LightingModel.h (new file, 151 lines)
@@ -0,0 +1,151 @@
|
|||
//
|
||||
// LightingModel.h
|
||||
// libraries/render-utils/src/
|
||||
//
|
||||
// Created by Sam Gateau 7/1/2016.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_LightingModel_h
|
||||
#define hifi_LightingModel_h
|
||||
|
||||
#include "gpu/Resource.h"
|
||||
#include "render/DrawTask.h"
|
||||
|
||||
class RenderArgs;
|
||||
|
||||
// LightingModel is a helper class gathering in one place the flags to enable the lighting contributions
|
||||
class LightingModel {
|
||||
public:
|
||||
using UniformBufferView = gpu::BufferView;
|
||||
|
||||
LightingModel();
|
||||
|
||||
|
||||
void setUnlit(bool enable);
|
||||
bool isUnlitEnabled() const;
|
||||
void setShaded(bool enable);
|
||||
bool isShadedEnabled() const;
|
||||
|
||||
void setEmissive(bool enable);
|
||||
bool isEmissiveEnabled() const;
|
||||
void setLightmap(bool enable);
|
||||
bool isLightmapEnabled() const;
|
||||
|
||||
void setScattering(bool enable);
|
||||
bool isScatteringEnabled() const;
|
||||
|
||||
void setDiffuse(bool enable);
|
||||
bool isDiffuseEnabled() const;
|
||||
void setSpecular(bool enable);
|
||||
bool isSpecularEnabled() const;
|
||||
|
||||
void setAmbientLight(bool enable);
|
||||
bool isAmbientLightEnabled() const;
|
||||
void setDirectionalLight(bool enable);
|
||||
bool isDirectionalLightEnabled() const;
|
||||
void setPointLight(bool enable);
|
||||
bool isPointLightEnabled() const;
|
||||
void setSpotLight(bool enable);
|
||||
bool isSpotLightEnabled() const;
|
||||
|
||||
void setShowLightContour(bool enable);
|
||||
bool isShowLightContourEnabled() const;
|
||||
|
||||
UniformBufferView getParametersBuffer() const { return _parametersBuffer; }
|
||||
|
||||
protected:
|
||||
|
||||
|
||||
// Class describing the uniform buffer holding the lighting model toggles
|
||||
// It's changing every frame
|
||||
class Parameters {
|
||||
public:
|
||||
float enableUnlit{ 1.0f };
|
||||
float enableShaded{ 1.0f };
|
||||
float enableEmissive{ 1.0f };
|
||||
float enableLightmap{ 1.0f };
|
||||
|
||||
float enableScattering{ 1.0f };
|
||||
float enableDiffuse{ 1.0f };
|
||||
float enableSpecular{ 1.0f };
|
||||
float spare;
|
||||
|
||||
float enableAmbientLight{ 1.0f };
|
||||
float enableDirectionalLight{ 1.0f };
|
||||
float enablePointLight{ 1.0f };
|
||||
float enableSpotLight{ 1.0f };
|
||||
|
||||
float showLightContour{ 1.0f };
|
||||
glm::vec3 spares{ 0.0f };
|
||||
|
||||
Parameters() {}
|
||||
};
|
||||
UniformBufferView _parametersBuffer;
|
||||
};
|
||||
|
||||
using LightingModelPointer = std::shared_ptr<LightingModel>;
|
||||
|
||||
|
||||
|
||||
|
||||
class MakeLightingModelConfig : public render::Job::Config {
|
||||
Q_OBJECT
|
||||
|
||||
Q_PROPERTY(bool enableUnlit MEMBER enableUnlit NOTIFY dirty)
|
||||
Q_PROPERTY(bool enableShaded MEMBER enableShaded NOTIFY dirty)
|
||||
Q_PROPERTY(bool enableEmissive MEMBER enableEmissive NOTIFY dirty)
|
||||
Q_PROPERTY(bool enableLightmap MEMBER enableLightmap NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(bool enableScattering MEMBER enableScattering NOTIFY dirty)
|
||||
Q_PROPERTY(bool enableDiffuse MEMBER enableDiffuse NOTIFY dirty)
|
||||
Q_PROPERTY(bool enableSpecular MEMBER enableSpecular NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(bool enableAmbientLight MEMBER enableAmbientLight NOTIFY dirty)
|
||||
Q_PROPERTY(bool enableDirectionalLight MEMBER enableDirectionalLight NOTIFY dirty)
|
||||
Q_PROPERTY(bool enablePointLight MEMBER enablePointLight NOTIFY dirty)
|
||||
Q_PROPERTY(bool enableSpotLight MEMBER enableSpotLight NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(bool showLightContour MEMBER showLightContour NOTIFY dirty)
|
||||
|
||||
public:
|
||||
MakeLightingModelConfig() : render::Job::Config() {} // Make Lighting Model is always on
|
||||
|
||||
bool enableUnlit{ true };
|
||||
bool enableShaded{ true };
|
||||
bool enableEmissive{ true };
|
||||
bool enableLightmap{ true };
|
||||
|
||||
bool enableScattering{ true };
|
||||
bool enableDiffuse{ true };
|
||||
bool enableSpecular{ true };
|
||||
|
||||
bool enableAmbientLight{ true };
|
||||
bool enableDirectionalLight{ true };
|
||||
bool enablePointLight{ true };
|
||||
bool enableSpotLight{ true };
|
||||
|
||||
bool showLightContour{ true };
|
||||
|
||||
signals:
|
||||
void dirty();
|
||||
};
|
||||
|
||||
class MakeLightingModel {
|
||||
public:
|
||||
using Config = MakeLightingModelConfig;
|
||||
using JobModel = render::Job::ModelO<MakeLightingModel, LightingModelPointer, Config>;
|
||||
|
||||
MakeLightingModel();
|
||||
|
||||
void configure(const Config& config);
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, LightingModelPointer& lightingModel);
|
||||
|
||||
private:
|
||||
LightingModelPointer _lightingModel;
|
||||
};
|
||||
|
||||
#endif // hifi_LightingModel_h
|
libraries/render-utils/src/LightingModel.slh (new file, 179 lines)
@@ -0,0 +1,179 @@
|
|||
<!
|
||||
// LightingModel.slh
|
||||
// fragment shader
|
||||
//
|
||||
// Created by Sam Gateau on 1/25/14.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
!>
|
||||
<@if not LIGHTING_MODEL_SLH@>
|
||||
<@def LIGHTING_MODEL_SLH@>
|
||||
|
||||
<@func declareLightingModel()@>
|
||||
|
||||
struct LightingModel {
|
||||
vec4 _UnlitShadedEmissiveLightmap;
|
||||
vec4 _ScatteringDiffuseSpecular;
|
||||
vec4 _AmbientDirectionalPointSpot;
|
||||
vec4 _ShowContour;
|
||||
};
|
||||
|
||||
uniform lightingModelBuffer {
|
||||
LightingModel lightingModel;
|
||||
};
|
||||
|
||||
float isUnlitEnabled() {
|
||||
return lightingModel._UnlitShadedEmissiveLightmap.x;
|
||||
}
|
||||
float isShadedEnabled() {
|
||||
return lightingModel._UnlitShadedEmissiveLightmap.y;
|
||||
}
|
||||
float isEmissiveEnabled() {
|
||||
return lightingModel._UnlitShadedEmissiveLightmap.z;
|
||||
}
|
||||
float isLightmapEnabled() {
|
||||
return lightingModel._UnlitShadedEmissiveLightmap.w;
|
||||
}
|
||||
|
||||
float isScatteringEnabled() {
|
||||
return lightingModel._ScatteringDiffuseSpecular.x;
|
||||
}
|
||||
float isDiffuseEnabled() {
|
||||
return lightingModel._ScatteringDiffuseSpecular.y;
|
||||
}
|
||||
float isSpecularEnabled() {
|
||||
return lightingModel._ScatteringDiffuseSpecular.z;
|
||||
}
|
||||
|
||||
float isAmbientEnabled() {
|
||||
return lightingModel._AmbientDirectionalPointSpot.x;
|
||||
}
|
||||
float isDirectionalEnabled() {
|
||||
return lightingModel._AmbientDirectionalPointSpot.y;
|
||||
}
|
||||
float isPointEnabled() {
|
||||
return lightingModel._AmbientDirectionalPointSpot.z;
|
||||
}
|
||||
float isSpotEnabled() {
|
||||
return lightingModel._AmbientDirectionalPointSpot.w;
|
||||
}
|
||||
|
||||
float isShowContour() {
|
||||
return lightingModel._ShowContour.x;
|
||||
}
|
||||
|
||||
<@endfunc@>
|
||||
|
||||
<@func declareBeckmannSpecular()@>
|
||||
|
||||
uniform sampler2D scatteringSpecularBeckmann;
|
||||
|
||||
float fetchSpecularBeckmann(float ndoth, float roughness) {
|
||||
return pow(2.0 * texture(scatteringSpecularBeckmann, vec2(ndoth, roughness)).r, 10.0);
|
||||
}
|
||||
|
||||
float fresnelSchlickScalar(float fresnelColor, vec3 lightDir, vec3 halfDir) {
|
||||
float base = 1.0 - clamp(dot(lightDir, halfDir), 0.0, 1.0);
|
||||
float exponential = pow(base, 5.0);
|
||||
return (exponential)+fresnelColor * (1.0 - exponential);
|
||||
}
|
||||
|
||||
vec2 skinSpecular(vec3 N, vec3 L, vec3 V, float roughness, float intensity) {
|
||||
vec2 result = vec2(0.0, 1.0);
|
||||
float ndotl = dot(N, L);
|
||||
if (ndotl > 0.0) {
|
||||
vec3 h = L + V;
|
||||
vec3 H = normalize(h);
|
||||
float ndoth = dot(N, H);
|
||||
float PH = fetchSpecularBeckmann(ndoth, roughness);
|
||||
float F = fresnelSchlickScalar(0.028, H, V);
|
||||
float frSpec = max(PH * F / dot(h, h), 0.0);
|
||||
result.x = ndotl * intensity * frSpec;
|
||||
result.y -= F;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
<@endfunc@>
|
||||
|
||||
<@func declareEvalPBRShading()@>
|
||||
|
||||
vec3 fresnelSchlickColor(vec3 fresnelColor, vec3 lightDir, vec3 halfDir) {
|
||||
float base = 1.0 - clamp(dot(lightDir, halfDir), 0.0, 1.0);
|
||||
float exponential = pow(base, 5.0);
|
||||
return vec3(exponential) + fresnelColor * (1.0 - exponential);
|
||||
}
|
||||
|
||||
|
||||
|
||||
float specularDistribution(float roughness, vec3 normal, vec3 halfDir) {
|
||||
float ndoth = clamp(dot(halfDir, normal), 0.0, 1.0);
|
||||
float gloss2 = pow(0.001 + roughness, 4);
|
||||
float denom = (ndoth * ndoth*(gloss2 - 1) + 1);
|
||||
float power = gloss2 / (3.14159 * denom * denom);
|
||||
return power;
|
||||
}
|
||||
<! //NOTE: Another implementation of specularDistribution
|
||||
float specularDistribution(float roughness, vec3 normal, vec3 halfDir) {
|
||||
float gloss = exp2(10 * (1.0 - roughness) + 1);
|
||||
float power = pow(clamp(dot(halfDir, normal), 0.0, 1.0), gloss);
|
||||
power *= (gloss * 0.125 + 0.25);
|
||||
return power;
|
||||
}
|
||||
!>
|
||||
// Frag Shading returns the diffuse amount as W and the specular rgb as xyz
|
||||
vec4 evalPBRShading(vec3 fragNormal, vec3 fragLightDir, vec3 fragEyeDir, float metallic, vec3 fresnel, float roughness) {
|
||||
// Diffuse Lighting
|
||||
float diffuse = clamp(dot(fragNormal, fragLightDir), 0.0, 1.0);
|
||||
|
||||
// Specular Lighting
|
||||
vec3 halfDir = normalize(fragEyeDir + fragLightDir);
|
||||
vec3 fresnelColor = fresnelSchlickColor(fresnel, fragLightDir, halfDir);
|
||||
float power = specularDistribution(roughness, fragNormal, halfDir);
|
||||
vec3 specular = power * fresnelColor * diffuse;
|
||||
|
||||
return vec4(specular, (1.0 - metallic) * diffuse * (1 - fresnelColor.x));
|
||||
}
|
||||
<@endfunc@>
|
||||
|
||||
|
||||
|
||||
<$declareEvalPBRShading()$>
|
||||
|
||||
// Returns the specular/reflection component in xyz and the diffuse component in w
|
||||
vec4 evalFragShading(vec3 fragNormal, vec3 fragLightDir, vec3 fragEyeDir, float metallic, vec3 fresnel, float roughness) {
|
||||
return evalPBRShading(fragNormal, fragLightDir, fragEyeDir, metallic, fresnel, roughness);
|
||||
}
|
||||
|
||||
|
||||
<$declareBeckmannSpecular()$>
|
||||
<@include SubsurfaceScattering.slh@>
|
||||
<$declareSubsurfaceScatteringBRDF()$>
|
||||
|
||||
void evalFragShading(out vec3 diffuse, out vec3 specular,
|
||||
vec3 fragNormal, vec3 fragLightDir, vec3 fragEyeDir,
|
||||
float metallic, vec3 fresnel, float roughness,
|
||||
float scattering, vec4 midNormalCurvature, vec4 lowNormalCurvature) {
|
||||
if (scattering > 0.0) {
|
||||
vec3 brdf = evalSkinBRDF(fragLightDir, fragNormal, midNormalCurvature.xyz, lowNormalCurvature.xyz, lowNormalCurvature.w);
|
||||
float NdotL = clamp(dot(fragNormal, fragLightDir), 0.0, 1.0);
|
||||
diffuse = mix(vec3(NdotL), brdf, scattering);
|
||||
|
||||
// Specular Lighting
|
||||
vec3 halfDir = normalize(fragEyeDir + fragLightDir);
|
||||
vec2 specularBrdf = skinSpecular(fragNormal, fragLightDir, fragEyeDir, roughness, 1.0);
|
||||
|
||||
diffuse *= specularBrdf.y;
|
||||
specular = vec3(specularBrdf.x);
|
||||
|
||||
} else {
|
||||
vec4 shading = evalPBRShading(fragNormal, fragLightDir, fragEyeDir, metallic, fresnel, roughness);
|
||||
diffuse = vec3(shading.w);
|
||||
specular = shading.xyz;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
<@endif@>
|
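To sanity-check the BRDF above on the CPU, here is a direct glm transcription of fresnelSchlickColor, specularDistribution and evalPBRShading from this file; it assumes the same conventions (all direction vectors normalized, roughness in [0,1]).

#include <cmath>
#include <glm/glm.hpp>

glm::vec3 fresnelSchlickColor(glm::vec3 fresnelColor, glm::vec3 lightDir, glm::vec3 halfDir) {
    float base = 1.0f - glm::clamp(glm::dot(lightDir, halfDir), 0.0f, 1.0f);
    float exponential = std::pow(base, 5.0f);
    return glm::vec3(exponential) + fresnelColor * (1.0f - exponential);
}

float specularDistribution(float roughness, glm::vec3 normal, glm::vec3 halfDir) {
    float ndoth = glm::clamp(glm::dot(halfDir, normal), 0.0f, 1.0f);
    float gloss2 = std::pow(0.001f + roughness, 4.0f);
    float denom = ndoth * ndoth * (gloss2 - 1.0f) + 1.0f;
    return gloss2 / (3.14159f * denom * denom);
}

// Returns specular in xyz and the diffuse amount in w, matching the GLSL version.
glm::vec4 evalPBRShading(glm::vec3 n, glm::vec3 l, glm::vec3 v, float metallic, glm::vec3 fresnel, float roughness) {
    float diffuse = glm::clamp(glm::dot(n, l), 0.0f, 1.0f);
    glm::vec3 halfDir = glm::normalize(v + l);
    glm::vec3 fresnelColor = fresnelSchlickColor(fresnel, l, halfDir);
    float power = specularDistribution(roughness, n, halfDir);
    glm::vec3 specular = power * fresnelColor * diffuse;
    return glm::vec4(specular, (1.0f - metallic) * diffuse * (1.0f - fresnelColor.x));
}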
|
@@ -44,7 +44,7 @@ TexMapArray getTexMapArray() {
|
|||
<@endfunc@>
|
||||
|
||||
|
||||
<@func declareMaterialTextures(withAlbedo, withRoughness, withNormal, withMetallic, withEmissive, withOcclusion)@>
|
||||
<@func declareMaterialTextures(withAlbedo, withRoughness, withNormal, withMetallic, withEmissive, withOcclusion, withScattering)@>
|
||||
|
||||
<@if withAlbedo@>
|
||||
uniform sampler2D albedoMap;
|
||||
|
@@ -87,10 +87,20 @@ float fetchOcclusionMap(vec2 uv) {
|
|||
return texture(occlusionMap, uv).r;
|
||||
}
|
||||
<@endif@>
|
||||
|
||||
<@if withScattering@>
|
||||
uniform sampler2D scatteringMap;
|
||||
float fetchScatteringMap(vec2 uv) {
|
||||
float scattering = texture(scatteringMap, uv).r; // boolean scattering for now
|
||||
return max(((scattering - 0.1) / 0.9), 0.0);
|
||||
return texture(scatteringMap, uv).r; // boolean scattering for now
|
||||
}
|
||||
<@endif@>
|
||||
|
||||
<@endfunc@>
|
||||
|
||||
|
||||
<@func fetchMaterialTexturesCoord0(matKey, texcoord0, albedo, roughness, normal, metallic, emissive)@>
|
||||
<@func fetchMaterialTexturesCoord0(matKey, texcoord0, albedo, roughness, normal, metallic, emissive, scattering)@>
|
||||
<@if albedo@>
|
||||
vec4 <$albedo$> = (((<$matKey$> & (ALBEDO_MAP_BIT | OPACITY_MASK_MAP_BIT | OPACITY_TRANSLUCENT_MAP_BIT)) != 0) ? fetchAlbedoMap(<$texcoord0$>) : vec4(1.0));
|
||||
<@endif@>
|
||||
|
@@ -106,6 +116,9 @@ float fetchOcclusionMap(vec2 uv) {
|
|||
<@if emissive@>
|
||||
vec3 <$emissive$> = (((<$matKey$> & EMISSIVE_MAP_BIT) != 0) ? fetchEmissiveMap(<$texcoord0$>) : vec3(0.0));
|
||||
<@endif@>
|
||||
<@if scattering@>
|
||||
float <$scattering$> = (((<$matKey$> & SCATTERING_MAP_BIT) != 0) ? fetchScatteringMap(<$texcoord0$>) : 0.0);
|
||||
<@endif@>
|
||||
<@endfunc@>
|
||||
|
||||
<@func fetchMaterialTexturesCoord1(matKey, texcoord1, occlusion, lightmapVal)@>
|
||||
|
@@ -191,4 +204,10 @@ vec3 fetchLightmapMap(vec2 uv) {
|
|||
}
|
||||
<@endfunc@>
|
||||
|
||||
<@func evalMaterialScattering(fetchedScattering, materialScattering, matKey, scattering)@>
|
||||
{
|
||||
<$scattering$> = (((<$matKey$> & SCATTERING_MAP_BIT) != 0) ? <$fetchedScattering$> : <$materialScattering$>);
|
||||
}
|
||||
<@endfunc@>
|
||||
|
||||
<@endif@>
|
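The scattering texture is treated as effectively boolean: the remap in fetchScatteringMap above clamps anything at or below 0.1 to zero and rescales the rest to [0,1]. A one-line CPU equivalent, assuming the sampled red channel is already in [0,1]:

#include <algorithm>

// Equivalent of the fetchScatteringMap remap: values <= 0.1 mean "no scattering".
inline float remapScattering(float sampledRed) {
    return std::max((sampledRed - 0.1f) / 0.9f, 0.0f);
}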
|
@@ -217,6 +217,20 @@ void MeshPartPayload::bindMaterial(gpu::Batch& batch, const ShapePipeline::Locat
|
|||
batch.setResourceTexture(ShapePipeline::Slot::MAP::OCCLUSION, nullptr);
|
||||
}
|
||||
|
||||
// Scattering map
|
||||
if (materialKey.isScatteringMap()) {
|
||||
auto scatteringMap = textureMaps[model::MaterialKey::SCATTERING_MAP];
|
||||
if (scatteringMap && scatteringMap->isDefined()) {
|
||||
batch.setResourceTexture(ShapePipeline::Slot::MAP::SCATTERING, scatteringMap->getTextureView());
|
||||
|
||||
// texcoords are assumed to be the same as albedo
|
||||
} else {
|
||||
batch.setResourceTexture(ShapePipeline::Slot::MAP::SCATTERING, textureCache->getWhiteTexture());
|
||||
}
|
||||
} else {
|
||||
batch.setResourceTexture(ShapePipeline::Slot::MAP::SCATTERING, nullptr);
|
||||
}
|
||||
|
||||
// Emissive / Lightmap
|
||||
if (materialKey.isLightmapMap()) {
|
||||
auto lightmapMap = textureMaps[model::MaterialKey::LIGHTMAP_MAP];
|
||||
|
|
|
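The scattering-map binding above follows the same pattern as the other material maps: bind the real texture when the material has one and it is loaded, a neutral white texture while it is still streaming in, and nullptr when the material has no such map. A sketch of that decision, written against a made-up bindTexture callback rather than the real gpu::Batch API:

#include <functional>
#include <memory>

struct Texture {};
using TexturePointer = std::shared_ptr<Texture>;

// Hypothetical stand-in for batch.setResourceTexture(slot, ...).
using BindTextureFn = std::function<void(int slot, const TexturePointer&)>;

void bindMapOrFallback(bool materialHasMap, const TexturePointer& map, bool mapIsLoaded,
                       const TexturePointer& whiteFallback, int slot, const BindTextureFn& bindTexture) {
    if (!materialHasMap) {
        bindTexture(slot, nullptr);          // material does not use this map
    } else if (map && mapIsLoaded) {
        bindTexture(slot, map);              // ready: bind the real texture
    } else {
        bindTexture(slot, whiteFallback);    // still loading: neutral placeholder
    }
}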
@@ -23,9 +23,12 @@
|
|||
#include <render/DrawTask.h>
|
||||
#include <render/DrawStatus.h>
|
||||
#include <render/DrawSceneOctree.h>
|
||||
#include <render/BlurTask.h>
|
||||
|
||||
#include "LightingModel.h"
|
||||
#include "DebugDeferredBuffer.h"
|
||||
#include "DeferredLightingEffect.h"
|
||||
#include "SurfaceGeometryPass.h"
|
||||
#include "FramebufferCache.h"
|
||||
#include "HitEffect.h"
|
||||
#include "TextureCache.h"
|
||||
|
@@ -33,6 +36,7 @@
|
|||
#include "AmbientOcclusionEffect.h"
|
||||
#include "AntialiasingEffect.h"
|
||||
#include "ToneMappingEffect.h"
|
||||
#include "SubsurfaceScattering.h"
|
||||
|
||||
using namespace render;
|
||||
|
||||
|
@@ -40,14 +44,6 @@ extern void initStencilPipeline(gpu::PipelinePointer& pipeline);
|
|||
extern void initOverlay3DPipelines(render::ShapePlumber& plumber);
|
||||
extern void initDeferredPipelines(render::ShapePlumber& plumber);
|
||||
|
||||
void PrepareDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
|
||||
DependencyManager::get<DeferredLightingEffect>()->prepare(renderContext->args);
|
||||
}
|
||||
|
||||
void RenderDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
|
||||
DependencyManager::get<DeferredLightingEffect>()->render(renderContext);
|
||||
}
|
||||
|
||||
RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
|
||||
cullFunctor = cullFunctor ? cullFunctor : [](const RenderArgs*, const AABox&){ return true; };
|
||||
|
||||
|
@@ -92,6 +88,11 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
|
|||
const auto overlayTransparents = addJob<DepthSortItems>("DepthSortOverlayTransparent", filteredNonspatialBuckets[TRANSPARENT_SHAPE_BUCKET], DepthSortItems(false));
|
||||
const auto background = filteredNonspatialBuckets[BACKGROUND_BUCKET];
|
||||
|
||||
// Prepare deferred, generate the shared Deferred Frame Transform
|
||||
const auto deferredFrameTransform = addJob<GenerateDeferredFrameTransform>("DeferredFrameTransform");
|
||||
const auto lightingModel = addJob<MakeLightingModel>("LightingModel");
|
||||
|
||||
|
||||
// GPU jobs: Start preparing the deferred and lighting buffer
|
||||
addJob<PrepareDeferred>("PrepareDeferred");
|
||||
|
||||
|
@@ -104,21 +105,44 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
|
|||
// Use Stencil and start drawing background in Lighting buffer
|
||||
addJob<DrawBackgroundDeferred>("DrawBackgroundDeferred", background);
|
||||
|
||||
// Opaques are all rendered; generate the surface geometry buffers
|
||||
const auto curvatureFramebufferAndDepth = addJob<SurfaceGeometryPass>("SurfaceGeometry", deferredFrameTransform);
|
||||
|
||||
|
||||
const auto theCurvatureVarying = curvatureFramebufferAndDepth[0];
|
||||
|
||||
//#define SIMPLE_BLUR 1
|
||||
#if SIMPLE_BLUR
|
||||
const auto curvatureFramebuffer = addJob<render::BlurGaussian>("DiffuseCurvature", curvatureFramebufferAndDepth.get<SurfaceGeometryPass::Outputs>().first);
|
||||
const auto diffusedCurvatureFramebuffer = addJob<render::BlurGaussian>("DiffuseCurvature2", curvatureFramebufferAndDepth.get<SurfaceGeometryPass::Outputs>().first, true);
|
||||
#else
|
||||
const auto curvatureFramebuffer = addJob<render::BlurGaussianDepthAware>("DiffuseCurvature", curvatureFramebufferAndDepth);
|
||||
const auto diffusedCurvatureFramebuffer = addJob<render::BlurGaussianDepthAware>("DiffuseCurvature2", curvatureFramebufferAndDepth, true);
|
||||
#endif
|
||||
|
||||
const auto scatteringResource = addJob<SubsurfaceScattering>("Scattering");
|
||||
|
||||
// AO job
|
||||
addJob<AmbientOcclusionEffect>("AmbientOcclusion");
|
||||
|
||||
// DrawLight just adds the lights to the current list of lights to deal with. Not really a gpu job for now.
|
||||
addJob<DrawLight>("DrawLight", lights);
|
||||
|
||||
const auto deferredLightingInputs = render::Varying(RenderDeferred::Inputs(deferredFrameTransform, lightingModel, curvatureFramebuffer, diffusedCurvatureFramebuffer, scatteringResource));
|
||||
|
||||
// DeferredBuffer is complete, now let's shade it into the LightingBuffer
|
||||
addJob<RenderDeferred>("RenderDeferred");
|
||||
addJob<RenderDeferred>("RenderDeferred", deferredLightingInputs);
|
||||
|
||||
|
||||
// AA job to be revisited
|
||||
addJob<Antialiasing>("Antialiasing");
|
||||
|
||||
// Render transparent objects forward in LightingBuffer
|
||||
addJob<DrawDeferred>("DrawTransparentDeferred", transparents, shapePlumber);
|
||||
|
||||
|
||||
addJob<DebugSubsurfaceScattering>("DebugScattering", deferredLightingInputs);
|
||||
|
||||
|
||||
// Lighting Buffer ready for tone mapping
|
||||
addJob<ToneMappingDeferred>("ToneMapping");
|
||||
|
||||
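The two DiffuseCurvature jobs blur the curvature buffer twice, the second time more aggressively, to produce the mid- and low-frequency normal/curvature inputs (midNormal / lowNormal) consumed by the scattering shading. As an illustration of the idea only (the engine path uses render::BlurGaussianDepthAware, which additionally weights samples by depth), here is a plain separable Gaussian blur over a single-channel float image:

#include <algorithm>
#include <cmath>
#include <vector>

// One horizontal-then-vertical Gaussian pass over a width x height float image.
// No depth awareness here; this only shows the "diffuse it twice, wider the second time" idea.
std::vector<float> gaussianBlur(const std::vector<float>& src, int width, int height, float sigma) {
    const int radius = std::max(1, (int)std::ceil(3.0f * sigma));
    std::vector<float> kernel(2 * radius + 1);
    float sum = 0.0f;
    for (int i = -radius; i <= radius; ++i) {
        kernel[i + radius] = std::exp(-(i * i) / (2.0f * sigma * sigma));
        sum += kernel[i + radius];
    }
    for (float& k : kernel) { k /= sum; }

    auto clampi = [](int v, int lo, int hi) { return v < lo ? lo : (v > hi ? hi : v); };
    std::vector<float> tmp(src.size()), dst(src.size());
    for (int y = 0; y < height; ++y) {            // horizontal pass
        for (int x = 0; x < width; ++x) {
            float acc = 0.0f;
            for (int i = -radius; i <= radius; ++i) {
                acc += kernel[i + radius] * src[y * width + clampi(x + i, 0, width - 1)];
            }
            tmp[y * width + x] = acc;
        }
    }
    for (int y = 0; y < height; ++y) {            // vertical pass
        for (int x = 0; x < width; ++x) {
            float acc = 0.0f;
            for (int i = -radius; i <= radius; ++i) {
                acc += kernel[i + radius] * tmp[clampi(y + i, 0, height - 1) * width + x];
            }
            dst[y * width + x] = acc;
        }
    }
    return dst;
}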
|
@@ -126,11 +150,13 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
|
|||
addJob<DrawOverlay3D>("DrawOverlay3DOpaque", overlayOpaques, true);
|
||||
addJob<DrawOverlay3D>("DrawOverlay3DTransparent", overlayTransparents, false);
|
||||
|
||||
|
||||
|
||||
// Debugging stages
|
||||
{
|
||||
|
||||
// Debugging Deferred buffer job
|
||||
addJob<DebugDeferredBuffer>("DebugDeferredBuffer");
|
||||
const auto debugFramebuffers = render::Varying(DebugDeferredBuffer::Inputs(diffusedCurvatureFramebuffer, curvatureFramebuffer));
|
||||
addJob<DebugDeferredBuffer>("DebugDeferredBuffer", debugFramebuffers);
|
||||
|
||||
// Scene Octree Debugging job
|
||||
{
|
||||
|
@@ -147,9 +173,6 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
|
|||
}
|
||||
}
|
||||
|
||||
// FIXME: Hit effect is never used, let's hide it for now, probably a more generic way to add custom post process effects
|
||||
// addJob<HitEffect>("HitEffect");
|
||||
|
||||
// Blit!
|
||||
addJob<Blit>("Blit");
|
||||
}
|
||||
|
|
|
@@ -15,28 +15,6 @@
|
|||
#include <gpu/Pipeline.h>
|
||||
#include <render/CullTask.h>
|
||||
|
||||
class SetupDeferred {
|
||||
public:
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
|
||||
|
||||
using JobModel = render::Job::Model<SetupDeferred>;
|
||||
};
|
||||
|
||||
|
||||
class PrepareDeferred {
|
||||
public:
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
|
||||
|
||||
using JobModel = render::Job::Model<PrepareDeferred>;
|
||||
};
|
||||
|
||||
class RenderDeferred {
|
||||
public:
|
||||
using JobModel = render::Job::Model<RenderDeferred>;
|
||||
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
|
||||
};
|
||||
|
||||
class DrawConfig : public render::Job::Config {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(int numDrawn READ getNumDrawn NOTIFY newStats)
|
||||
|
|
libraries/render-utils/src/SubsurfaceScattering.cpp (new file, 586 lines)
@@ -0,0 +1,586 @@
|
|||
//
|
||||
// SubsurfaceScattering.cpp
|
||||
// libraries/render-utils/src/
|
||||
//
|
||||
// Created by Sam Gateau 6/3/2016.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "SubsurfaceScattering.h"
|
||||
|
||||
#include <gpu/Context.h>
|
||||
#include <gpu/StandardShaderLib.h>
|
||||
|
||||
#include "FramebufferCache.h"
|
||||
|
||||
#include "DeferredLightingEffect.h"
|
||||
|
||||
#include "subsurfaceScattering_makeProfile_frag.h"
|
||||
#include "subsurfaceScattering_makeLUT_frag.h"
|
||||
#include "subsurfaceScattering_makeSpecularBeckmann_frag.h"
|
||||
|
||||
#include "subsurfaceScattering_drawScattering_frag.h"
|
||||
|
||||
enum ScatteringShaderBufferSlots {
|
||||
ScatteringTask_FrameTransformSlot = 0,
|
||||
ScatteringTask_ParamSlot,
|
||||
ScatteringTask_LightSlot,
|
||||
};
|
||||
enum ScatteringShaderMapSlots {
|
||||
ScatteringTask_ScatteringTableSlot = 0,
|
||||
ScatteringTask_CurvatureMapSlot,
|
||||
ScatteringTask_DiffusedCurvatureMapSlot,
|
||||
ScatteringTask_NormalMapSlot,
|
||||
|
||||
ScatteringTask_AlbedoMapSlot,
|
||||
ScatteringTask_LinearMapSlot,
|
||||
|
||||
ScatteringTask_IBLMapSlot,
|
||||
|
||||
};
|
||||
|
||||
SubsurfaceScatteringResource::SubsurfaceScatteringResource() {
|
||||
Parameters parameters;
|
||||
_parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Parameters), (const gpu::Byte*) ¶meters));
|
||||
|
||||
}
|
||||
|
||||
void SubsurfaceScatteringResource::setBentNormalFactors(const glm::vec4& rgbsBentFactors) {
|
||||
if (rgbsBentFactors != getBentNormalFactors()) {
|
||||
_parametersBuffer.edit<Parameters>().normalBentInfo = rgbsBentFactors;
|
||||
}
|
||||
}
|
||||
|
||||
glm::vec4 SubsurfaceScatteringResource::getBentNormalFactors() const {
|
||||
return _parametersBuffer.get<Parameters>().normalBentInfo;
|
||||
}
|
||||
|
||||
void SubsurfaceScatteringResource::setCurvatureFactors(const glm::vec2& sbCurvatureFactors) {
|
||||
if (sbCurvatureFactors != getCurvatureFactors()) {
|
||||
_parametersBuffer.edit<Parameters>().curvatureInfo = sbCurvatureFactors;
|
||||
}
|
||||
}
|
||||
|
||||
glm::vec2 SubsurfaceScatteringResource::getCurvatureFactors() const {
|
||||
return _parametersBuffer.get<Parameters>().curvatureInfo;
|
||||
}
|
||||
|
||||
|
||||
void SubsurfaceScatteringResource::setLevel(float level) {
|
||||
if (level != getLevel()) {
|
||||
_parametersBuffer.edit<Parameters>().level = level;
|
||||
}
|
||||
}
|
||||
float SubsurfaceScatteringResource::getLevel() const {
|
||||
return _parametersBuffer.get<Parameters>().level;
|
||||
}
|
||||
|
||||
void SubsurfaceScatteringResource::setShowBRDF(bool show) {
|
||||
if (show != isShowBRDF()) {
|
||||
_parametersBuffer.edit<Parameters>().showBRDF = show;
|
||||
}
|
||||
}
|
||||
bool SubsurfaceScatteringResource::isShowBRDF() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().showBRDF;
|
||||
}
|
||||
|
||||
void SubsurfaceScatteringResource::setShowCurvature(bool show) {
|
||||
if (show != isShowCurvature()) {
|
||||
_parametersBuffer.edit<Parameters>().showCurvature = show;
|
||||
}
|
||||
}
|
||||
bool SubsurfaceScatteringResource::isShowCurvature() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().showCurvature;
|
||||
}
|
||||
|
||||
void SubsurfaceScatteringResource::setShowDiffusedNormal(bool show) {
|
||||
if (show != isShowDiffusedNormal()) {
|
||||
_parametersBuffer.edit<Parameters>().showDiffusedNormal = show;
|
||||
}
|
||||
}
|
||||
bool SubsurfaceScatteringResource::isShowDiffusedNormal() const {
|
||||
return (bool)_parametersBuffer.get<Parameters>().showDiffusedNormal;
|
||||
}
|
||||
|
||||
void SubsurfaceScatteringResource::generateScatteringTable(RenderArgs* args) {
|
||||
if (!_scatteringProfile) {
|
||||
_scatteringProfile = generateScatteringProfile(args);
|
||||
}
|
||||
if (!_scatteringTable) {
|
||||
_scatteringTable = generatePreIntegratedScattering(_scatteringProfile, args);
|
||||
}
|
||||
if (!_scatteringSpecular) {
|
||||
_scatteringSpecular = generateScatteringSpecularBeckmann(args);
|
||||
}
|
||||
}
|
||||
|
||||
SubsurfaceScattering::SubsurfaceScattering() {
|
||||
_scatteringResource = std::make_shared<SubsurfaceScatteringResource>();
|
||||
}
|
||||
|
||||
void SubsurfaceScattering::configure(const Config& config) {
|
||||
|
||||
glm::vec4 bentInfo(config.bentRed, config.bentGreen, config.bentBlue, config.bentScale);
|
||||
_scatteringResource->setBentNormalFactors(bentInfo);
|
||||
|
||||
glm::vec2 curvatureInfo(config.curvatureOffset, config.curvatureScale);
|
||||
_scatteringResource->setCurvatureFactors(curvatureInfo);
|
||||
|
||||
_scatteringResource->setLevel((float)config.enableScattering);
|
||||
_scatteringResource->setShowBRDF(config.showScatteringBRDF);
|
||||
_scatteringResource->setShowCurvature(config.showCurvature);
|
||||
_scatteringResource->setShowDiffusedNormal(config.showDiffusedNormal);
|
||||
}
|
||||
|
||||
void SubsurfaceScattering::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, Outputs& outputs) {
|
||||
assert(renderContext->args);
|
||||
assert(renderContext->args->hasViewFrustum());
|
||||
|
||||
if (!_scatteringResource->getScatteringTable()) {
|
||||
_scatteringResource->generateScatteringTable(renderContext->args);
|
||||
}
|
||||
|
||||
outputs = _scatteringResource;
|
||||
}
|
||||
|
||||
#ifdef GENERATE_SCATTERING_RESOURCE_ON_CPU
|
||||
|
||||
// Reference: http://www.altdevblogaday.com/2011/12/31/skin-shading-in-unity3d/
|
||||
#include <cstdio>
|
||||
#include <cmath>
|
||||
#include <algorithm>
|
||||
|
||||
#define _PI 3.14159265358979523846
|
||||
|
||||
using namespace std;
|
||||
|
||||
double gaussian(float v, float r) {
|
||||
double g = (1.0 / sqrt(2.0 * _PI * v)) * exp(-(r*r) / (2.0 * v));
|
||||
return g;
|
||||
}
|
||||
|
||||
vec3 scatter(double r) {
|
||||
// Values from GPU Gems 3 "Advanced Skin Rendering".
|
||||
// Originally taken from real life samples.
|
||||
static const double profile[][4] = {
|
||||
{ 0.0064, 0.233, 0.455, 0.649 },
|
||||
{ 0.0484, 0.100, 0.336, 0.344 },
|
||||
{ 0.1870, 0.118, 0.198, 0.000 },
|
||||
{ 0.5670, 0.113, 0.007, 0.007 },
|
||||
{ 1.9900, 0.358, 0.004, 0.000 },
|
||||
{ 7.4100, 0.078, 0.000, 0.000 }
|
||||
};
|
||||
static const int profileNum = 6;
|
||||
vec3 ret(0.0);
|
||||
for (int i = 0; i < profileNum; i++) {
|
||||
double g = gaussian(profile[i][0] * 1.414f, r);
|
||||
ret.x += g * profile[i][1];
|
||||
ret.y += g * profile[i][2];
|
||||
ret.z += g * profile[i][3];
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
vec3 integrate(double cosTheta, double skinRadius) {
|
||||
// Angle from lighting direction.
|
||||
double theta = acos(cosTheta);
|
||||
vec3 totalWeights(0.0);
|
||||
vec3 totalLight(0.0);
|
||||
vec3 skinColour(1.0);
|
||||
|
||||
double a = -(_PI);
|
||||
|
||||
double inc = 0.005;
|
||||
|
||||
while (a <= (_PI)) {
|
||||
double sampleAngle = theta + a;
|
||||
double diffuse = cos(sampleAngle);
|
||||
if (diffuse < 0.0) diffuse = 0.0;
|
||||
if (diffuse > 1.0) diffuse = 1.0;
|
||||
|
||||
// Distance.
|
||||
double sampleDist = abs(2.0 * skinRadius * sin(a * 0.5));
|
||||
|
||||
// Profile Weight.
|
||||
vec3 weights = scatter(sampleDist);
|
||||
|
||||
totalWeights += weights;
|
||||
totalLight.x += diffuse * weights.x * (skinColour.x * skinColour.x);
|
||||
totalLight.y += diffuse * weights.y * (skinColour.y * skinColour.y);
|
||||
totalLight.z += diffuse * weights.z * (skinColour.z * skinColour.z);
|
||||
a += inc;
|
||||
}
|
||||
|
||||
vec3 result;
|
||||
result.x = totalLight.x / totalWeights.x;
|
||||
result.y = totalLight.y / totalWeights.y;
|
||||
result.z = totalLight.z / totalWeights.z;
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
void diffuseScatter(gpu::TexturePointer& lut) {
|
||||
int width = lut->getWidth();
|
||||
int height = lut->getHeight();
|
||||
|
||||
const int COMPONENT_COUNT = 4;
|
||||
std::vector<unsigned char> bytes(COMPONENT_COUNT * height * width);
|
||||
|
||||
int index = 0;
|
||||
for (int j = 0; j < height; j++) {
|
||||
for (int i = 0; i < width; i++) {
|
||||
// Lookup by: x: NDotL y: 1 / r
|
||||
float y = 2.0 * 1.0 / ((j + 1.0) / (double)height);
|
||||
float x = ((i / (double)width) * 2.0) - 1.0;
|
||||
vec3 val = integrate(x, y);
|
||||
|
||||
// Convert to linear
|
||||
// val.x = sqrt(val.x);
|
||||
// val.y = sqrt(val.y);
|
||||
// val.z = sqrt(val.z);
|
||||
|
||||
// Convert to 24-bit image.
|
||||
unsigned char valI[3];
|
||||
if (val.x > 1.0) val.x = 1.0;
|
||||
if (val.y > 1.0) val.y = 1.0;
|
||||
if (val.z > 1.0) val.z = 1.0;
|
||||
valI[0] = (unsigned char)(val.x * 256.0);
|
||||
valI[1] = (unsigned char)(val.y * 256.0);
|
||||
valI[2] = (unsigned char)(val.z * 256.0);
|
||||
|
||||
bytes[COMPONENT_COUNT * index] = valI[0];
|
||||
bytes[COMPONENT_COUNT * index + 1] = valI[1];
|
||||
bytes[COMPONENT_COUNT * index + 2] = valI[2];
|
||||
bytes[COMPONENT_COUNT * index + 3] = 255.0;
|
||||
|
||||
index++;
|
||||
}
|
||||
}
|
||||
|
||||
lut->assignStoredMip(0, gpu::Element::COLOR_RGBA_32, bytes.size(), bytes.data());
|
||||
}
|
||||
|
||||
|
||||
void diffuseProfile(gpu::TexturePointer& profile) {
|
||||
int width = profile->getWidth();
|
||||
int height = profile->getHeight();
|
||||
|
||||
const int COMPONENT_COUNT = 4;
|
||||
std::vector<unsigned char> bytes(COMPONENT_COUNT * height * width);
|
||||
|
||||
int index = 0;
|
||||
for (int j = 0; j < height; j++) {
|
||||
for (int i = 0; i < width; i++) {
|
||||
float y = (double)(i + 1.0) / (double)width;
|
||||
vec3 val = scatter(y * 2.0f);
|
||||
|
||||
// Convert to 24-bit image.
|
||||
unsigned char valI[3];
|
||||
if (val.x > 1.0) val.x = 1.0;
|
||||
if (val.y > 1.0) val.y = 1.0;
|
||||
if (val.z > 1.0) val.z = 1.0;
|
||||
valI[0] = (unsigned char)(val.x * 255.0);
|
||||
valI[1] = (unsigned char)(val.y * 255.0);
|
||||
valI[2] = (unsigned char)(val.z * 255.0);
|
||||
|
||||
bytes[COMPONENT_COUNT * index] = valI[0];
|
||||
bytes[COMPONENT_COUNT * index + 1] = valI[1];
|
||||
bytes[COMPONENT_COUNT * index + 2] = valI[2];
|
||||
bytes[COMPONENT_COUNT * index + 3] = 255.0;
|
||||
|
||||
index++;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
profile->assignStoredMip(0, gpu::Element::COLOR_RGBA_32, bytes.size(), bytes.data());
|
||||
}
|
||||
|
||||
#endif
|
||||
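The CPU reference above builds the pre-integrated diffusion table with the x axis encoding NdotL remapped from [-1,1] and the y axis encoding the inverse skin radius (curvature), per the comment in diffuseScatter. At shading time the lookup is just the inverse mapping; below is a sketch of that sampling step against a raw RGBA8 copy of the LUT. The exact curvature-axis encoding has to match whatever the generation pass wrote, so treat the v parameter here as an assumption; the shader-side counterpart lives in SubsurfaceScattering.slh.

#include <algorithm>
#include <cstdint>
#include <vector>
#include <glm/glm.hpp>

// Nearest-texel fetch into an RGBA8 copy of the pre-integrated scattering LUT.
glm::vec3 sampleScatteringLUT(const std::vector<uint8_t>& rgba, int width, int height, float NdotL, float v) {
    float u = glm::clamp(NdotL * 0.5f + 0.5f, 0.0f, 1.0f);   // x axis: NdotL remapped to [0,1]
    v = glm::clamp(v, 0.0f, 1.0f);                           // y axis: curvature coordinate (assumed encoding)
    int x = std::min(width - 1, (int)(u * width));
    int y = std::min(height - 1, (int)(v * height));
    const uint8_t* texel = &rgba[4 * (y * width + x)];
    return glm::vec3(texel[0], texel[1], texel[2]) / 255.0f;
}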
|
||||
void diffuseProfileGPU(gpu::TexturePointer& profileMap, RenderArgs* args) {
|
||||
int width = profileMap->getWidth();
|
||||
int height = profileMap->getHeight();
|
||||
|
||||
gpu::PipelinePointer makePipeline;
|
||||
{
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
auto ps = gpu::Shader::createPixel(std::string(subsurfaceScattering_makeProfile_frag));
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
makePipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
auto makeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
makeFramebuffer->setRenderBuffer(0, profileMap);
|
||||
|
||||
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
|
||||
batch.setViewportTransform(glm::ivec4(0, 0, width, height));
|
||||
|
||||
batch.setFramebuffer(makeFramebuffer);
|
||||
batch.setPipeline(makePipeline);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
batch.setResourceTexture(0, nullptr);
|
||||
batch.setPipeline(nullptr);
|
||||
batch.setFramebuffer(nullptr);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
void diffuseScatterGPU(const gpu::TexturePointer& profileMap, gpu::TexturePointer& lut, RenderArgs* args) {
|
||||
int width = lut->getWidth();
|
||||
int height = lut->getHeight();
|
||||
|
||||
gpu::PipelinePointer makePipeline;
|
||||
{
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
auto ps = gpu::Shader::createPixel(std::string(subsurfaceScattering_makeLUT_frag));
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("scatteringProfile"), 0));
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
makePipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
auto makeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
makeFramebuffer->setRenderBuffer(0, lut);
|
||||
|
||||
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
|
||||
batch.setViewportTransform(glm::ivec4(0, 0, width, height));
|
||||
|
||||
batch.setFramebuffer(makeFramebuffer);
|
||||
batch.setPipeline(makePipeline);
|
||||
batch.setResourceTexture(0, profileMap);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
batch.setResourceTexture(0, nullptr);
|
||||
batch.setPipeline(nullptr);
|
||||
batch.setFramebuffer(nullptr);
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
void computeSpecularBeckmannGPU(gpu::TexturePointer& beckmannMap, RenderArgs* args) {
|
||||
int width = beckmannMap->getWidth();
|
||||
int height = beckmannMap->getHeight();
|
||||
|
||||
gpu::PipelinePointer makePipeline;
|
||||
{
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
auto ps = gpu::Shader::createPixel(std::string(subsurfaceScattering_makeSpecularBeckmann_frag));
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
makePipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
auto makeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
makeFramebuffer->setRenderBuffer(0, beckmannMap);
|
||||
|
||||
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
|
||||
batch.setViewportTransform(glm::ivec4(0, 0, width, height));
|
||||
|
||||
batch.setFramebuffer(makeFramebuffer);
|
||||
batch.setPipeline(makePipeline);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
batch.setResourceTexture(0, nullptr);
|
||||
batch.setPipeline(nullptr);
|
||||
batch.setFramebuffer(nullptr);
|
||||
});
|
||||
}
|
||||
|
||||
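computeSpecularBeckmannGPU bakes a Beckmann NDF into a texture that fetchSpecularBeckmann later decodes with pow(2 * texel, 10) (see LightingModel.slh above). A CPU sketch of that pair, assuming the standard Beckmann distribution and the 0.5 * ndf^0.1 range compression that the decode implies:

#include <algorithm>
#include <cmath>

// Beckmann microfacet distribution for a given N.H and roughness m.
float beckmannNDF(float ndoth, float m) {
    ndoth = std::max(ndoth, 1e-4f);                 // avoid the singularity at grazing half-angles
    float alpha = std::acos(ndoth);
    float ta = std::tan(alpha);
    return std::exp(-(ta * ta) / (m * m)) / (m * m * std::pow(ndoth, 4.0f));
}

// Range compression so the stored value fits a displayable texture; this is the inverse of
// fetchSpecularBeckmann's pow(2.0 * texel, 10.0) decode.
float encodeBeckmann(float ndf)   { return 0.5f * std::pow(ndf, 0.1f); }
float decodeBeckmann(float texel) { return std::pow(2.0f * texel, 10.0f); }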
gpu::TexturePointer SubsurfaceScatteringResource::generateScatteringProfile(RenderArgs* args) {
|
||||
const int PROFILE_RESOLUTION = 512;
|
||||
auto profileMap = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_SRGBA_32, PROFILE_RESOLUTION, 1, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR, gpu::Sampler::WRAP_CLAMP)));
|
||||
diffuseProfileGPU(profileMap, args);
|
||||
return profileMap;
|
||||
}
|
||||
|
||||
gpu::TexturePointer SubsurfaceScatteringResource::generatePreIntegratedScattering(const gpu::TexturePointer& profile, RenderArgs* args) {
|
||||
|
||||
const int TABLE_RESOLUTION = 512;
|
||||
auto scatteringLUT = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_SRGBA_32, TABLE_RESOLUTION, TABLE_RESOLUTION, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR, gpu::Sampler::WRAP_CLAMP)));
|
||||
//diffuseScatter(scatteringLUT);
|
||||
diffuseScatterGPU(profile, scatteringLUT, args);
|
||||
return scatteringLUT;
|
||||
}
|
||||
|
||||
gpu::TexturePointer SubsurfaceScatteringResource::generateScatteringSpecularBeckmann(RenderArgs* args) {
|
||||
const int SPECULAR_RESOLUTION = 256;
|
||||
auto beckmannMap = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32 /*gpu::Element(gpu::SCALAR, gpu::HALF, gpu::RGB)*/, SPECULAR_RESOLUTION, SPECULAR_RESOLUTION, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR, gpu::Sampler::WRAP_CLAMP)));
|
||||
computeSpecularBeckmannGPU(beckmannMap, args);
|
||||
return beckmannMap;
|
||||
}
|
||||
|
||||
DebugSubsurfaceScattering::DebugSubsurfaceScattering() {
|
||||
}
|
||||
|
||||
void DebugSubsurfaceScattering::configure(const Config& config) {
|
||||
|
||||
_showProfile = config.showProfile;
|
||||
_showLUT = config.showLUT;
|
||||
_showSpecularTable = config.showSpecularTable;
|
||||
_showCursorPixel = config.showCursorPixel;
|
||||
_debugCursorTexcoord = config.debugCursorTexcoord;
|
||||
}
|
||||
|
||||
|
||||
|
||||
gpu::PipelinePointer DebugSubsurfaceScattering::getScatteringPipeline() {
|
||||
if (!_scatteringPipeline) {
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
// auto vs = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
|
||||
auto ps = gpu::Shader::createPixel(std::string(subsurfaceScattering_drawScattering_frag));
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("deferredFrameTransformBuffer"), ScatteringTask_FrameTransformSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("scatteringParamsBuffer"), ScatteringTask_ParamSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), ScatteringTask_LightSlot));
|
||||
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("scatteringLUT"), ScatteringTask_ScatteringTableSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("curvatureMap"), ScatteringTask_CurvatureMapSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("diffusedCurvatureMap"), ScatteringTask_DiffusedCurvatureMapSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("normalMap"), ScatteringTask_NormalMapSlot));
|
||||
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("albedoMap"), ScatteringTask_AlbedoMapSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("linearDepthMap"), ScatteringTask_LinearMapSlot));
|
||||
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), ScatteringTask_IBLMapSlot));
|
||||
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
_scatteringPipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
return _scatteringPipeline;
|
||||
}
|
||||
|
||||
|
||||
gpu::PipelinePointer _showLUTPipeline;
|
||||
gpu::PipelinePointer getShowLUTPipeline();
|
||||
gpu::PipelinePointer DebugSubsurfaceScattering::getShowLUTPipeline() {
|
||||
if (!_showLUTPipeline) {
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
auto ps = gpu::StandardShaderLib::getDrawTextureOpaquePS();
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
_showLUTPipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
return _showLUTPipeline;
|
||||
}
|
||||
|
||||
|
||||
void DebugSubsurfaceScattering::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs) {
|
||||
assert(renderContext->args);
|
||||
assert(renderContext->args->hasViewFrustum());
|
||||
|
||||
RenderArgs* args = renderContext->args;
|
||||
|
||||
|
||||
auto& frameTransform = inputs.get0();
|
||||
auto& curvatureFramebuffer = inputs.get2();
|
||||
auto& diffusedFramebuffer = inputs.get3();
|
||||
auto& scatteringResource = inputs.get4();
|
||||
|
||||
if (!scatteringResource) {
|
||||
return;
|
||||
}
|
||||
auto scatteringProfile = scatteringResource->getScatteringProfile();
|
||||
auto scatteringTable = scatteringResource->getScatteringTable();
|
||||
auto scatteringSpecular = scatteringResource->getScatteringSpecular();
|
||||
|
||||
auto framebufferCache = DependencyManager::get<FramebufferCache>();
|
||||
|
||||
|
||||
|
||||
const auto theLight = DependencyManager::get<DeferredLightingEffect>()->getLightStage().lights[0];
|
||||
|
||||
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
|
||||
|
||||
auto viewportSize = std::min(args->_viewport.z, args->_viewport.w) >> 1;
|
||||
auto offsetViewport = viewportSize * 0.1;
|
||||
|
||||
if (_showProfile) {
|
||||
batch.setViewportTransform(glm::ivec4(0, 0, viewportSize, offsetViewport));
|
||||
batch.setPipeline(getShowLUTPipeline());
|
||||
batch.setResourceTexture(0, scatteringProfile);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
}
|
||||
|
||||
if (_showLUT) {
|
||||
batch.setViewportTransform(glm::ivec4(0, offsetViewport * 1.5, viewportSize, viewportSize));
|
||||
batch.setPipeline(getShowLUTPipeline());
|
||||
batch.setResourceTexture(0, scatteringTable);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
if (_showCursorPixel) {
|
||||
|
||||
auto debugScatteringPipeline = getScatteringPipeline();
|
||||
batch.setPipeline(debugScatteringPipeline);
|
||||
|
||||
Transform model;
|
||||
model.setTranslation(glm::vec3(0.0, offsetViewport * 1.5 / args->_viewport.w, 0.0));
|
||||
model.setScale(glm::vec3(viewportSize / (float)args->_viewport.z, viewportSize / (float)args->_viewport.w, 1.0));
|
||||
batch.setModelTransform(model);
|
||||
|
||||
batch.setUniformBuffer(ScatteringTask_FrameTransformSlot, frameTransform->getFrameTransformBuffer());
|
||||
batch.setUniformBuffer(ScatteringTask_ParamSlot, scatteringResource->getParametersBuffer());
|
||||
if (theLight->light) {
|
||||
batch.setUniformBuffer(ScatteringTask_LightSlot, theLight->light->getSchemaBuffer());
|
||||
}
|
||||
batch.setResourceTexture(ScatteringTask_ScatteringTableSlot, scatteringTable);
|
||||
batch.setResourceTexture(ScatteringTask_CurvatureMapSlot, curvatureFramebuffer->getRenderBuffer(0));
|
||||
batch.setResourceTexture(ScatteringTask_DiffusedCurvatureMapSlot, diffusedFramebuffer->getRenderBuffer(0));
|
||||
batch.setResourceTexture(ScatteringTask_NormalMapSlot, framebufferCache->getDeferredNormalTexture());
|
||||
batch.setResourceTexture(ScatteringTask_AlbedoMapSlot, framebufferCache->getDeferredColorTexture());
|
||||
batch.setResourceTexture(ScatteringTask_LinearMapSlot, framebufferCache->getDepthPyramidTexture());
|
||||
|
||||
|
||||
batch._glUniform2f(debugScatteringPipeline->getProgram()->getUniforms().findLocation("uniformCursorTexcoord"), _debugCursorTexcoord.x, _debugCursorTexcoord.y);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
}
|
||||
}
|
||||
|
||||
if (_showSpecularTable) {
|
||||
batch.setViewportTransform(glm::ivec4(viewportSize + offsetViewport * 0.5, 0, viewportSize * 0.5, viewportSize * 0.5));
|
||||
batch.setPipeline(getShowLUTPipeline());
|
||||
batch.setResourceTexture(0, scatteringSpecular);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
}
|
||||
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
|
||||
});
|
||||
}
|
libraries/render-utils/src/SubsurfaceScattering.h (new file, 187 lines)
@@ -0,0 +1,187 @@
|
|||
//
|
||||
// SubsurfaceScattering.h
|
||||
// libraries/render-utils/src/
|
||||
//
|
||||
// Created by Sam Gateau 6/3/2016.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_SubsurfaceScattering_h
|
||||
#define hifi_SubsurfaceScattering_h
|
||||
|
||||
#include <DependencyManager.h>
|
||||
|
||||
#include "render/DrawTask.h"
|
||||
#include "DeferredFrameTransform.h"
|
||||
#include "LightingModel.h"
|
||||
|
||||
class SubsurfaceScatteringResource {
|
||||
public:
|
||||
using UniformBufferView = gpu::BufferView;
|
||||
|
||||
SubsurfaceScatteringResource();
|
||||
|
||||
void setBentNormalFactors(const glm::vec4& rgbsBentFactors);
|
||||
glm::vec4 getBentNormalFactors() const;
|
||||
|
||||
void setCurvatureFactors(const glm::vec2& sbCurvatureFactors);
|
||||
glm::vec2 getCurvatureFactors() const;
|
||||
|
||||
void setLevel(float level);
|
||||
float getLevel() const;
|
||||
|
||||
|
||||
void setShowBRDF(bool show);
|
||||
bool isShowBRDF() const;
|
||||
void setShowCurvature(bool show);
|
||||
bool isShowCurvature() const;
|
||||
void setShowDiffusedNormal(bool show);
|
||||
bool isShowDiffusedNormal() const;
|
||||
|
||||
UniformBufferView getParametersBuffer() const { return _parametersBuffer; }
|
||||
|
||||
gpu::TexturePointer getScatteringProfile() const { return _scatteringProfile; }
|
||||
gpu::TexturePointer getScatteringTable() const { return _scatteringTable; }
|
||||
gpu::TexturePointer getScatteringSpecular() const { return _scatteringSpecular; }
|
||||
|
||||
void generateScatteringTable(RenderArgs* args);
|
||||
|
||||
static gpu::TexturePointer generateScatteringProfile(RenderArgs* args);
|
||||
static gpu::TexturePointer generatePreIntegratedScattering(const gpu::TexturePointer& profile, RenderArgs* args);
|
||||
static gpu::TexturePointer generateScatteringSpecularBeckmann(RenderArgs* args);
|
||||
|
||||
protected:
|
||||
|
||||
|
||||
// Class describing the uniform buffer holding the subsurface scattering parameters
|
||||
// It's changing every frame
|
||||
class Parameters {
|
||||
public:
|
||||
glm::vec4 normalBentInfo{ 1.5f, 0.8f, 0.3f, 1.5f };
|
||||
glm::vec2 curvatureInfo{ 0.08f, 0.8f };
|
||||
float level{ 1.0f };
|
||||
float showBRDF{ 0.0f };
|
||||
float showCurvature{ 0.0f };
|
||||
float showDiffusedNormal{ 0.0f };
|
||||
float spare1{ 0.0f };
|
||||
float spare2{ 0.0f };
|
||||
|
||||
|
||||
Parameters() {}
|
||||
};
|
||||
UniformBufferView _parametersBuffer;
|
||||
|
||||
|
||||
|
||||
gpu::TexturePointer _scatteringProfile;
|
||||
gpu::TexturePointer _scatteringTable;
|
||||
gpu::TexturePointer _scatteringSpecular;
|
||||
};
|
||||
|
||||
using SubsurfaceScatteringResourcePointer = std::shared_ptr<SubsurfaceScatteringResource>;
|
||||
|
||||
|
||||
|
||||
class SubsurfaceScatteringConfig : public render::Job::Config {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(float bentRed MEMBER bentRed NOTIFY dirty)
|
||||
Q_PROPERTY(float bentGreen MEMBER bentGreen NOTIFY dirty)
|
||||
Q_PROPERTY(float bentBlue MEMBER bentBlue NOTIFY dirty)
|
||||
Q_PROPERTY(float bentScale MEMBER bentScale NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(float curvatureOffset MEMBER curvatureOffset NOTIFY dirty)
|
||||
Q_PROPERTY(float curvatureScale MEMBER curvatureScale NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(bool enableScattering MEMBER enableScattering NOTIFY dirty)
|
||||
Q_PROPERTY(bool showScatteringBRDF MEMBER showScatteringBRDF NOTIFY dirty)
|
||||
Q_PROPERTY(bool showCurvature MEMBER showCurvature NOTIFY dirty)
|
||||
Q_PROPERTY(bool showDiffusedNormal MEMBER showDiffusedNormal NOTIFY dirty)
|
||||
|
||||
public:
|
||||
SubsurfaceScatteringConfig() : render::Job::Config(true) {}
|
||||
|
||||
float bentRed{ 1.5f };
|
||||
float bentGreen{ 0.8f };
|
||||
float bentBlue{ 0.3f };
|
||||
float bentScale{ 1.5f };
|
||||
|
||||
float curvatureOffset{ 0.08f };
|
||||
float curvatureScale{ 0.9f };
|
||||
|
||||
bool enableScattering{ true };
|
||||
bool showScatteringBRDF{ false };
|
||||
bool showCurvature{ false };
|
||||
bool showDiffusedNormal{ false };
|
||||
|
||||
signals:
|
||||
void dirty();
|
||||
};
|
||||
|
||||
class SubsurfaceScattering {
|
||||
public:
|
||||
//using Inputs = render::VaryingSet4<DeferredFrameTransformPointer, gpu::FramebufferPointer, gpu::FramebufferPointer, SubsurfaceScatteringResourcePointer>;
|
||||
using Outputs = SubsurfaceScatteringResourcePointer;
|
||||
using Config = SubsurfaceScatteringConfig;
|
||||
using JobModel = render::Job::ModelO<SubsurfaceScattering, Outputs, Config>;
|
||||
|
||||
SubsurfaceScattering();
|
||||
|
||||
void configure(const Config& config);
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, Outputs& outputs);
|
||||
|
||||
private:
|
||||
SubsurfaceScatteringResourcePointer _scatteringResource;
|
||||
};
|
||||
|
||||
|
||||
|
||||
class DebugSubsurfaceScatteringConfig : public render::Job::Config {
|
||||
Q_OBJECT
|
||||
|
||||
Q_PROPERTY(bool showProfile MEMBER showProfile NOTIFY dirty)
|
||||
Q_PROPERTY(bool showLUT MEMBER showLUT NOTIFY dirty)
|
||||
Q_PROPERTY(bool showSpecularTable MEMBER showSpecularTable NOTIFY dirty)
|
||||
Q_PROPERTY(bool showCursorPixel MEMBER showCursorPixel NOTIFY dirty)
|
||||
Q_PROPERTY(glm::vec2 debugCursorTexcoord MEMBER debugCursorTexcoord NOTIFY dirty)
|
||||
public:
|
||||
DebugSubsurfaceScatteringConfig() : render::Job::Config(true) {}
|
||||
|
||||
bool showProfile{ false };
|
||||
bool showLUT{ false };
|
||||
bool showSpecularTable{ false };
|
||||
bool showCursorPixel{ false };
|
||||
glm::vec2 debugCursorTexcoord{ 0.5, 0.5 };
|
||||
|
||||
signals:
|
||||
void dirty();
|
||||
};
|
||||
|
||||
class DebugSubsurfaceScattering {
|
||||
public:
|
||||
using Inputs = render::VaryingSet5<DeferredFrameTransformPointer, LightingModelPointer, gpu::FramebufferPointer, gpu::FramebufferPointer, SubsurfaceScatteringResourcePointer>;
|
||||
using Config = DebugSubsurfaceScatteringConfig;
|
||||
using JobModel = render::Job::ModelI<DebugSubsurfaceScattering, Inputs, Config>;
|
||||
|
||||
DebugSubsurfaceScattering();
|
||||
|
||||
void configure(const Config& config);
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs);
|
||||
|
||||
private:
|
||||
|
||||
gpu::PipelinePointer _scatteringPipeline;
|
||||
gpu::PipelinePointer getScatteringPipeline();
|
||||
|
||||
gpu::PipelinePointer _showLUTPipeline;
|
||||
gpu::PipelinePointer getShowLUTPipeline();
|
||||
bool _showProfile{ false };
|
||||
bool _showLUT{ false };
|
||||
bool _showSpecularTable{ false };
|
||||
bool _showCursorPixel{ false };
|
||||
glm::vec2 _debugCursorTexcoord;
|
||||
};
|
||||
|
||||
#endif // hifi_SubsurfaceScattering_h
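For reference, the C++ Parameters block above is consumed by the GLSL ScatteringParameters uniform block declared in SubsurfaceScattering.slh. The sketch below is illustrative only (it is not part of this commit) and simply spells out the field-to-vec4 correspondence; it assumes glm is available, as elsewhere in the codebase.

// Sketch: twelve floats pack into three vec4s, so the CPU struct and the GLSL UBO line up without padding.
#include <glm/glm.hpp>

struct ScatteringParametersCPU {      // mirrors SubsurfaceScatteringResource::Parameters
    glm::vec4 normalBentInfo;         // -> normalBendInfo (R, G, B bend factors, overall scale)
    glm::vec2 curvatureInfo;          // -> curvatureInfo.xy (offset, scale)
    float level;                      // -> curvatureInfo.z (getScatteringLevel)
    float showBRDF;                   // -> curvatureInfo.w (showBRDF)
    float showCurvature;              // -> debugFlags.x
    float showDiffusedNormal;         // -> debugFlags.y
    float spare1, spare2;             // -> debugFlags.zw (unused)
};

static_assert(sizeof(ScatteringParametersCPU) == 3 * sizeof(glm::vec4),
              "Parameters must pack into the three vec4s the shader reads");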
226 libraries/render-utils/src/SubsurfaceScattering.slh Normal file
@@ -0,0 +1,226 @@
// Generated on <$_SCRIBE_DATE$>
//
//  Created by Sam Gateau on 6/8/16.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@if not SUBSURFACE_SCATTERING_SLH@>
<@def SUBSURFACE_SCATTERING_SLH@>

<@func declareSubsurfaceScatteringProfileSource()@>

float gaussian(float v, float r) {
    const float _PI = 3.14159265358979523846;
    return (1.0 / sqrt(2.0 * _PI * v)) * exp(-(r*r) / (2.0 * v));
}

vec3 scatter(float r) {
    // r is the distance expressed in millimeter
    // returns the scatter reflectance
    // Values from GPU Gems 3 "Advanced Skin Rendering".
    // Originally taken from real life samples.
    const vec4 profile[6] = vec4[6](
        vec4(0.0064, 0.233, 0.455, 0.649),
        vec4(0.0484, 0.100, 0.336, 0.344),
        vec4(0.1870, 0.118, 0.198, 0.000),
        vec4(0.5670, 0.113, 0.007, 0.007),
        vec4(1.9900, 0.358, 0.004, 0.000),
        vec4(7.4100, 0.078, 0.000, 0.000)
    );
    const int profileNum = 6;

    vec3 ret = vec3(0.0);
    for (int i = 0; i < profileNum; i++) {
        float v = profile[i].x * 1.414;
        float g = gaussian(v, r);
        ret += g * profile[i].yzw;
    }

    return ret;
}

<@endfunc@>

<@func declareSubsurfaceScatteringGenerateProfileMap()@>
<$declareSubsurfaceScatteringProfileSource()$>

vec3 generateProfile(vec2 uv) {
    return scatter(uv.x * 2.0);
}

<@endfunc@>

<@func declareSubsurfaceScatteringProfileMap()@>

uniform sampler2D scatteringProfile;

vec3 scatter(float r) {
    return texture(scatteringProfile, vec2(r * 0.5, 0.5)).rgb;
}

<@endfunc@>


<@func declareSkinSpecularLighting()@>

uniform sampler2D scatteringSpecularBeckmann;

float fetchSpecularBeckmann(float ndoth, float roughness) {
    return pow(2.0 * texture(scatteringSpecularBeckmann, vec2(ndoth, roughness)).r, 10.0);
}

float fresnelReflectance(vec3 H, vec3 V, float Fo) {
    float base = 1.0 - dot(V, H);
    float exponential = pow(base, 5.0);
    return exponential + Fo * (1.0 - exponential);
}

float skinSpecular(vec3 N, vec3 L, vec3 V, float roughness, float intensity) {
    float result = 0.0;
    float ndotl = dot(N, L);
    if (ndotl > 0.0) {
        vec3 h = L + V;
        vec3 H = normalize(h);
        float ndoth = dot(N, H);
        float PH = fetchSpecularBeckmann(ndoth, roughness);
        float F = fresnelReflectance(H, V, 0.028);
        float frSpec = max(PH * F / dot(h, h), 0.0);
        result = ndotl * intensity * frSpec;
    }

    return result;
}

<@endfunc@>

<@func declareSubsurfaceScatteringIntegrate(NumIntegrationSteps)@>

vec3 integrate(float cosTheta, float skinRadius) {
    // Angle from lighting direction.
    float theta = acos(cosTheta);
    vec3 totalWeights = vec3(0.0);
    vec3 totalLight = vec3(0.0);

    const float _PI = 3.14159265358979523846;
    const float step = 2.0 * _PI / <$NumIntegrationSteps$>;
    float a = -(_PI);

    while (a <= (_PI)) {
        float sampleAngle = theta + a;
        float diffuse = clamp(cos(sampleAngle), 0.0, 1.0);
        //if (diffuse < 0.0) diffuse = 0.0;
        //if (diffuse > 1.0) diffuse = 1.0;

        // Distance.
        float sampleDist = abs(2.0 * skinRadius * sin(a * 0.5));

        // Profile Weight.
        vec3 weights = scatter(sampleDist);

        totalWeights += weights;
        totalLight += diffuse * weights;
        a += step;
    }

    vec3 result = (totalLight / totalWeights);
    return clamp(result, vec3(0.0), vec3(1.0));
}
<@endfunc@>

<@func declareSubsurfaceScatteringResource()@>

uniform sampler2D scatteringLUT;

vec3 fetchBRDF(float LdotN, float curvature) {
    return texture(scatteringLUT, vec2(clamp(LdotN * 0.5 + 0.5, 0.0, 1.0), clamp(2.0 * curvature, 0.0, 1.0))).xyz;
}

vec3 fetchBRDFSpectrum(vec3 LdotNSpectrum, float curvature) {
    return vec3(
        fetchBRDF(LdotNSpectrum.r, curvature).r,
        fetchBRDF(LdotNSpectrum.g, curvature).g,
        fetchBRDF(LdotNSpectrum.b, curvature).b);
}

// Subsurface Scattering parameters
struct ScatteringParameters {
    vec4 normalBendInfo; // R, G, B, factor
    vec4 curvatureInfo;  // Offset, Scale, level
    vec4 debugFlags;
};

uniform subsurfaceScatteringParametersBuffer {
    ScatteringParameters parameters;
};

vec3 getBendFactor() {
    return parameters.normalBendInfo.xyz * parameters.normalBendInfo.w;
}

float getScatteringLevel() {
    return parameters.curvatureInfo.z;
}

bool showBRDF() {
    return parameters.curvatureInfo.w > 0.0;
}

bool showCurvature() {
    return parameters.debugFlags.x > 0.0;
}
bool showDiffusedNormal() {
    return parameters.debugFlags.y > 0.0;
}


float tuneCurvatureUnsigned(float curvature) {
    return abs(curvature) * parameters.curvatureInfo.y + parameters.curvatureInfo.x;
}

float unpackCurvature(float packedCurvature) {
    return (packedCurvature * 2.0 - 1.0);
}

vec3 evalScatteringBentNdotL(vec3 normal, vec3 midNormal, vec3 lowNormal, vec3 lightDir) {
    vec3 bendFactorSpectrum = getBendFactor();
    // vec3 rN = normalize(mix(normal, lowNormal, bendFactorSpectrum.x));
    vec3 rN = normalize(mix(midNormal, lowNormal, bendFactorSpectrum.x));
    vec3 gN = normalize(mix(midNormal, lowNormal, bendFactorSpectrum.y));
    vec3 bN = normalize(mix(midNormal, lowNormal, bendFactorSpectrum.z));

    vec3 NdotLSpectrum = vec3(dot(rN, lightDir), dot(gN, lightDir), dot(bN, lightDir));

    return NdotLSpectrum;
}


<@endfunc@>

<@func declareSubsurfaceScatteringBRDF()@>
<$declareSubsurfaceScatteringResource()$>

vec3 evalSkinBRDF(vec3 lightDir, vec3 normal, vec3 midNormal, vec3 lowNormal, float curvature) {
    if (showDiffusedNormal()) {
        return lowNormal * 0.5 + vec3(0.5);
    }
    if (showCurvature()) {
        return (curvature > 0.0 ? vec3(curvature, 0.0, 0.0) : vec3(0.0, 0.0, -curvature));
    }

    vec3 bentNdotL = evalScatteringBentNdotL(normal, midNormal, lowNormal, lightDir);

    float tunedCurvature = tuneCurvatureUnsigned(curvature);

    vec3 brdf = fetchBRDFSpectrum(bentNdotL, tunedCurvature);
    return brdf;
}

<@endfunc@>

<@endif@>
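For reference, the pre-integration performed per LUT texel by declareSubsurfaceScatteringIntegrate() can be reproduced on the CPU. The sketch below is illustrative only (it is not part of this commit); it assumes glm and simply mirrors the scatter() and integrate() functions above.

// CPU-side sketch of the pre-integrated scattering LUT bake.
#include <glm/glm.hpp>
#include <cmath>

namespace {
const float kPi = 3.14159265358979f;

glm::vec3 scatterProfile(float r) {
    // Sum-of-Gaussians fit of the skin diffusion profile (GPU Gems 3 values), as in scatter().
    static const glm::vec4 profile[6] = {
        { 0.0064f, 0.233f, 0.455f, 0.649f }, { 0.0484f, 0.100f, 0.336f, 0.344f },
        { 0.1870f, 0.118f, 0.198f, 0.000f }, { 0.5670f, 0.113f, 0.007f, 0.007f },
        { 1.9900f, 0.358f, 0.004f, 0.000f }, { 7.4100f, 0.078f, 0.000f, 0.000f } };
    glm::vec3 ret(0.0f);
    for (const auto& p : profile) {
        float v = p.x * 1.414f;
        float g = (1.0f / std::sqrt(2.0f * kPi * v)) * std::exp(-(r * r) / (2.0f * v));
        ret += g * glm::vec3(p.y, p.z, p.w);
    }
    return ret;
}

// One LUT texel: integrate the diffusion profile around a ring of radius skinRadius,
// exactly like integrate(cosTheta, skinRadius) in the shader.
glm::vec3 integrateRing(float cosTheta, float skinRadius, int numSteps = 2000) {
    float theta = std::acos(cosTheta);
    glm::vec3 totalWeights(0.0f), totalLight(0.0f);
    float step = 2.0f * kPi / numSteps;
    for (float a = -kPi; a <= kPi; a += step) {
        float diffuse = glm::clamp(std::cos(theta + a), 0.0f, 1.0f);
        float sampleDist = std::abs(2.0f * skinRadius * std::sin(a * 0.5f));
        glm::vec3 weights = scatterProfile(sampleDist);
        totalWeights += weights;
        totalLight += diffuse * weights;
    }
    return glm::clamp(totalLight / totalWeights, glm::vec3(0.0f), glm::vec3(1.0f));
}
} // namespace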
16 libraries/render-utils/src/SurfaceGeometry.slh Normal file
@@ -0,0 +1,16 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
//  Created by Sam Gateau on 6/3/16.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include DeferredTransform.slh@>
<$declareDeferredFrameTransform()$>

164 libraries/render-utils/src/SurfaceGeometryPass.cpp Normal file
@@ -0,0 +1,164 @@
//
//  SurfaceGeometryPass.cpp
//  libraries/render-utils/src/
//
//  Created by Sam Gateau 6/3/2016.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "SurfaceGeometryPass.h"

#include <gpu/Context.h>
#include <gpu/StandardShaderLib.h>

#include "FramebufferCache.h"

const int SurfaceGeometryPass_FrameTransformSlot = 0;
const int SurfaceGeometryPass_ParamsSlot = 1;
const int SurfaceGeometryPass_DepthMapSlot = 0;
const int SurfaceGeometryPass_NormalMapSlot = 1;

#include "surfaceGeometry_makeLinearDepth_frag.h"
#include "surfaceGeometry_makeCurvature_frag.h"


SurfaceGeometryPass::SurfaceGeometryPass() {
    Parameters parameters;
    _parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Parameters), (const gpu::Byte*) &parameters));
}

void SurfaceGeometryPass::configure(const Config& config) {
    if (config.depthThreshold != getCurvatureDepthThreshold()) {
        _parametersBuffer.edit<Parameters>().curvatureInfo.x = config.depthThreshold;
    }

    if (config.basisScale != getCurvatureBasisScale()) {
        _parametersBuffer.edit<Parameters>().curvatureInfo.y = config.basisScale;
    }

    if (config.curvatureScale != getCurvatureScale()) {
        _parametersBuffer.edit<Parameters>().curvatureInfo.w = config.curvatureScale;
    }
}

void SurfaceGeometryPass::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, Outputs& curvatureAndDepth) {
    assert(renderContext->args);
    assert(renderContext->args->hasViewFrustum());

    RenderArgs* args = renderContext->args;

    auto framebufferCache = DependencyManager::get<FramebufferCache>();
    auto depthBuffer = framebufferCache->getPrimaryDepthTexture();
    auto normalTexture = framebufferCache->getDeferredNormalTexture();
    auto pyramidFBO = framebufferCache->getDepthPyramidFramebuffer();

    auto pyramidTexture = framebufferCache->getDepthPyramidTexture();
    auto curvatureFBO = framebufferCache->getCurvatureFramebuffer();

    curvatureAndDepth.edit0() = curvatureFBO;
    curvatureAndDepth.edit1() = pyramidTexture;

    auto curvatureTexture = framebufferCache->getCurvatureTexture();

    QSize framebufferSize = framebufferCache->getFrameBufferSize();
    float sMin = args->_viewport.x / (float)framebufferSize.width();
    float sWidth = args->_viewport.z / (float)framebufferSize.width();
    float tMin = args->_viewport.y / (float)framebufferSize.height();
    float tHeight = args->_viewport.w / (float)framebufferSize.height();

    auto linearDepthPipeline = getLinearDepthPipeline();
    auto curvaturePipeline = getCurvaturePipeline();

    gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
        batch.enableStereo(false);

        batch.setViewportTransform(args->_viewport);
        batch.setProjectionTransform(glm::mat4());
        batch.setViewTransform(Transform());

        Transform model;
        model.setTranslation(glm::vec3(sMin, tMin, 0.0f));
        model.setScale(glm::vec3(sWidth, tHeight, 1.0f));
        batch.setModelTransform(model);

        batch.setUniformBuffer(SurfaceGeometryPass_FrameTransformSlot, frameTransform->getFrameTransformBuffer());
        batch.setUniformBuffer(SurfaceGeometryPass_ParamsSlot, _parametersBuffer);

        // Pyramid pass
        batch.setFramebuffer(pyramidFBO);
        batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(args->getViewFrustum().getFarClip(), 0.0f, 0.0f, 0.0f));
        batch.setPipeline(linearDepthPipeline);
        batch.setResourceTexture(SurfaceGeometryPass_DepthMapSlot, depthBuffer);
        batch.draw(gpu::TRIANGLE_STRIP, 4);

        // Curvature pass
        batch.setFramebuffer(curvatureFBO);
        batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
        batch.setPipeline(curvaturePipeline);
        batch.setResourceTexture(SurfaceGeometryPass_DepthMapSlot, pyramidTexture);
        batch.setResourceTexture(SurfaceGeometryPass_NormalMapSlot, normalTexture);
        batch.draw(gpu::TRIANGLE_STRIP, 4);
        batch.setResourceTexture(SurfaceGeometryPass_DepthMapSlot, nullptr);
        batch.setResourceTexture(SurfaceGeometryPass_NormalMapSlot, nullptr);
    });
}

const gpu::PipelinePointer& SurfaceGeometryPass::getLinearDepthPipeline() {
    if (!_linearDepthPipeline) {
        auto vs = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
        auto ps = gpu::Shader::createPixel(std::string(surfaceGeometry_makeLinearDepth_frag));
        gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);

        gpu::Shader::BindingSet slotBindings;
        slotBindings.insert(gpu::Shader::Binding(std::string("deferredFrameTransformBuffer"), SurfaceGeometryPass_FrameTransformSlot));
        slotBindings.insert(gpu::Shader::Binding(std::string("linearDepthMap"), SurfaceGeometryPass_DepthMapSlot));
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());

        // Stencil test the curvature pass for objects pixels only, not the background
        state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::NOT_EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));

        state->setColorWriteMask(true, false, false, false);

        // Good to go, add the brand new pipeline
        _linearDepthPipeline = gpu::Pipeline::create(program, state);
    }

    return _linearDepthPipeline;
}

const gpu::PipelinePointer& SurfaceGeometryPass::getCurvaturePipeline() {
    if (!_curvaturePipeline) {
        auto vs = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
        auto ps = gpu::Shader::createPixel(std::string(surfaceGeometry_makeCurvature_frag));
        gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);

        gpu::Shader::BindingSet slotBindings;
        slotBindings.insert(gpu::Shader::Binding(std::string("deferredFrameTransformBuffer"), SurfaceGeometryPass_FrameTransformSlot));
        slotBindings.insert(gpu::Shader::Binding(std::string("surfaceGeometryParamsBuffer"), SurfaceGeometryPass_ParamsSlot));
        slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), SurfaceGeometryPass_DepthMapSlot));
        slotBindings.insert(gpu::Shader::Binding(std::string("normalMap"), SurfaceGeometryPass_NormalMapSlot));
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());

        // Stencil test the curvature pass for objects pixels only, not the background
        state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::NOT_EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));

        // Good to go, add the brand new pipeline
        _curvaturePipeline = gpu::Pipeline::create(program, state);
    }

    return _curvaturePipeline;
}
80 libraries/render-utils/src/SurfaceGeometryPass.h Normal file
@@ -0,0 +1,80 @@
//
//  SurfaceGeometryPass.h
//  libraries/render-utils/src/
//
//  Created by Sam Gateau 6/3/2016.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_SurfaceGeometryPass_h
#define hifi_SurfaceGeometryPass_h

#include <DependencyManager.h>

#include "render/DrawTask.h"
#include "DeferredFrameTransform.h"

class SurfaceGeometryPassConfig : public render::Job::Config {
    Q_OBJECT
    Q_PROPERTY(float depthThreshold MEMBER depthThreshold NOTIFY dirty)
    Q_PROPERTY(float basisScale MEMBER basisScale NOTIFY dirty)
    Q_PROPERTY(float curvatureScale MEMBER curvatureScale NOTIFY dirty)
    Q_PROPERTY(double gpuTime READ getGpuTime)
public:
    SurfaceGeometryPassConfig() : render::Job::Config(true) {}

    float depthThreshold{ 0.02f }; // meters
    float basisScale{ 1.0f };
    float curvatureScale{ 10.0f };

    double getGpuTime() { return gpuTime; }

    double gpuTime{ 0.0 };

signals:
    void dirty();
};

class SurfaceGeometryPass {
public:
    using Outputs = render::VaryingSet2<gpu::FramebufferPointer, gpu::TexturePointer>;
    using Config = SurfaceGeometryPassConfig;
    using JobModel = render::Job::ModelIO<SurfaceGeometryPass, DeferredFrameTransformPointer, Outputs, Config>;

    SurfaceGeometryPass();

    void configure(const Config& config);
    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const DeferredFrameTransformPointer& frameTransform, Outputs& curvatureAndDepth);

    float getCurvatureDepthThreshold() const { return _parametersBuffer.get<Parameters>().curvatureInfo.x; }
    float getCurvatureBasisScale() const { return _parametersBuffer.get<Parameters>().curvatureInfo.y; }
    float getCurvatureScale() const { return _parametersBuffer.get<Parameters>().curvatureInfo.w; }

private:
    typedef gpu::BufferView UniformBufferView;

    // Class describing the uniform buffer with all the parameters common to the curvature shaders
    class Parameters {
    public:
        // Resolution info
        glm::vec4 resolutionInfo { -1.0f, 0.0f, 0.0f, 0.0f };
        // Curvature algorithm
        glm::vec4 curvatureInfo{ 0.0f };

        Parameters() {}
    };
    gpu::BufferView _parametersBuffer;

    const gpu::PipelinePointer& getLinearDepthPipeline();
    const gpu::PipelinePointer& getCurvaturePipeline();

    gpu::PipelinePointer _linearDepthPipeline;
    gpu::PipelinePointer _curvaturePipeline;

    gpu::RangeTimer _gpuTimer;
};

#endif // hifi_SurfaceGeometryPass_h
@ -17,6 +17,9 @@
|
|||
uniform sampler2D pyramidMap;
|
||||
uniform sampler2D occlusionMap;
|
||||
uniform sampler2D occlusionBlurredMap;
|
||||
uniform sampler2D curvatureMap;
|
||||
uniform sampler2D diffusedCurvatureMap;
|
||||
uniform sampler2D scatteringMap;
|
||||
|
||||
in vec2 uv;
|
||||
out vec4 outFragColor;
|
||||
|
|
|
@ -5,18 +5,26 @@
|
|||
// deferred_light.vert
|
||||
// vertex shader
|
||||
//
|
||||
// Created by Andrzej Kapolka on 9/18/14.
|
||||
// Created by Sam Gateau on 6/16/16.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include gpu/Inputs.slh@>
|
||||
|
||||
out vec2 _texCoord0;
|
||||
|
||||
void main(void) {
|
||||
_texCoord0 = inTexCoord0.st;
|
||||
gl_Position = inPosition;
|
||||
const float depth = 1.0;
|
||||
const vec4 UNIT_QUAD[4] = vec4[4](
|
||||
vec4(-1.0, -1.0, depth, 1.0),
|
||||
vec4(1.0, -1.0, depth, 1.0),
|
||||
vec4(-1.0, 1.0, depth, 1.0),
|
||||
vec4(1.0, 1.0, depth, 1.0)
|
||||
);
|
||||
vec4 pos = UNIT_QUAD[gl_VertexID];
|
||||
|
||||
_texCoord0 = (pos.xy + 1) * 0.5;
|
||||
|
||||
gl_Position = pos;
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
// deferred_light_limited.vert
|
||||
// vertex shader
|
||||
//
|
||||
// Created by Andrzej Kapolka on 9/19/14.
|
||||
// Created by Sam Gateau on 6/16/16.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
|
@ -18,17 +18,39 @@
|
|||
|
||||
<$declareStandardTransform()$>
|
||||
|
||||
uniform mat4 texcoordMat;
|
||||
uniform vec4 sphereParam;
|
||||
|
||||
out vec4 _texCoord0;
|
||||
|
||||
void main(void) {
|
||||
// standard transform
|
||||
TransformCamera cam = getTransformCamera();
|
||||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToClipPos(cam, obj, inPosition, gl_Position)$>;
|
||||
if (sphereParam.w != 0.0) {
|
||||
|
||||
vec4 projected = gl_Position / gl_Position.w;
|
||||
_texCoord0 = vec4(dot(projected, texcoordMat[0]) * gl_Position.w,
|
||||
dot(projected, texcoordMat[1]) * gl_Position.w, 0.0, gl_Position.w);
|
||||
// standard transform
|
||||
TransformCamera cam = getTransformCamera();
|
||||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToClipPos(cam, obj, inPosition, gl_Position)$>;
|
||||
|
||||
vec4 projected = gl_Position / gl_Position.w;
|
||||
projected.xy = (projected.xy + 1.0) * 0.5;
|
||||
|
||||
if (cam_isStereo()) {
|
||||
projected.x = 0.5 * (projected.x + cam_getStereoSide());
|
||||
}
|
||||
_texCoord0 = vec4(projected.xy, 0.0, 1.0) * gl_Position.w;
|
||||
} else {
|
||||
const float depth = -1.0; //Draw at near plane
|
||||
const vec4 UNIT_QUAD[4] = vec4[4](
|
||||
vec4(-1.0, -1.0, depth, 1.0),
|
||||
vec4(1.0, -1.0, depth, 1.0),
|
||||
vec4(-1.0, 1.0, depth, 1.0),
|
||||
vec4(1.0, 1.0, depth, 1.0)
|
||||
);
|
||||
vec4 pos = UNIT_QUAD[gl_VertexID];
|
||||
|
||||
_texCoord0 = vec4((pos.xy + 1) * 0.5, 0.0, 1.0);
|
||||
if (cam_isStereo()) {
|
||||
_texCoord0.x = 0.5 * (_texCoord0.x + cam_getStereoSide());
|
||||
}
|
||||
gl_Position = pos;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
|
||||
<$declareStandardTransform()$>
|
||||
|
||||
uniform mat4 texcoordMat;
|
||||
uniform vec4 coneParam;
|
||||
|
||||
out vec4 _texCoord0;
|
||||
|
@ -38,14 +37,34 @@ void main(void) {
|
|||
} else {
|
||||
coneVertex.z = 0.0;
|
||||
}
|
||||
|
||||
|
||||
// standard transform
|
||||
TransformCamera cam = getTransformCamera();
|
||||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToClipPos(cam, obj, coneVertex, gl_Position)$>;
|
||||
vec4 projected = gl_Position / gl_Position.w;
|
||||
projected.xy = (projected.xy + 1.0) * 0.5;
|
||||
|
||||
if (cam_isStereo()) {
|
||||
projected.x = 0.5 * (projected.x + cam_getStereoSide());
|
||||
}
|
||||
_texCoord0 = vec4(projected.xy, 0.0, 1.0) * gl_Position.w;
|
||||
|
||||
} else {
|
||||
const float depth = -1.0; //Draw at near plane
|
||||
const vec4 UNIT_QUAD[4] = vec4[4](
|
||||
vec4(-1.0, -1.0, depth, 1.0),
|
||||
vec4(1.0, -1.0, depth, 1.0),
|
||||
vec4(-1.0, 1.0, depth, 1.0),
|
||||
vec4(1.0, 1.0, depth, 1.0)
|
||||
);
|
||||
vec4 pos = UNIT_QUAD[gl_VertexID];
|
||||
|
||||
_texCoord0 = vec4((pos.xy + 1) * 0.5, 0.0, 1.0);
|
||||
if (cam_isStereo()) {
|
||||
_texCoord0.x = 0.5 * (_texCoord0.x + cam_getStereoSide());
|
||||
}
|
||||
gl_Position = pos;
|
||||
}
|
||||
|
||||
// standard transform
|
||||
TransformCamera cam = getTransformCamera();
|
||||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToClipPos(cam, obj, coneVertex, gl_Position)$>;
|
||||
|
||||
vec4 projected = gl_Position / gl_Position.w;
|
||||
_texCoord0 = vec4(dot(projected, texcoordMat[0]) * gl_Position.w,
|
||||
dot(projected, texcoordMat[1]) * gl_Position.w, 0.0, gl_Position.w);
|
||||
}
|
||||
|
|
|
@ -16,13 +16,14 @@
|
|||
<@include DeferredGlobalLight.slh@>
|
||||
|
||||
<$declareEvalLightmappedColor()$>
|
||||
<$declareEvalAmbientSphereGlobalColor()$>
|
||||
<$declareEvalAmbientSphereGlobalColor(supportScattering)$>
|
||||
|
||||
|
||||
in vec2 _texCoord0;
|
||||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
DeferredTransform deferredTransform = getDeferredTransform();
|
||||
DeferredFrameTransform deferredTransform = getDeferredFrameTransform();
|
||||
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
|
||||
|
||||
float shadowAttenuation = 1.0;
|
||||
|
@ -31,16 +32,35 @@ void main(void) {
|
|||
_fragColor = vec4(frag.diffuse, 1.0);
|
||||
} else if (frag.mode == FRAG_MODE_LIGHTMAPPED) {
|
||||
vec3 color = evalLightmappedColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.normal,
|
||||
frag.diffuse,
|
||||
frag.specularVal.xyz);
|
||||
_fragColor = vec4(color, 1.0);
|
||||
} else {
|
||||
} else { //if (frag.mode == FRAG_MODE_SCATTERING) {
|
||||
|
||||
vec4 blurredCurvature = fetchCurvature(_texCoord0);
|
||||
vec4 diffusedCurvature = fetchDiffusedCurvature(_texCoord0);
|
||||
|
||||
vec3 color = evalAmbientSphereGlobalColorScattering(
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.position.xyz,
|
||||
frag.normal,
|
||||
frag.diffuse,
|
||||
frag.metallic,
|
||||
frag.emissive,
|
||||
frag.roughness,
|
||||
frag.scattering,
|
||||
blurredCurvature,
|
||||
diffusedCurvature);
|
||||
_fragColor = vec4(color, 1.0);
|
||||
/* } else {
|
||||
vec3 color = evalAmbientSphereGlobalColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.position.xyz,
|
||||
|
@ -49,6 +69,6 @@ void main(void) {
|
|||
frag.metallic,
|
||||
frag.emissive,
|
||||
frag.roughness);
|
||||
_fragColor = vec4(color, frag.normalVal.a);
|
||||
_fragColor = vec4(color, frag.normalVal.a);*/
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,17 +23,17 @@ in vec2 _texCoord0;
|
|||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
DeferredTransform deferredTransform = getDeferredTransform();
|
||||
DeferredFrameTransform deferredTransform = getDeferredFrameTransform();
|
||||
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
|
||||
|
||||
vec4 worldPos = deferredTransform.viewInverse * vec4(frag.position.xyz, 1.0);
|
||||
vec4 worldPos = getViewInverse() * vec4(frag.position.xyz, 1.0);
|
||||
float shadowAttenuation = evalShadowAttenuation(worldPos);
|
||||
|
||||
if (frag.mode == FRAG_MODE_UNLIT) {
|
||||
_fragColor = vec4(frag.diffuse, 1.0);
|
||||
} else if (frag.mode == FRAG_MODE_LIGHTMAPPED) {
|
||||
vec3 color = evalLightmappedColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.normal,
|
||||
|
@ -42,7 +42,7 @@ void main(void) {
|
|||
_fragColor = vec4(color, 1.0);
|
||||
} else {
|
||||
vec3 color = evalAmbientSphereGlobalColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.position.xyz,
|
||||
|
|
|
@ -22,7 +22,7 @@ in vec2 _texCoord0;
|
|||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
DeferredTransform deferredTransform = getDeferredTransform();
|
||||
DeferredFrameTransform deferredTransform = getDeferredFrameTransform();
|
||||
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
|
||||
|
||||
float shadowAttenuation = 1.0;
|
||||
|
@ -32,7 +32,7 @@ void main(void) {
|
|||
_fragColor = vec4(frag.diffuse, 1.0);
|
||||
} else if (frag.mode == FRAG_MODE_LIGHTMAPPED) {
|
||||
vec3 color = evalLightmappedColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.normal,
|
||||
|
@ -41,7 +41,7 @@ void main(void) {
|
|||
_fragColor = vec4(color, 1.0);
|
||||
} else {
|
||||
vec3 color = evalAmbientGlobalColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.position.xyz,
|
||||
|
|
|
@ -23,10 +23,10 @@ in vec2 _texCoord0;
|
|||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
DeferredTransform deferredTransform = getDeferredTransform();
|
||||
DeferredFrameTransform deferredTransform = getDeferredFrameTransform();
|
||||
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
|
||||
|
||||
vec4 worldPos = deferredTransform.viewInverse * vec4(frag.position.xyz, 1.0);
|
||||
vec4 worldPos = getViewInverse() * vec4(frag.position.xyz, 1.0);
|
||||
float shadowAttenuation = evalShadowAttenuation(worldPos);
|
||||
|
||||
// Light mapped or not ?
|
||||
|
@ -34,7 +34,7 @@ void main(void) {
|
|||
_fragColor = vec4(frag.diffuse, 1.0);
|
||||
} else if (frag.mode == FRAG_MODE_LIGHTMAPPED) {
|
||||
vec3 color = evalLightmappedColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.normal,
|
||||
|
@ -43,7 +43,7 @@ void main(void) {
|
|||
_fragColor = vec4(color, 1.0);
|
||||
} else {
|
||||
vec3 color = evalAmbientGlobalColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.position.xyz,
|
||||
|
|
|
@ -16,13 +16,13 @@
|
|||
<@include DeferredGlobalLight.slh@>
|
||||
|
||||
<$declareEvalLightmappedColor()$>
|
||||
<$declareEvalSkyboxGlobalColor()$>
|
||||
<$declareEvalSkyboxGlobalColor(supportScattering)$>
|
||||
|
||||
in vec2 _texCoord0;
|
||||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
DeferredTransform deferredTransform = getDeferredTransform();
|
||||
DeferredFrameTransform deferredTransform = getDeferredFrameTransform();
|
||||
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
|
||||
|
||||
float shadowAttenuation = 1.0;
|
||||
|
@ -32,16 +32,35 @@ void main(void) {
|
|||
_fragColor = vec4(frag.diffuse, 1.0);
|
||||
} else if (frag.mode == FRAG_MODE_LIGHTMAPPED) {
|
||||
vec3 color = evalLightmappedColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.normal,
|
||||
frag.diffuse,
|
||||
frag.specularVal.xyz);
|
||||
_fragColor = vec4(color, 1.0);
|
||||
} else {
|
||||
} else {// if (frag.mode == FRAG_MODE_SCATTERING) {
|
||||
|
||||
vec4 blurredCurvature = fetchCurvature(_texCoord0);
|
||||
vec4 diffusedCurvature = fetchDiffusedCurvature(_texCoord0);
|
||||
|
||||
vec3 color = evalSkyboxGlobalColorScattering(
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.position.xyz,
|
||||
frag.normal,
|
||||
frag.diffuse,
|
||||
frag.metallic,
|
||||
frag.emissive,
|
||||
frag.roughness,
|
||||
frag.scattering,
|
||||
blurredCurvature,
|
||||
diffusedCurvature);
|
||||
_fragColor = vec4(color, 1.0);
|
||||
/* } else {
|
||||
vec3 color = evalSkyboxGlobalColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.position.xyz,
|
||||
|
@ -52,5 +71,6 @@ void main(void) {
|
|||
frag.roughness);
|
||||
|
||||
_fragColor = vec4(color, frag.normalVal.a);
|
||||
*/
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,10 +23,10 @@ in vec2 _texCoord0;
|
|||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
DeferredTransform deferredTransform = getDeferredTransform();
|
||||
DeferredFrameTransform deferredTransform = getDeferredFrameTransform();
|
||||
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
|
||||
|
||||
vec4 worldPos = deferredTransform.viewInverse * vec4(frag.position.xyz, 1.0);
|
||||
vec4 worldPos = getViewInverse() * vec4(frag.position.xyz, 1.0);
|
||||
float shadowAttenuation = evalShadowAttenuation(worldPos);
|
||||
|
||||
// Light mapped or not ?
|
||||
|
@ -34,7 +34,7 @@ void main(void) {
|
|||
_fragColor = vec4(frag.diffuse, 1.0);
|
||||
} else if (frag.mode == FRAG_MODE_LIGHTMAPPED) {
|
||||
vec3 color = evalLightmappedColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.normal,
|
||||
|
@ -43,7 +43,7 @@ void main(void) {
|
|||
_fragColor = vec4(color, 1.0);
|
||||
} else {
|
||||
vec3 color = evalSkyboxGlobalColor(
|
||||
deferredTransform.viewInverse,
|
||||
getViewInverse(),
|
||||
shadowAttenuation,
|
||||
frag.obscurance,
|
||||
frag.position.xyz,
|
||||
|
|
|
@ -45,6 +45,8 @@ void main(void) {
|
|||
vec3 emissive = getMaterialEmissive(mat);
|
||||
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
|
||||
|
||||
float scattering = getMaterialScattering(mat);
|
||||
|
||||
packDeferredFragment(
|
||||
normalize(_normal.xyz),
|
||||
opacity,
|
||||
|
@ -52,5 +54,6 @@ void main(void) {
|
|||
roughness,
|
||||
getMaterialMetallic(mat),
|
||||
emissive,
|
||||
occlusionTex);
|
||||
occlusionTex,
|
||||
scattering);
|
||||
}
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
<@include model/Material.slh@>
|
||||
|
||||
<@include MaterialTextures.slh@>
|
||||
<$declareMaterialTextures(ALBEDO, ROUGHNESS, NORMAL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>
|
||||
<$declareMaterialTextures(ALBEDO, ROUGHNESS, NORMAL, _SCRIBE_NULL, EMISSIVE, OCCLUSION, SCATTERING)$>
|
||||
|
||||
in vec4 _position;
|
||||
in vec2 _texCoord0;
|
||||
|
@ -29,7 +29,7 @@ in vec3 _color;
|
|||
void main(void) {
|
||||
Material mat = getMaterial();
|
||||
int matKey = getMaterialKey(mat);
|
||||
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, normalTex, _SCRIBE_NULL, emissiveTex)$>
|
||||
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, normalTex, _SCRIBE_NULL, emissiveTex, scatteringTex)$>
|
||||
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
|
||||
|
||||
float opacity = 1.0;
|
||||
|
@ -49,6 +49,9 @@ void main(void) {
|
|||
vec3 viewNormal;
|
||||
<$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
|
||||
|
||||
float scattering = getMaterialScattering(mat);
|
||||
<$evalMaterialScattering(scatteringTex, scattering, matKey, scattering)$>;
|
||||
|
||||
packDeferredFragment(
|
||||
viewNormal,
|
||||
opacity,
|
||||
|
@ -56,5 +59,6 @@ void main(void) {
|
|||
roughness,
|
||||
getMaterialMetallic(mat),
|
||||
emissive,
|
||||
occlusionTex);
|
||||
occlusionTex,
|
||||
scattering);
|
||||
}
|
||||
|
|
|
@ -52,6 +52,7 @@ void main(void) {
|
|||
float metallic = getMaterialMetallic(mat);
|
||||
<$evalMaterialMetallic(metallicTex, metallic, matKey, metallic)$>;
|
||||
|
||||
float scattering = getMaterialScattering(mat);
|
||||
|
||||
packDeferredFragment(
|
||||
normalize(viewNormal.xyz),
|
||||
|
@ -60,5 +61,6 @@ void main(void) {
|
|||
roughness,
|
||||
metallic,
|
||||
emissive,
|
||||
occlusionTex);
|
||||
occlusionTex,
|
||||
scattering);
|
||||
}
|
||||
|
|
|
@ -49,6 +49,8 @@ void main(void) {
|
|||
float metallic = getMaterialMetallic(mat);
|
||||
<$evalMaterialMetallic(metallicTex, metallic, matKey, metallic)$>;
|
||||
|
||||
float scattering = getMaterialScattering(mat);
|
||||
|
||||
packDeferredFragment(
|
||||
normalize(_normal),
|
||||
opacity,
|
||||
|
@ -56,5 +58,6 @@ void main(void) {
|
|||
roughness,
|
||||
metallic,
|
||||
emissive,
|
||||
occlusionTex);
|
||||
occlusionTex,
|
||||
scattering);
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include DeferredLighting.slh@>
|
||||
<@include LightingModel.slh@>
|
||||
<@include model/Light.slh@>
|
||||
|
||||
<@include gpu/Transform.slh@>
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include DeferredLighting.slh@>
|
||||
<@include LightingModel.slh@>
|
||||
<@include model/Light.slh@>
|
||||
|
||||
<@include gpu/Transform.slh@>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
// point_light.frag
|
||||
// fragment shader
|
||||
//
|
||||
// Created by Andrzej Kapolka on 9/18/14.
|
||||
// Created by Sam Gateau on 9/18/15.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
|
@ -15,18 +15,24 @@
|
|||
// Everything about deferred buffer
|
||||
<@include DeferredBufferRead.slh@>
|
||||
|
||||
//Everything about deferred lighting
|
||||
<@include DeferredLighting.slh@>
|
||||
<$declareDeferredCurvature()$>
|
||||
|
||||
// Everything about light
|
||||
<@include model/Light.slh@>
|
||||
|
||||
<@include LightingModel.slh@>
|
||||
<$declareLightingModel()$>
|
||||
|
||||
<@include LightPoint.slh@>
|
||||
<$declareLightingPoint(supportScattering)$>
|
||||
|
||||
|
||||
|
||||
in vec4 _texCoord0;
|
||||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
DeferredTransform deferredTransform = getDeferredTransform();
|
||||
DeferredFrameTransform deferredTransform = getDeferredFrameTransform();
|
||||
|
||||
// Grab the fragment data from the uv
|
||||
vec2 texCoord = _texCoord0.st / _texCoord0.q;
|
||||
|
@ -36,8 +42,6 @@ void main(void) {
|
|||
discard;
|
||||
}
|
||||
|
||||
mat4 invViewMat = deferredTransform.viewInverse;
|
||||
|
||||
// Kill if in front of the light volume
|
||||
float depth = frag.depthVal;
|
||||
if (depth < gl_FragCoord.z) {
|
||||
|
@ -47,38 +51,41 @@ void main(void) {
|
|||
// Need the light now
|
||||
Light light = getLight();
|
||||
|
||||
// Make the Light vector going from fragment to light center in world space
|
||||
// Frag pos in world
|
||||
mat4 invViewMat = getViewInverse();
|
||||
vec4 fragPos = invViewMat * frag.position;
|
||||
vec3 fragLightVec = getLightPosition(light) - fragPos.xyz;
|
||||
|
||||
// Kill if too far from the light center
|
||||
if (dot(fragLightVec, fragLightVec) > getLightCutoffSquareRadius(light)) {
|
||||
// Clip againgst the light volume and Make the Light vector going from fragment to light center in world space
|
||||
vec4 fragLightVecLen2;
|
||||
if (!clipFragToLightVolumePoint(light, fragPos.xyz, fragLightVecLen2)) {
|
||||
discard;
|
||||
}
|
||||
|
||||
// Allright we re valid in the volume
|
||||
float fragLightDistance = length(fragLightVec);
|
||||
vec3 fragLightDir = fragLightVec / fragLightDistance;
|
||||
|
||||
// Eval shading
|
||||
vec3 fragNormal = vec3(frag.normal);
|
||||
// Frag to eye vec
|
||||
vec4 fragEyeVector = invViewMat * vec4(-frag.position.xyz, 0.0);
|
||||
vec3 fragEyeDir = normalize(fragEyeVector.xyz);
|
||||
vec4 shading = evalFragShading(fragNormal, fragLightDir, fragEyeDir, frag.metallic, frag.specular, frag.roughness);
|
||||
|
||||
// Eval attenuation
|
||||
float radialAttenuation = evalLightAttenuation(light, fragLightDistance);
|
||||
|
||||
// Final Lighting color
|
||||
vec3 fragColor = (shading.w * frag.diffuse + shading.xyz);
|
||||
_fragColor = vec4(fragColor * radialAttenuation * getLightColor(light) * getLightIntensity(light) * frag.obscurance, 0.0);
|
||||
vec3 diffuse;
|
||||
vec3 specular;
|
||||
|
||||
if (getLightShowContour(light) > 0.0) {
|
||||
// Show edge
|
||||
float edge = abs(2.0 * ((getLightRadius(light) - fragLightDistance) / (0.1)) - 1.0);
|
||||
if (edge < 1) {
|
||||
float edgeCoord = exp2(-8.0*edge*edge);
|
||||
_fragColor = vec4(edgeCoord * edgeCoord * getLightShowContour(light) * getLightColor(light), 0.0);
|
||||
}
|
||||
if (frag.mode == FRAG_MODE_SCATTERING) {
|
||||
vec4 blurredCurvature = fetchCurvature(texCoord);
|
||||
vec4 diffusedCurvature = fetchDiffusedCurvature(texCoord);
|
||||
vec3 midNormal = normalize((blurredCurvature.xyz - 0.5f) * 2.0f);
|
||||
vec3 lowNormal = normalize((diffusedCurvature.xyz - 0.5f) * 2.0f);
|
||||
float highCurvature = unpackCurvature(blurredCurvature.w);
|
||||
float lowCurvature = unpackCurvature(diffusedCurvature.w);
|
||||
evalLightingPointScattering(diffuse, specular, light,
|
||||
fragLightVecLen2.xyz, fragEyeDir, frag.normal, frag.roughness,
|
||||
frag.metallic, frag.specular, frag.diffuse, 1.0,
|
||||
frag.scattering * isScatteringEnabled(), midNormal, lowNormal, lowCurvature);
|
||||
} else {
|
||||
evalLightingPoint(diffuse, specular, light,
|
||||
fragLightVecLen2.xyz, fragEyeDir, frag.normal, frag.roughness,
|
||||
frag.metallic, frag.specular, frag.diffuse, 1.0);
|
||||
}
|
||||
|
||||
_fragColor.rgb += diffuse * isDiffuseEnabled() * isPointEnabled();
|
||||
_fragColor.rgb += specular * isSpecularEnabled() * isPointEnabled();
|
||||
}
|
||||
|
|
|
@ -54,6 +54,6 @@ void main(void) {
|
|||
normal, 1.0, diffuse, max(0, 1.0 - shininess / 128.0), DEFAULT_METALLIC, specular, specular);
|
||||
} else {
|
||||
packDeferredFragment(
|
||||
normal, 1.0, diffuse, max(0, 1.0 - shininess / 128.0), length(specular), DEFAULT_EMISSIVE, DEFAULT_OCCLUSION);
|
||||
normal, 1.0, diffuse, max(0, 1.0 - shininess / 128.0), length(specular), DEFAULT_EMISSIVE, DEFAULT_OCCLUSION, DEFAULT_SCATTERING);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,5 +36,6 @@ void main(void) {
|
|||
DEFAULT_ROUGHNESS,
|
||||
DEFAULT_METALLIC,
|
||||
DEFAULT_EMISSIVE,
|
||||
DEFAULT_OCCLUSION);
|
||||
DEFAULT_OCCLUSION,
|
||||
DEFAULT_SCATTERING);
|
||||
}
|
|
@ -5,7 +5,7 @@
|
|||
// spot_light.frag
|
||||
// fragment shader
|
||||
//
|
||||
// Created by Andrzej Kapolka on 9/18/14.
|
||||
// Created by Sam Gateau on 9/18/15.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
|
@ -15,18 +15,24 @@
|
|||
// Everything about deferred buffer
|
||||
<@include DeferredBufferRead.slh@>
|
||||
|
||||
//Everything about deferred lighting
|
||||
<@include DeferredLighting.slh@>
|
||||
<$declareDeferredCurvature()$>
|
||||
|
||||
// Everything about light
|
||||
<@include model/Light.slh@>
|
||||
|
||||
<@include LightingModel.slh@>
|
||||
<$declareLightingModel()$>
|
||||
|
||||
<@include LightSpot.slh@>
|
||||
<$declareLightingSpot(supportScattering)$>
|
||||
|
||||
|
||||
in vec4 _texCoord0;
|
||||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
|
||||
DeferredTransform deferredTransform = getDeferredTransform();
|
||||
DeferredFrameTransform deferredTransform = getDeferredFrameTransform();
|
||||
|
||||
// Grab the fragment data from the uv
|
||||
vec2 texCoord = _texCoord0.st / _texCoord0.q;
|
||||
|
@ -36,8 +42,6 @@ void main(void) {
|
|||
discard;
|
||||
}
|
||||
|
||||
mat4 invViewMat = deferredTransform.viewInverse;
|
||||
|
||||
// Kill if in front of the light volume
|
||||
float depth = frag.depthVal;
|
||||
if (depth < gl_FragCoord.z) {
|
||||
|
@ -47,50 +51,43 @@ void main(void) {
|
|||
// Need the light now
|
||||
Light light = getLight();
|
||||
|
||||
// Make the Light vector going from fragment to light center in world space
|
||||
// Frag pos in world
|
||||
mat4 invViewMat = getViewInverse();
|
||||
vec4 fragPos = invViewMat * frag.position;
|
||||
vec3 fragLightVec = getLightPosition(light) - fragPos.xyz;
|
||||
|
||||
// Kill if too far from the light center
|
||||
if (dot(fragLightVec, fragLightVec) > getLightCutoffSquareRadius(light)) {
|
||||
// Clip againgst the light volume and Make the Light vector going from fragment to light center in world space
|
||||
vec4 fragLightVecLen2;
|
||||
vec4 fragLightDirLen;
|
||||
float cosSpotAngle;
|
||||
if (!clipFragToLightVolumeSpot(light, fragPos.xyz, fragLightVecLen2, fragLightDirLen, cosSpotAngle)) {
|
||||
discard;
|
||||
}
|
||||
|
||||
// Allright we re valid in the volume
|
||||
float fragLightDistance = length(fragLightVec);
|
||||
vec3 fragLightDir = fragLightVec / fragLightDistance;
|
||||
|
||||
// Kill if not in the spot light (ah ah !)
|
||||
vec3 lightSpotDir = getLightDirection(light);
|
||||
float cosSpotAngle = max(-dot(fragLightDir, lightSpotDir), 0.0);
|
||||
if (cosSpotAngle < getLightSpotAngleCos(light)) {
|
||||
discard;
|
||||
}
|
||||
|
||||
// Eval shading
|
||||
vec3 fragNormal = vec3(frag.normal);
|
||||
// Frag to eye vec
|
||||
vec4 fragEyeVector = invViewMat * vec4(-frag.position.xyz, 0.0);
|
||||
vec3 fragEyeDir = normalize(fragEyeVector.xyz);
|
||||
vec4 shading = evalFragShading(fragNormal, fragLightDir, fragEyeDir, frag.metallic, frag.specular, frag.roughness);
|
||||
|
||||
// Eval attenuation
|
||||
float radialAttenuation = evalLightAttenuation(light, fragLightDistance);
|
||||
float angularAttenuation = evalLightSpotAttenuation(light, cosSpotAngle);
|
||||
|
||||
// Final Lighting color
|
||||
vec3 fragColor = (shading.w * frag.diffuse + shading.xyz);
|
||||
_fragColor = vec4(fragColor * angularAttenuation * radialAttenuation * getLightColor(light) * getLightIntensity(light) * frag.obscurance, 0.0);
|
||||
|
||||
if (getLightShowContour(light) > 0.0) {
|
||||
// Show edges
|
||||
float edgeDistR = (getLightRadius(light) - fragLightDistance);
|
||||
float edgeDistS = dot(fragLightDistance * vec2(cosSpotAngle, sqrt(1.0 - cosSpotAngle * cosSpotAngle)), -getLightSpotOutsideNormal2(light));
|
||||
float edgeDist = min(edgeDistR, edgeDistS);
|
||||
float edge = abs(2.0 * (edgeDist / (0.1)) - 1.0);
|
||||
if (edge < 1) {
|
||||
float edgeCoord = exp2(-8.0*edge*edge);
|
||||
_fragColor = vec4(edgeCoord * edgeCoord * getLightColor(light), 0.0);
|
||||
}
|
||||
vec3 diffuse;
|
||||
vec3 specular;
|
||||
if (frag.mode == FRAG_MODE_SCATTERING) {
|
||||
vec4 blurredCurvature = fetchCurvature(texCoord);
|
||||
vec4 diffusedCurvature = fetchDiffusedCurvature(texCoord);
|
||||
vec3 midNormal = normalize((blurredCurvature.xyz - 0.5f) * 2.0f);
|
||||
vec3 lowNormal = normalize((diffusedCurvature.xyz - 0.5f) * 2.0f);
|
||||
float highCurvature = unpackCurvature(blurredCurvature.w);
|
||||
float lowCurvature = unpackCurvature(diffusedCurvature.w);
|
||||
evalLightingSpotScattering(diffuse, specular, light,
|
||||
fragLightDirLen.xyzw, cosSpotAngle, fragEyeDir, frag.normal, frag.roughness,
|
||||
frag.metallic, frag.specular, frag.diffuse, 1.0,
|
||||
frag.scattering * isScatteringEnabled(), midNormal, lowNormal, lowCurvature);
|
||||
} else {
|
||||
evalLightingSpot(diffuse, specular, light,
|
||||
fragLightDirLen.xyzw, cosSpotAngle, fragEyeDir, frag.normal, frag.roughness,
|
||||
frag.metallic, frag.specular, frag.diffuse, 1.0);
|
||||
}
|
||||
|
||||
_fragColor.rgb += diffuse * isDiffuseEnabled() * isSpotEnabled();
|
||||
_fragColor.rgb += specular * isSpecularEnabled() * isSpotEnabled();
|
||||
}
|
||||
|
||||
|
|
|
@ -112,7 +112,7 @@ void main(void) {
|
|||
|
||||
// From now on, ssC is the pixel pos in the side
|
||||
ssC.x -= side.y;
|
||||
vec2 fragPos = (vec2(ssC) + 0.5) / getStereoSideWidth();
|
||||
vec2 fragPos = (vec2(ssC) + 0.5) / getStereoSideWidth();
|
||||
|
||||
// The position and normal of the pixel fragment in Eye space
|
||||
vec3 Cp = evalEyePositionFromZeye(side.x, Zeye, fragPos);
|
||||
|
|
|
@ -0,0 +1,108 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 6/8/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
|
||||
<@include DeferredBufferRead.slh@>
|
||||
<@include model/Light.slh@>
|
||||
|
||||
<$declareDeferredCurvature()$>
|
||||
|
||||
<@include SubsurfaceScattering.slh@>
|
||||
<$declareSubsurfaceScatteringBRDF()$>
|
||||
|
||||
in vec2 varTexCoord0;
|
||||
out vec4 _fragColor;
|
||||
|
||||
uniform vec2 uniformCursorTexcoord = vec2(0.5);
|
||||
|
||||
//uniform vec3 uniformLightVector = vec3(1.0);
|
||||
|
||||
vec3 evalScatteringBRDF(vec2 texcoord) {
|
||||
DeferredFragment fragment = unpackDeferredFragmentNoPosition(texcoord);
|
||||
|
||||
vec3 normal = fragment.normal; // .getWorldNormal(varTexCoord0);
|
||||
vec4 blurredCurvature = fetchCurvature(texcoord);
|
||||
vec4 diffusedCurvature = fetchDiffusedCurvature(texcoord);
|
||||
vec3 midNormal = normalize((blurredCurvature.xyz - 0.5f) * 2.0f);
|
||||
vec3 lowNormal = normalize((diffusedCurvature.xyz - 0.5f) * 2.0f);
|
||||
float curvature = unpackCurvature(diffusedCurvature.w);
|
||||
|
||||
|
||||
// Transform directions to worldspace
|
||||
vec3 fragNormal = vec3((normal));
|
||||
|
||||
// Get light
|
||||
Light light = getLight();
|
||||
vec3 fresnel = vec3(0.028); // Default Di-electric fresnel value for skin
|
||||
float metallic = 0.0;
|
||||
|
||||
vec3 fragLightDir = -normalize(getLightDirection(light));
|
||||
|
||||
|
||||
vec3 brdf = evalSkinBRDF(fragLightDir, fragNormal, midNormal, lowNormal, curvature);
|
||||
|
||||
return brdf;
|
||||
}
|
||||
|
||||
vec3 drawScatteringTableUV(vec2 cursor, vec2 texcoord) {
|
||||
DeferredFragment fragment = unpackDeferredFragmentNoPosition(cursor);
|
||||
|
||||
vec3 normal = fragment.normal; // .getWorldNormal(varTexCoord0);
|
||||
vec4 blurredCurvature = fetchCurvature(cursor);
|
||||
vec4 diffusedCurvature = fetchDiffusedCurvature(cursor);
|
||||
vec3 midNormal = normalize((blurredCurvature.xyz - 0.5f) * 2.0f);
|
||||
vec3 lowNormal = normalize((diffusedCurvature.xyz - 0.5f) * 2.0f);
|
||||
float curvature = unpackCurvature(diffusedCurvature.w);
|
||||
|
||||
// Get light
|
||||
Light light = getLight();
|
||||
vec3 fresnel = vec3(0.028); // Default Di-electric fresnel value for skin
|
||||
|
||||
vec3 fragLightDir = -normalize(getLightDirection(light));
|
||||
|
||||
vec3 bentNdotL = evalScatteringBentNdotL(normal, midNormal, lowNormal, fragLightDir);
|
||||
|
||||
// return clamp(bentNdotL * 0.5 + 0.5, 0.0, 1.0);
|
||||
|
||||
vec3 distance = vec3(0.0);
|
||||
for (int c = 0; c < 3; c++) {
|
||||
vec2 BRDFuv = vec2(clamp(bentNdotL[c] * 0.5 + 0.5, 0.0, 1.0), clamp(2 * curvature, 0.0, 1.0));
|
||||
vec2 delta = BRDFuv - texcoord;
|
||||
distance[c] = 1.0 - dot(delta, delta);
|
||||
}
|
||||
|
||||
distance *= distance;
|
||||
|
||||
float threshold = 0.999;
|
||||
vec3 color = vec3(0.0);
|
||||
bool keep = false;
|
||||
for (int c = 0; c < 3; c++) {
|
||||
if (distance[c] > threshold) {
|
||||
keep = true;
|
||||
color[c] += 1.0;
|
||||
}
|
||||
}
|
||||
|
||||
if (!keep)
|
||||
discard;
|
||||
|
||||
return color;
|
||||
}
|
||||
|
||||
void main(void) {
|
||||
// _fragColor = vec4(evalScatteringBRDF(varTexCoord0), 1.0);
|
||||
// _fragColor = vec4(uniformCursorTexcoord, 0.0, 1.0);
|
||||
|
||||
_fragColor = vec4(drawScatteringTableUV(uniformCursorTexcoord, varTexCoord0), 1.0);
|
||||
}
|
||||
|
||||
|
27 libraries/render-utils/src/subsurfaceScattering_makeLUT.slf Normal file
@@ -0,0 +1,27 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
//  Created by Sam Gateau on 6/8/16.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include SubsurfaceScattering.slh@>
<$declareSubsurfaceScatteringProfileSource()$>
<$declareSubsurfaceScatteringIntegrate(2000)$>

in vec2 varTexCoord0;
out vec4 outFragColor;

void main(void) {
    // Lookup by: x: NDotL  y: 1 / r
    //float y = 2.0 * 1.0 / ((j + 1.0) / (double)height);
    //float x = ((i / (double)width) * 2.0) - 1.0;

    outFragColor = vec4(integrate(varTexCoord0.x * 2.0 - 1.0, 2.0 / varTexCoord0.y), 1.0);
}
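The bake above and fetchBRDF() in SubsurfaceScattering.slh share one coordinate convention: x encodes NdotL remapped from [-1,1] to [0,1], and y behaves like 1/r, so the skin radius per texel is 2/y. The sketch below is illustrative only (not part of this commit) and assumes glm.

// Sketch of the shared LUT parameterization.
#include <glm/glm.hpp>

// Lookup side (mirrors fetchBRDF): u = NdotL*0.5+0.5, v = clamp(2*tunedCurvature, 0, 1).
static glm::vec2 brdfLookupUV(float NdotL, float tunedCurvature) {
    return { glm::clamp(NdotL * 0.5f + 0.5f, 0.0f, 1.0f),
             glm::clamp(2.0f * tunedCurvature, 0.0f, 1.0f) };
}

// Bake side (mirrors this shader): which (cosTheta, skinRadius) a texel center represents.
static void lutTexelToDomain(glm::vec2 uv, float& cosTheta, float& skinRadius) {
    cosTheta = uv.x * 2.0f - 1.0f;
    skinRadius = 2.0f / uv.y; // small v means a large radius, i.e. a nearly flat surface
}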
@@ -0,0 +1,20 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
//  Created by Sam Gateau on 6/27/16.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include SubsurfaceScattering.slh@>
<$declareSubsurfaceScatteringGenerateProfileMap()$>

in vec2 varTexCoord0;
out vec4 outFragColor;

void main(void) {
    outFragColor = vec4(generateProfile(varTexCoord0.xy), 1.0);
}
@@ -0,0 +1,26 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
//  Created by Sam Gateau on 6/30/16.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//


in vec2 varTexCoord0;
out vec4 outFragColor;

float specularBeckmann(float ndoth, float roughness) {
    float alpha = acos(ndoth);
    float ta = tan(alpha);
    float val = 1.0 / (roughness * roughness * pow(ndoth, 4.0)) * exp(-(ta * ta) / (roughness * roughness));
    return val;
}

void main(void) {
    outFragColor = vec4(vec3(0.5 * pow(specularBeckmann(varTexCoord0.x, varTexCoord0.y), 0.1)), 1.0);
}
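This bake pass and fetchSpecularBeckmann() in SubsurfaceScattering.slh form an encode/decode pair: the texture stores t = 0.5 * pow(val, 0.1), and the lookup reconstructs pow(2*t, 10) == val. The sketch below is illustrative only (not part of this commit) and just checks that round trip.

// Round-trip check of the Beckmann compression scheme.
#include <cmath>
#include <cassert>

int main() {
    float val = 3.7f;                                    // some Beckmann NDF value
    float stored = 0.5f * std::pow(val, 0.1f);           // what this bake shader writes
    float decoded = std::pow(2.0f * stored, 10.0f);      // what fetchSpecularBeckmann computes
    assert(std::fabs(decoded - val) < 1e-3f * val);      // the lookup recovers the original value
    return 0;
}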
237
libraries/render-utils/src/surfaceGeometry_makeCurvature.slf
Normal file
237
libraries/render-utils/src/surfaceGeometry_makeCurvature.slf
Normal file
|
@ -0,0 +1,237 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 6/3/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include DeferredTransform.slh@>
|
||||
<$declareDeferredFrameTransform()$>
|
||||
|
||||
struct SurfaceGeometryParams {
|
||||
// Resolution info
|
||||
vec4 resolutionInfo;
|
||||
// Curvature algorithm
|
||||
vec4 curvatureInfo;
|
||||
};
|
||||
|
||||
uniform surfaceGeometryParamsBuffer {
|
||||
SurfaceGeometryParams params;
|
||||
};
|
||||
|
||||
float getCurvatureDepthThreshold() {
|
||||
return params.curvatureInfo.x;
|
||||
}
|
||||
|
||||
float getCurvatureBasisScale() {
|
||||
return params.curvatureInfo.y;
|
||||
}
|
||||
|
||||
float getCurvatureScale() {
|
||||
return params.curvatureInfo.w;
|
||||
}
|
||||
|
||||
|
||||
uniform sampler2D linearDepthMap;
|
||||
float getZEye(ivec2 pixel) {
|
||||
return -texelFetch(linearDepthMap, pixel, 0).x;
|
||||
}
|
||||
float getZEyeLinear(vec2 texcoord) {
|
||||
return -texture(linearDepthMap, texcoord).x;
|
||||
}
|
||||
|
||||
vec2 signNotZero(vec2 v) {
|
||||
return vec2((v.x >= 0.0) ? +1.0 : -1.0, (v.y >= 0.0) ? +1.0 : -1.0);
|
||||
}
|
||||
|
||||
vec3 oct_to_float32x3(in vec2 e) {
|
||||
vec3 v = vec3(e.xy, 1.0 - abs(e.x) - abs(e.y));
|
||||
if (v.z < 0) {
|
||||
v.xy = (1.0 - abs(v.yx)) * signNotZero(v.xy);
|
||||
}
|
||||
return normalize(v);
|
||||
}
|
||||
|
||||
vec2 unorm8x3_to_snorm12x2(vec3 u) {
|
||||
u *= 255.0;
|
||||
u.y *= (1.0 / 16.0);
|
||||
vec2 s = vec2( u.x * 16.0 + floor(u.y),
|
||||
fract(u.y) * (16.0 * 256.0) + u.z);
|
||||
return clamp(s * (1.0 / 2047.0) - 1.0, vec2(-1.0), vec2(1.0));
|
||||
}
|
||||
vec3 unpackNormal(in vec3 p) {
|
||||
return oct_to_float32x3(unorm8x3_to_snorm12x2(p));
|
||||
}
|
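The curvature pass reads normals that were packed as two 12-bit snorm components spread across three 8-bit channels, then decoded through the octahedral mapping above. A CPU-side mirror of that decode can be handy when inspecting a captured normal buffer; this is a direct translation of the GLSL in this file (glm is assumed only because the engine already depends on it):

#include <cmath>
#include <glm/glm.hpp>

static glm::vec2 signNotZeroRef(const glm::vec2& v) {
    return glm::vec2(v.x >= 0.0f ? 1.0f : -1.0f, v.y >= 0.0f ? 1.0f : -1.0f);
}

glm::vec3 unpackNormalRef(const glm::vec3& packed) {
    // unorm8x3 -> snorm12x2
    glm::vec3 u = packed * 255.0f;
    u.y *= 1.0f / 16.0f;
    glm::vec2 s(u.x * 16.0f + std::floor(u.y),
                (u.y - std::floor(u.y)) * (16.0f * 256.0f) + u.z);
    glm::vec2 e = glm::clamp(s * (1.0f / 2047.0f) - 1.0f, glm::vec2(-1.0f), glm::vec2(1.0f));

    // octahedral -> unit vector
    glm::vec3 v(e.x, e.y, 1.0f - std::abs(e.x) - std::abs(e.y));
    if (v.z < 0.0f) {
        glm::vec2 folded = (1.0f - glm::abs(glm::vec2(v.y, v.x))) * signNotZeroRef(glm::vec2(v.x, v.y));
        v.x = folded.x;
        v.y = folded.y;
    }
    return glm::normalize(v);
}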
||||
|
||||
vec2 sideToFrameTexcoord(vec2 side, vec2 texcoordPos) {
|
||||
return vec2((texcoordPos.x + side.x) * side.y, texcoordPos.y);
|
||||
}
|
||||
|
||||
uniform sampler2D normalMap;
|
||||
|
||||
vec3 getRawNormal(vec2 texcoord) {
|
||||
return texture(normalMap, texcoord).xyz;
|
||||
}
|
||||
|
||||
vec3 getWorldNormal(vec2 texcoord) {
|
||||
vec3 rawNormal = getRawNormal(texcoord);
|
||||
return unpackNormal(rawNormal);
|
||||
}
|
||||
|
||||
vec3 getWorldNormalDiff(vec2 texcoord, vec2 delta) {
|
||||
return getWorldNormal(texcoord + delta) - getWorldNormal(texcoord - delta);
|
||||
}
|
||||
|
||||
float getEyeDepthDiff(vec2 texcoord, vec2 delta) {
|
||||
return getZEyeLinear(texcoord + delta) - getZEyeLinear(texcoord - delta);
|
||||
}
|
||||
|
||||
|
||||
|
||||
out vec4 outFragColor;
|
||||
|
||||
void main(void) {
|
||||
// Pixel being shaded
|
||||
ivec2 pixelPos;
|
||||
vec2 texcoordPos;
|
||||
ivec4 stereoSide;
|
||||
ivec2 framePixelPos = getPixelPosTexcoordPosAndSide(gl_FragCoord.xy, pixelPos, texcoordPos, stereoSide);
|
||||
vec2 stereoSideClip = vec2(stereoSide.x, (isStereo() ? 0.5 : 1.0));
|
||||
vec2 frameTexcoordPos = sideToFrameTexcoord(stereoSideClip, texcoordPos);
|
||||
|
||||
// Fetch the z under the pixel (stereo or not)
|
||||
float Zeye = getZEye(framePixelPos);
|
||||
|
||||
float nearPlaneScale = 0.5 * getProjectionNear();
|
||||
|
||||
vec3 worldNormal = getWorldNormal(frameTexcoordPos);
|
||||
|
||||
// The position of the pixel fragment in Eye space then in world space
|
||||
vec3 eyePos = evalEyePositionFromZeye(stereoSide.x, Zeye, texcoordPos);
|
||||
vec3 worldPos = (frameTransform._viewInverse * vec4(eyePos, 1.0)).xyz;
|
||||
|
||||
if (texcoordPos.y > 0.5) {
|
||||
outFragColor = vec4(fract(10.0 * worldPos.xyz), 1.0);
|
||||
} else {
|
||||
outFragColor = vec4(fract(10.0 * eyePos.xyz), 1.0);
|
||||
}
|
||||
// return;
|
||||
|
||||
// Calculate the perspective scale.
|
||||
// Clamp to 0.5
|
||||
// float perspectiveScale = max(0.5, (-getProjScaleEye() / Zeye));
|
||||
float perspectiveScale = max(0.5, (-getCurvatureBasisScale() * getProjectionNear() / Zeye));
|
||||
|
||||
// Calculate dF/du and dF/dv
|
||||
vec2 viewportScale = perspectiveScale * getInvWidthHeight();
|
||||
vec2 du = vec2( viewportScale.x * (stereoSide.w > 0.0 ? 0.5 : 1.0), 0.0f );
|
||||
vec2 dv = vec2( 0.0f, viewportScale.y );
|
||||
|
||||
vec4 dFdu = vec4(getWorldNormalDiff(frameTexcoordPos, du), getEyeDepthDiff(frameTexcoordPos, du));
|
||||
vec4 dFdv = vec4(getWorldNormalDiff(frameTexcoordPos, dv), getEyeDepthDiff(frameTexcoordPos, dv));
|
||||
|
||||
float threshold = getCurvatureDepthThreshold();
|
||||
dFdu *= step(abs(dFdu.w), threshold);
|
||||
dFdv *= step(abs(dFdv.w), threshold);
|
||||
|
||||
// Calculate ( du/dx, du/dy, du/dz ) and ( dv/dx, dv/dy, dv/dz )
|
||||
// Eval px, py, pz world positions of the basis centered on the world pos of the fragment
|
||||
float axeLength = nearPlaneScale;
|
||||
|
||||
vec3 ax = (frameTransform._view[0].xyz * axeLength);
|
||||
vec3 ay = (frameTransform._view[1].xyz * axeLength);
|
||||
vec3 az = (frameTransform._view[2].xyz * axeLength);
|
||||
|
||||
vec4 px = vec4(eyePos + ax, 0.0);
|
||||
vec4 py = vec4(eyePos + ay, 0.0);
|
||||
vec4 pz = vec4(eyePos + az, 0.0);
|
||||
|
||||
|
||||
if (texcoordPos.y > 0.5) {
|
||||
outFragColor = vec4(fract(px.xyz), 1.0);
|
||||
} else {
|
||||
outFragColor = vec4(fract(eyePos.xyz), 1.0);
|
||||
}
|
||||
// return;
|
||||
|
||||
|
||||
/* In case the axis end point goes behind the mid-way near plane; this shouldn't happen
|
||||
if (px.z >= -nearPlaneScale) {
|
||||
outFragColor = vec4(1.0, 0.0, 0.0, 1.0);
|
||||
return;
|
||||
} else if (py.z >= -nearPlaneScale) {
|
||||
outFragColor = vec4(0.0, 1.0, 0.0, 1.0);
|
||||
return;
|
||||
} else if (pz.z >= -nearPlaneScale) {
|
||||
outFragColor = vec4(0.0, 0.0, 1.0, 1.0);
|
||||
return;
|
||||
}*/
|
||||
|
||||
|
||||
// Project px, py pz to homogeneous clip space
|
||||
// mat4 viewProj = getProjection(stereoSide.x);
|
||||
mat4 viewProj = getProjectionMono();
|
||||
px = viewProj * px;
|
||||
py = viewProj * py;
|
||||
pz = viewProj * pz;
|
||||
|
||||
|
||||
// then to normalized clip space
|
||||
px.xy /= px.w;
|
||||
py.xy /= py.w;
|
||||
pz.xy /= pz.w;
|
||||
|
||||
vec2 nclipPos = (texcoordPos - 0.5) * 2.0;
|
||||
|
||||
|
||||
//vec4 clipPos = frameTransform._projection[stereoSide.x] * vec4(eyePos, 1.0);
|
||||
vec4 clipPos = getProjectionMono() * vec4(eyePos, 1.0);
|
||||
nclipPos = clipPos.xy / clipPos.w;
|
||||
|
||||
if (texcoordPos.y > 0.5) {
|
||||
// outFragColor = vec4(fract(10.0 * worldPos.xyz), 1.0);
|
||||
outFragColor = vec4(fract(10.0 * (nclipPos)), 0.0, 1.0);
|
||||
|
||||
} else {
|
||||
outFragColor = vec4(fract(10.0 * (clipPos.xy / clipPos.w)), 0.0, 1.0);
|
||||
// outFragColor = vec4(nclipPos * 0.5 + 0.5, 0.0, 1.0);
|
||||
}
|
||||
//return;
|
||||
|
||||
|
||||
float pixPerspectiveScaleInv = 1.0 / (perspectiveScale);
|
||||
px.xy = (px.xy - nclipPos) * pixPerspectiveScaleInv;
|
||||
py.xy = (py.xy - nclipPos) * pixPerspectiveScaleInv;
|
||||
pz.xy = (pz.xy - nclipPos) * pixPerspectiveScaleInv;
|
||||
|
||||
if (texcoordPos.y > 0.5) {
|
||||
// outFragColor = vec4(fract(10.0 * worldPos.xyz), 1.0);
|
||||
outFragColor = vec4(fract(10.0 * (px.xy)), 0.0, 1.0);
|
||||
|
||||
} else {
|
||||
outFragColor = vec4(fract(10.0 * (py.xy)), 0.0, 1.0);
|
||||
// outFragColor = vec4(nclipPos * 0.5 + 0.5, 0.0, 1.0);
|
||||
}
|
||||
// return;
|
||||
|
||||
// Calculate dF/dx, dF/dy and dF/dz using chain rule
|
||||
vec4 dFdx = dFdu * px.x + dFdv * px.y;
|
||||
vec4 dFdy = dFdu * py.x + dFdv * py.y;
|
||||
vec4 dFdz = dFdu * pz.x + dFdv * pz.y;
|
||||
|
||||
vec3 trace = vec3(dFdx.x, dFdy.y, dFdz.z);
|
||||
|
||||
if (dot(trace, trace) > params.curvatureInfo.w) {
|
||||
outFragColor = vec4(dFdx.x, dFdy.y, dFdz.z, 1.0);
|
||||
return;
|
||||
}
|
||||
|
||||
// Calculate the mean curvature
|
||||
float meanCurvature = ((trace.x + trace.y + trace.z) * 0.33333333333333333) * params.curvatureInfo.w;
|
||||
|
||||
outFragColor = vec4(vec3(worldNormal + 1.0) * 0.5, (meanCurvature + 1.0) * 0.5);
|
||||
}
|
|
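Condensed, the curvature estimate at the end of main() is the trace of the Jacobian of the world-space normal, i.e. the divergence of the normal field, averaged over the three axes and scaled by the user-tunable curvatureInfo.w; everything before it exists to build dFdx, dFdy and dFdz in a perspective-correct way. A one-function restatement of that last step (sketch only, glm types assumed):

#include <glm/glm.hpp>

float meanCurvatureFromJacobian(const glm::vec4& dFdx, const glm::vec4& dFdy, const glm::vec4& dFdz,
                                float curvatureScale) {
    glm::vec3 trace(dFdx.x, dFdy.y, dFdz.z);    // diagonal of dN/d(x, y, z)
    return (trace.x + trace.y + trace.z) * (1.0f / 3.0f) * curvatureScale;
}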
@ -0,0 +1,25 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 6/3/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include DeferredTransform.slh@>
|
||||
<$declareDeferredFrameTransform()$>
|
||||
|
||||
|
||||
uniform sampler2D depthMap;
|
||||
|
||||
out vec4 outFragColor;
|
||||
|
||||
void main(void) {
|
||||
float Zdb = texelFetch(depthMap, ivec2(gl_FragCoord.xy), 0).x;
|
||||
float Zeye = -evalZeyeFromZdb(Zdb);
|
||||
outFragColor = vec4(Zeye, 0.0, 0.0, 1.0);
|
||||
}
|
||||
|
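The small pass above linearizes the hardware depth buffer through evalZeyeFromZdb(), which lives in DeferredTransform.slh and is not part of this diff. For a standard OpenGL perspective projection the textbook relation is the one below; treat it as a generic reference, not necessarily the engine's exact formula:

#include <cmath>

float zEyeFromZdb(float zdb, float zNear, float zFar) {
    float zNdc = zdb * 2.0f - 1.0f;   // window-space depth [0, 1] -> NDC [-1, 1]
    return (2.0f * zNear * zFar) / (zFar + zNear - zNdc * (zFar - zNear));   // positive eye-space distance
}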
354
libraries/render/src/render/BlurTask.cpp
Normal file
|
@ -0,0 +1,354 @@
|
|||
//
|
||||
// BlurTask.cpp
|
||||
// render/src/render
|
||||
//
|
||||
// Created by Sam Gateau on 6/7/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "BlurTask.h"
|
||||
|
||||
#include <gpu/Context.h>
|
||||
#include <gpu/StandardShaderLib.h>
|
||||
|
||||
#include "blurGaussianV_frag.h"
|
||||
#include "blurGaussianH_frag.h"
|
||||
|
||||
#include "blurGaussianDepthAwareV_frag.h"
|
||||
#include "blurGaussianDepthAwareH_frag.h"
|
||||
|
||||
using namespace render;
|
||||
|
||||
enum BlurShaderBufferSlots {
|
||||
BlurTask_ParamsSlot = 0,
|
||||
};
|
||||
enum BlurShaderMapSlots {
|
||||
BlurTask_SourceSlot = 0,
|
||||
BlurTask_DepthSlot,
|
||||
};
|
||||
|
||||
const float BLUR_NUM_SAMPLES = 7.0f;
|
||||
|
||||
BlurParams::BlurParams() {
|
||||
Params params;
|
||||
_parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Params), (const gpu::Byte*) ¶ms));
|
||||
}
|
||||
|
||||
void BlurParams::setWidthHeight(int width, int height, bool isStereo) {
|
||||
auto resolutionInfo = _parametersBuffer.get<Params>().resolutionInfo;
|
||||
bool resChanged = false;
|
||||
if (width != resolutionInfo.x || height != resolutionInfo.y) {
|
||||
resChanged = true;
|
||||
_parametersBuffer.edit<Params>().resolutionInfo = glm::vec4((float) width, (float) height, 1.0f / (float) width, 1.0f / (float) height);
|
||||
}
|
||||
|
||||
auto stereoInfo = _parametersBuffer.get<Params>().stereoInfo;
|
||||
if (isStereo || resChanged) {
|
||||
_parametersBuffer.edit<Params>().stereoInfo = glm::vec4((float)width, (float)height, 1.0f / (float)width, 1.0f / (float)height);
|
||||
}
|
||||
}
|
||||
|
||||
void BlurParams::setFilterRadiusScale(float scale) {
|
||||
auto filterInfo = _parametersBuffer.get<Params>().filterInfo;
|
||||
if (scale != filterInfo.x) {
|
||||
_parametersBuffer.edit<Params>().filterInfo.x = scale;
|
||||
_parametersBuffer.edit<Params>().filterInfo.y = scale / BLUR_NUM_SAMPLES;
|
||||
}
|
||||
}
|
||||
|
||||
void BlurParams::setDepthPerspective(float oneOverTan2FOV) {
|
||||
auto depthInfo = _parametersBuffer.get<Params>().depthInfo;
|
||||
if (oneOverTan2FOV != depthInfo.w) {
|
||||
_parametersBuffer.edit<Params>().depthInfo.w = oneOverTan2FOV;
|
||||
}
|
||||
}
|
||||
|
||||
void BlurParams::setDepthThreshold(float threshold) {
|
||||
auto depthInfo = _parametersBuffer.get<Params>().depthInfo;
|
||||
if (threshold != depthInfo.x) {
|
||||
_parametersBuffer.edit<Params>().depthInfo.x = threshold;
|
||||
}
|
||||
}
|
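A minimal usage sketch for the setters above; the resolution and filter values are illustrative only. Note that setFilterRadiusScale() also stores scale / BLUR_NUM_SAMPLES so the shaders can step between the seven taps, and setDepthPerspective() expects the same projection[1][1] value that BlurGaussianDepthAware::run() passes:

#include <memory>
#include <glm/glm.hpp>
#include "BlurTask.h"

std::shared_ptr<render::BlurParams> makeExampleBlurParams(const glm::mat4& projection) {
    auto params = std::make_shared<render::BlurParams>();
    params->setWidthHeight(1920, 1080, false);     // also refreshes stereoInfo when the size changes
    params->setFilterRadiusScale(1.5f);            // filterInfo becomes (1.5, 1.5 / 7, ...)
    params->setDepthThreshold(0.1f);               // only used by the depth-aware variants
    params->setDepthPerspective(projection[1][1]); // 1 / tan(fovY / 2)
    return params;
}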
||||
|
||||
BlurInOutResource::BlurInOutResource(bool generateOutputFramebuffer) :
|
||||
_generateOutputFramebuffer(generateOutputFramebuffer)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
bool BlurInOutResource::updateResources(const gpu::FramebufferPointer& sourceFramebuffer, Resources& blurringResources) {
|
||||
if (!sourceFramebuffer) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!_blurredFramebuffer) {
|
||||
_blurredFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
|
||||
// attach depthStencil if present in source
|
||||
if (sourceFramebuffer->hasDepthStencil()) {
|
||||
_blurredFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat());
|
||||
}
|
||||
auto blurringSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT);
|
||||
auto blurringTarget = gpu::TexturePointer(gpu::Texture::create2D(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), blurringSampler));
|
||||
_blurredFramebuffer->setRenderBuffer(0, blurringTarget);
|
||||
} else {
|
||||
 // It would be easier to just call resize on the blurredFramebuffer and let it do the work if needed, but the source might lose its depth buffer when doing so
|
||||
if ((_blurredFramebuffer->getWidth() != sourceFramebuffer->getWidth()) || (_blurredFramebuffer->getHeight() != sourceFramebuffer->getHeight())) {
|
||||
_blurredFramebuffer->resize(sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), sourceFramebuffer->getNumSamples());
|
||||
if (sourceFramebuffer->hasDepthStencil()) {
|
||||
_blurredFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
blurringResources.sourceTexture = sourceFramebuffer->getRenderBuffer(0);
|
||||
blurringResources.blurringFramebuffer = _blurredFramebuffer;
|
||||
blurringResources.blurringTexture = _blurredFramebuffer->getRenderBuffer(0);
|
||||
|
||||
if (_generateOutputFramebuffer) {
|
||||
 // The job outputs the blur result into a new framebuffer spawned here.
|
||||
 // Let's make sure it's ready for this
|
||||
if (!_outputFramebuffer) {
|
||||
_outputFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
|
||||
// attach depthStencil if present in source
|
||||
if (sourceFramebuffer->hasDepthStencil()) {
|
||||
_outputFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat());
|
||||
}
|
||||
auto blurringSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT);
|
||||
auto blurringTarget = gpu::TexturePointer(gpu::Texture::create2D(sourceFramebuffer->getRenderBuffer(0)->getTexelFormat(), sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), blurringSampler));
|
||||
_outputFramebuffer->setRenderBuffer(0, blurringTarget);
|
||||
} else {
|
||||
if ((_outputFramebuffer->getWidth() != sourceFramebuffer->getWidth()) || (_outputFramebuffer->getHeight() != sourceFramebuffer->getHeight())) {
|
||||
_outputFramebuffer->resize(sourceFramebuffer->getWidth(), sourceFramebuffer->getHeight(), sourceFramebuffer->getNumSamples());
|
||||
if (sourceFramebuffer->hasDepthStencil()) {
|
||||
_outputFramebuffer->setDepthStencilBuffer(sourceFramebuffer->getDepthStencilBuffer(), sourceFramebuffer->getDepthStencilBufferFormat());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Should be good to use the output Framebuffer as final
|
||||
blurringResources.finalFramebuffer = _outputFramebuffer;
|
||||
} else {
|
||||
 // Just reuse the input as the output and blur in place.
|
||||
blurringResources.finalFramebuffer = sourceFramebuffer;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
BlurGaussian::BlurGaussian(bool generateOutputFramebuffer) :
|
||||
_inOutResources(generateOutputFramebuffer)
|
||||
{
|
||||
_parameters = std::make_shared<BlurParams>();
|
||||
}
|
||||
|
||||
gpu::PipelinePointer BlurGaussian::getBlurVPipeline() {
|
||||
if (!_blurVPipeline) {
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
auto ps = gpu::Shader::createPixel(std::string(blurGaussianV_frag));
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("blurParamsBuffer"), BlurTask_ParamsSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("sourceMap"), BlurTask_SourceSlot));
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
 // Stencil test the curvature pass for object pixels only, not the background
|
||||
state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::NOT_EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));
|
||||
|
||||
_blurVPipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
return _blurVPipeline;
|
||||
}
|
||||
|
||||
gpu::PipelinePointer BlurGaussian::getBlurHPipeline() {
|
||||
if (!_blurHPipeline) {
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
auto ps = gpu::Shader::createPixel(std::string(blurGaussianH_frag));
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("blurParamsBuffer"), BlurTask_ParamsSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("sourceMap"), BlurTask_SourceSlot));
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
 // Stencil test the curvature pass for object pixels only, not the background
|
||||
state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::NOT_EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));
|
||||
|
||||
_blurHPipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
return _blurHPipeline;
|
||||
}
|
||||
|
||||
void BlurGaussian::configure(const Config& config) {
|
||||
_parameters->setFilterRadiusScale(config.filterScale);
|
||||
}
|
||||
|
||||
|
||||
void BlurGaussian::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& blurredFramebuffer) {
|
||||
assert(renderContext->args);
|
||||
assert(renderContext->args->hasViewFrustum());
|
||||
|
||||
RenderArgs* args = renderContext->args;
|
||||
|
||||
|
||||
BlurInOutResource::Resources blurringResources;
|
||||
if (!_inOutResources.updateResources(sourceFramebuffer, blurringResources)) {
|
||||
// early exit if no valid blurring resources
|
||||
return;
|
||||
}
|
||||
blurredFramebuffer = blurringResources.finalFramebuffer;
|
||||
|
||||
auto blurVPipeline = getBlurVPipeline();
|
||||
auto blurHPipeline = getBlurHPipeline();
|
||||
|
||||
_parameters->setWidthHeight(args->_viewport.z, args->_viewport.w, args->_context->isStereo());
|
||||
|
||||
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
|
||||
batch.setUniformBuffer(BlurTask_ParamsSlot, _parameters->_parametersBuffer);
|
||||
|
||||
batch.setFramebuffer(blurringResources.blurringFramebuffer);
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
|
||||
batch.setPipeline(blurVPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, blurringResources.sourceTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
batch.setFramebuffer(blurringResources.finalFramebuffer);
|
||||
if (_inOutResources._generateOutputFramebuffer) {
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
}
|
||||
|
||||
batch.setPipeline(blurHPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, blurringResources.blurringTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, nullptr);
|
||||
batch.setUniformBuffer(BlurTask_ParamsSlot, nullptr);
|
||||
});
|
||||
}
|
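The run() above always performs the same two passes: the vertical pipeline writes into the intermediate _blurredFramebuffer, then the horizontal pipeline reads that result and writes into the final target, which is either the source framebuffer (in-place blur) or a job-owned output framebuffer depending on the constructor flag. A small sketch of the two configurations (names other than the class itself are illustrative):

#include "BlurTask.h"

void exampleBlurConstruction() {
    render::BlurGaussian blurInPlace;             // default: the source framebuffer is blurred in place
    render::BlurGaussian blurToNewTarget(true);   // the result is written to a separate output framebuffer
}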
||||
|
||||
|
||||
|
||||
BlurGaussianDepthAware::BlurGaussianDepthAware(bool generateOutputFramebuffer) :
|
||||
_inOutResources(generateOutputFramebuffer)
|
||||
{
|
||||
_parameters = std::make_shared<BlurParams>();
|
||||
}
|
||||
|
||||
gpu::PipelinePointer BlurGaussianDepthAware::getBlurVPipeline() {
|
||||
if (!_blurVPipeline) {
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
auto ps = gpu::Shader::createPixel(std::string(blurGaussianDepthAwareV_frag));
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("blurParamsBuffer"), BlurTask_ParamsSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("sourceMap"), BlurTask_SourceSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), BlurTask_DepthSlot));
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
 // Stencil test the curvature pass for object pixels only, not the background
|
||||
// state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::NOT_EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));
|
||||
|
||||
_blurVPipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
return _blurVPipeline;
|
||||
}
|
||||
|
||||
gpu::PipelinePointer BlurGaussianDepthAware::getBlurHPipeline() {
|
||||
if (!_blurHPipeline) {
|
||||
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
|
||||
auto ps = gpu::Shader::createPixel(std::string(blurGaussianDepthAwareH_frag));
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
gpu::Shader::BindingSet slotBindings;
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("blurParamsBuffer"), BlurTask_ParamsSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("sourceMap"), BlurTask_SourceSlot));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), BlurTask_DepthSlot));
|
||||
gpu::Shader::makeProgram(*program, slotBindings);
|
||||
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
 // Stencil test the curvature pass for object pixels only, not the background
|
||||
// state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::NOT_EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));
|
||||
|
||||
_blurHPipeline = gpu::Pipeline::create(program, state);
|
||||
}
|
||||
|
||||
return _blurHPipeline;
|
||||
}
|
||||
|
||||
void BlurGaussianDepthAware::configure(const Config& config) {
|
||||
_parameters->setFilterRadiusScale(config.filterScale);
|
||||
_parameters->setDepthThreshold(config.depthThreshold);
|
||||
}
|
||||
|
||||
|
||||
void BlurGaussianDepthAware::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& SourceAndDepth, gpu::FramebufferPointer& blurredFramebuffer) {
|
||||
assert(renderContext->args);
|
||||
assert(renderContext->args->hasViewFrustum());
|
||||
|
||||
RenderArgs* args = renderContext->args;
|
||||
|
||||
auto& sourceFramebuffer = SourceAndDepth.get0();
|
||||
auto& depthTexture = SourceAndDepth.get1();
|
||||
|
||||
BlurInOutResource::Resources blurringResources;
|
||||
if (!_inOutResources.updateResources(sourceFramebuffer, blurringResources)) {
|
||||
// early exit if no valid blurring resources
|
||||
return;
|
||||
}
|
||||
|
||||
blurredFramebuffer = blurringResources.finalFramebuffer;
|
||||
|
||||
auto blurVPipeline = getBlurVPipeline();
|
||||
auto blurHPipeline = getBlurHPipeline();
|
||||
|
||||
_parameters->setWidthHeight(args->_viewport.z, args->_viewport.w, args->_context->isStereo());
|
||||
_parameters->setDepthPerspective(args->getViewFrustum().getProjection()[1][1]);
|
||||
|
||||
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
|
||||
batch.setUniformBuffer(BlurTask_ParamsSlot, _parameters->_parametersBuffer);
|
||||
|
||||
batch.setResourceTexture(BlurTask_DepthSlot, depthTexture);
|
||||
|
||||
batch.setFramebuffer(blurringResources.blurringFramebuffer);
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
|
||||
batch.setPipeline(blurVPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, blurringResources.sourceTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
batch.setFramebuffer(blurringResources.finalFramebuffer);
|
||||
if (_inOutResources._generateOutputFramebuffer) {
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
}
|
||||
|
||||
batch.setPipeline(blurHPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, blurringResources.blurringTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, nullptr);
|
||||
batch.setResourceTexture(BlurTask_DepthSlot, nullptr);
|
||||
batch.setUniformBuffer(BlurTask_ParamsSlot, nullptr);
|
||||
});
|
||||
}
|
152
libraries/render/src/render/BlurTask.h
Normal file
|
@ -0,0 +1,152 @@
|
|||
//
|
||||
// BlurTask.h
|
||||
// render/src/render
|
||||
//
|
||||
// Created by Sam Gateau on 6/7/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_render_BlurTask_h
|
||||
#define hifi_render_BlurTask_h
|
||||
|
||||
#include "Engine.h"
|
||||
|
||||
namespace render {
|
||||
|
||||
|
||||
class BlurParams {
|
||||
public:
|
||||
|
||||
void setWidthHeight(int width, int height, bool isStereo);
|
||||
|
||||
void setFilterRadiusScale(float scale);
|
||||
|
||||
void setDepthPerspective(float oneOverTan2FOV);
|
||||
void setDepthThreshold(float threshold);
|
||||
|
||||
// Class describing the uniform buffer with all the parameters common to the blur shaders
|
||||
class Params {
|
||||
public:
|
||||
// Resolution info (width, height, inverse of width, inverse of height)
|
||||
glm::vec4 resolutionInfo{ 0.0f, 0.0f, 0.0f, 0.0f };
|
||||
|
||||
 // Filter info (radius scale, radius scale / num taps)
|
||||
glm::vec4 filterInfo{ 1.0f, 0.0f, 0.0f, 0.0f };
|
||||
|
||||
 // Depth info (depth threshold in x, 1 / tan(fov / 2) in w)
|
||||
glm::vec4 depthInfo{ 1.0f, 0.0f, 0.0f, 0.0f };
|
||||
|
||||
// stereo info if blurring a stereo render
|
||||
glm::vec4 stereoInfo{ 0.0f };
|
||||
|
||||
Params() {}
|
||||
};
|
||||
gpu::BufferView _parametersBuffer;
|
||||
|
||||
BlurParams();
|
||||
};
|
||||
using BlurParamsPointer = std::shared_ptr<BlurParams>;
|
||||
|
||||
class BlurInOutResource {
|
||||
public:
|
||||
BlurInOutResource(bool generateOutputFramebuffer = false);
|
||||
|
||||
struct Resources {
|
||||
gpu::TexturePointer sourceTexture;
|
||||
gpu::FramebufferPointer blurringFramebuffer;
|
||||
gpu::TexturePointer blurringTexture;
|
||||
gpu::FramebufferPointer finalFramebuffer;
|
||||
};
|
||||
|
||||
bool updateResources(const gpu::FramebufferPointer& sourceFramebuffer, Resources& resources);
|
||||
|
||||
gpu::FramebufferPointer _blurredFramebuffer;
|
||||
|
||||
 // The output framebuffer, defined if the job needs to write the result into a new framebuffer rather than in place in the input buffer
|
||||
gpu::FramebufferPointer _outputFramebuffer;
|
||||
bool _generateOutputFramebuffer{ false };
|
||||
};
|
||||
|
||||
|
||||
class BlurGaussianConfig : public Job::Config {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(bool enabled WRITE setEnabled READ isEnabled NOTIFY dirty) // expose enabled flag
|
||||
 Q_PROPERTY(float filterScale MEMBER filterScale NOTIFY dirty) // expose the filter radius scale
|
||||
public:
|
||||
|
||||
BlurGaussianConfig() : Job::Config(true) {}
|
||||
|
||||
float filterScale{ 0.2f };
|
||||
signals:
|
||||
void dirty();
|
||||
|
||||
protected:
|
||||
};
|
||||
|
||||
|
||||
class BlurGaussian {
|
||||
public:
|
||||
using Config = BlurGaussianConfig;
|
||||
using JobModel = Job::ModelIO<BlurGaussian, gpu::FramebufferPointer, gpu::FramebufferPointer, Config>;
|
||||
|
||||
BlurGaussian(bool generateOutputFramebuffer = false);
|
||||
|
||||
void configure(const Config& config);
|
||||
void run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const gpu::FramebufferPointer& sourceFramebuffer, gpu::FramebufferPointer& blurredFramebuffer);
|
||||
|
||||
protected:
|
||||
|
||||
BlurParamsPointer _parameters;
|
||||
|
||||
gpu::PipelinePointer _blurVPipeline;
|
||||
gpu::PipelinePointer _blurHPipeline;
|
||||
|
||||
gpu::PipelinePointer getBlurVPipeline();
|
||||
gpu::PipelinePointer getBlurHPipeline();
|
||||
|
||||
BlurInOutResource _inOutResources;
|
||||
};
|
||||
|
||||
class BlurGaussianDepthAwareConfig : public BlurGaussianConfig {
|
||||
Q_OBJECT
|
||||
 Q_PROPERTY(float depthThreshold MEMBER depthThreshold NOTIFY dirty) // expose the depth threshold
|
||||
public:
|
||||
BlurGaussianDepthAwareConfig() : BlurGaussianConfig() {}
|
||||
|
||||
float depthThreshold{ 1.0f };
|
||||
signals:
|
||||
void dirty();
|
||||
protected:
|
||||
};
|
||||
|
||||
class BlurGaussianDepthAware {
|
||||
public:
|
||||
using Inputs = VaryingSet2<gpu::FramebufferPointer, gpu::TexturePointer>;
|
||||
using Config = BlurGaussianDepthAwareConfig;
|
||||
using JobModel = Job::ModelIO<BlurGaussianDepthAware, Inputs, gpu::FramebufferPointer, Config>;
|
||||
|
||||
BlurGaussianDepthAware(bool generateNewOutput = false);
|
||||
|
||||
void configure(const Config& config);
|
||||
void run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& SourceAndDepth, gpu::FramebufferPointer& blurredFramebuffer);
|
||||
|
||||
protected:
|
||||
|
||||
BlurParamsPointer _parameters;
|
||||
|
||||
gpu::PipelinePointer _blurVPipeline;
|
||||
gpu::PipelinePointer _blurHPipeline;
|
||||
|
||||
gpu::PipelinePointer getBlurVPipeline();
|
||||
gpu::PipelinePointer getBlurHPipeline();
|
||||
|
||||
BlurInOutResource _inOutResources;
|
||||
};
|
||||
|
||||
|
||||
}
|
||||
|
||||
#endif // hifi_render_BlurTask_h
|
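A hedged sketch of feeding the depth-aware blur: its Inputs type bundles the framebuffer to blur with the linear depth texture through VaryingSet2. Only the types come from the header above; the variable names and the wrapper function are placeholders:

#include "BlurTask.h"

render::BlurGaussianDepthAware::Inputs makeBlurInputs(const gpu::FramebufferPointer& sourceFramebuffer,
                                                      const gpu::TexturePointer& linearDepthTexture) {
    return render::BlurGaussianDepthAware::Inputs(render::Varying(sourceFramebuffer),
                                                  render::Varying(linearDepthTexture));
}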
126
libraries/render/src/render/BlurTask.slh
Normal file
|
@ -0,0 +1,126 @@
|
|||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 6/7/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@func declareBlurUniforms()@>
|
||||
|
||||
#define NUM_TAPS 7
|
||||
#define NUM_TAPS_OFFSET 3.0f
|
||||
|
||||
float uniformFilterWidth = 0.05f;
|
||||
|
||||
const float gaussianDistributionCurve[NUM_TAPS] = float[](
|
||||
0.383f, 0.006f, 0.061f, 0.242f, 0.242f, 0.061f, 0.006f
|
||||
);
|
||||
const float gaussianDistributionOffset[NUM_TAPS] = float[](
|
||||
0.0f, -3.0f, -2.0f, -1.0f, 1.0f, 2.0f, 3.0f
|
||||
);
|
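The seven weights above are stored center-first to line up with the offsets (0, -3, -2, -1, +1, +2, +3) and sum to 0.383 + 2 * (0.242 + 0.061 + 0.006) = 1.001, so the kernel is effectively energy-preserving. A quick stand-alone check:

#include <cstdio>

int main() {
    const float weights[7] = { 0.383f, 0.006f, 0.061f, 0.242f, 0.242f, 0.061f, 0.006f };
    float sum = 0.0f;
    for (float w : weights) { sum += w; }
    std::printf("kernel sum = %f\n", sum);   // prints ~1.001
    return 0;
}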
||||
|
||||
struct BlurParameters {
|
||||
vec4 resolutionInfo;
|
||||
vec4 filterInfo;
|
||||
vec4 depthInfo;
|
||||
vec4 stereoInfo;
|
||||
};
|
||||
|
||||
uniform blurParamsBuffer {
|
||||
BlurParameters parameters;
|
||||
};
|
||||
|
||||
vec2 getViewportInvWidthHeight() {
|
||||
return parameters.resolutionInfo.zw;
|
||||
}
|
||||
|
||||
float getFilterScale() {
|
||||
return parameters.filterInfo.x;
|
||||
}
|
||||
|
||||
|
||||
float getDepthThreshold() {
|
||||
return parameters.depthInfo.x;
|
||||
}
|
||||
|
||||
float getDepthPerspective() {
|
||||
return parameters.depthInfo.w;
|
||||
}
|
||||
|
||||
<@endfunc@>
|
||||
|
||||
|
||||
<@func declareBlurGaussian()@>
|
||||
|
||||
<$declareBlurUniforms()$>
|
||||
|
||||
uniform sampler2D sourceMap;
|
||||
|
||||
vec4 pixelShaderGaussian(vec2 texcoord, vec2 direction, vec2 pixelStep) {
|
||||
|
||||
vec4 sampleCenter = texture(sourceMap, texcoord);
|
||||
|
||||
vec2 finalStep = getFilterScale() * direction * pixelStep;
|
||||
vec4 srcBlurred = vec4(0.0);
|
||||
|
||||
for(int i = 0; i < NUM_TAPS; i++) {
|
||||
// Fetch color and depth for current sample.
|
||||
vec2 sampleCoord = texcoord + (gaussianDistributionOffset[i] * finalStep);
|
||||
vec4 srcSample = texture(sourceMap, sampleCoord);
|
||||
// Accumulate.
|
||||
srcBlurred += gaussianDistributionCurve[i] * srcSample;
|
||||
}
|
||||
|
||||
return srcBlurred;
|
||||
}
|
||||
|
||||
<@endfunc@>
|
||||
|
||||
<@func declareBlurGaussianDepthAware()@>
|
||||
|
||||
<$declareBlurUniforms()$>
|
||||
|
||||
uniform sampler2D sourceMap;
|
||||
uniform sampler2D depthMap;
|
||||
|
||||
vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep) {
|
||||
|
||||
float sampleDepth = texture(depthMap, texcoord).x;
|
||||
vec4 sampleCenter = texture(sourceMap, texcoord);
|
||||
|
||||
// Calculate the width scale.
|
||||
float distanceToProjectionWindow = getDepthPerspective();
|
||||
|
||||
float depthThreshold = getDepthThreshold();
|
||||
|
||||
// Calculate the final step to fetch the surrounding pixels.
|
||||
float filterScale = getFilterScale();
|
||||
float scale = distanceToProjectionWindow / sampleDepth;
|
||||
|
||||
vec2 finalStep = filterScale * scale * direction * pixelStep;
|
||||
|
||||
// Accumulate the center sample
|
||||
vec4 srcBlurred = gaussianDistributionCurve[0] * sampleCenter;
|
||||
|
||||
for(int i = 1; i < NUM_TAPS; i++) {
|
||||
// Fetch color and depth for current sample.
|
||||
vec2 sampleCoord = texcoord + (gaussianDistributionOffset[i] * finalStep);
|
||||
float srcDepth = texture(depthMap, sampleCoord).x;
|
||||
vec4 srcSample = texture(sourceMap, sampleCoord);
|
||||
|
||||
|
||||
// If the difference in depth is huge, we lerp color back.
|
||||
float s = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepth - sampleDepth), 0.0, 1.0);
|
||||
// float s = clamp(depthThreshold * distanceToProjectionWindow * filterScale * abs(srcDepth - sampleDepth), 0.0, 1.0);
|
||||
srcSample = mix(srcSample, sampleCenter, s);
|
||||
|
||||
// Accumulate.
|
||||
srcBlurred += gaussianDistributionCurve[i] * srcSample;
|
||||
}
|
||||
|
||||
return srcBlurred;
|
||||
}
|
||||
|
||||
<@endfunc@>
|
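Per tap, the depth-aware variant lerps the fetched color back toward the center sample when the depth difference is large, using the clamped product of the depth threshold, the projection-window distance and the filter scale. A scalar restatement of that rejection step, translated directly from the loop above (glm only for mix/clamp):

#include <cmath>
#include <glm/glm.hpp>

glm::vec4 depthAwareTap(const glm::vec4& tapColor, float tapDepth,
                        const glm::vec4& centerColor, float centerDepth,
                        float depthThreshold, float distanceToProjectionWindow, float filterScale) {
    float s = glm::clamp(depthThreshold * distanceToProjectionWindow * filterScale
                         * std::abs(tapDepth - centerDepth), 0.0f, 1.0f);
    // s == 1 means the tap is too far away in depth: fall back entirely to the center sample.
    return glm::mix(tapColor, centerColor, s);
}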
|
@ -140,16 +140,6 @@ namespace render {
|
|||
int getNumItems() { return numItems; }
|
||||
};
|
||||
|
||||
template < class T, int NUM >
|
||||
class VaryingArray : public std::array<Varying, NUM> {
|
||||
public:
|
||||
VaryingArray() {
|
||||
for (size_t i = 0; i < NUM; i++) {
|
||||
(*this)[i] = Varying(T());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template <int NUM_FILTERS>
|
||||
class MultiFilterItem {
|
||||
public:
|
||||
|
|
|
@ -60,6 +60,7 @@ void ShapePlumber::addPipeline(const Filter& filter, const gpu::ShaderPointer& p
|
|||
slotBindings.insert(gpu::Shader::Binding(std::string("metallicMap"), Slot::MAP::METALLIC));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("emissiveMap"), Slot::MAP::EMISSIVE_LIGHTMAP));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("occlusionMap"), Slot::MAP::OCCLUSION));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("scatteringMap"), Slot::MAP::SCATTERING));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), Slot::BUFFER::LIGHT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), Slot::MAP::LIGHT_AMBIENT));
|
||||
slotBindings.insert(gpu::Shader::Binding(std::string("normalFittingMap"), Slot::NORMAL_FITTING));
|
||||
|
@ -68,6 +69,10 @@ void ShapePlumber::addPipeline(const Filter& filter, const gpu::ShaderPointer& p
|
|||
|
||||
auto locations = std::make_shared<Locations>();
|
||||
locations->normalFittingMapUnit = program->getTextures().findLocation("normalFittingMap");
|
||||
if (program->getTextures().findLocation("normalFittingMap") > -1) {
|
||||
locations->normalFittingMapUnit = program->getTextures().findLocation("normalFittingMap");
|
||||
|
||||
}
|
||||
locations->albedoTextureUnit = program->getTextures().findLocation("albedoMap");
|
||||
locations->roughnessTextureUnit = program->getTextures().findLocation("roughnessMap");
|
||||
locations->normalTextureUnit = program->getTextures().findLocation("normalMap");
|
||||
|
|
|
@ -209,6 +209,7 @@ public:
|
|||
EMISSIVE_LIGHTMAP,
|
||||
ROUGHNESS,
|
||||
OCCLUSION,
|
||||
SCATTERING,
|
||||
LIGHT_AMBIENT,
|
||||
|
||||
NORMAL_FITTING = 10,
|
||||
|
|
|
@ -23,3 +23,18 @@ void TaskConfig::refresh() {
|
|||
|
||||
_task->configure(*this);
|
||||
}
|
||||
|
||||
|
||||
namespace render {
|
||||
|
||||
template <> void varyingGet(const VaryingPairBase& data, uint8_t index, Varying& var) {
|
||||
if (index == 0) {
|
||||
var = data.first;
|
||||
} else {
|
||||
var = data.second;
|
||||
}
|
||||
}
|
||||
|
||||
template <> uint8_t varyingLength(const VaryingPairBase& data) { return 2; }
|
||||
|
||||
}
|
|
@ -11,6 +11,7 @@
|
|||
|
||||
#ifndef hifi_render_Task_h
|
||||
#define hifi_render_Task_h
|
||||
#include <tuple>
|
||||
|
||||
#include <QtCore/qobject.h>
|
||||
|
||||
|
@ -28,21 +29,40 @@
|
|||
|
||||
namespace render {
|
||||
|
||||
class Varying;
|
||||
|
||||
|
||||
|
||||
template < class T > void varyingGet(const T& data, uint8_t index, Varying& var) {}
|
||||
template <class T> uint8_t varyingLength(const T& data) { return 0; }
|
||||
|
||||
// A varying piece of data, to be used as Job/Task I/O
|
||||
// TODO: Task IO
|
||||
class Varying {
|
||||
public:
|
||||
Varying() {}
|
||||
Varying(const Varying& var) : _concept(var._concept) {}
|
||||
Varying& operator=(const Varying& var) {
|
||||
_concept = var._concept;
|
||||
return (*this);
|
||||
}
|
||||
template <class T> Varying(const T& data) : _concept(std::make_shared<Model<T>>(data)) {}
|
||||
|
||||
template <class T> T& edit() { return std::static_pointer_cast<Model<T>>(_concept)->_data; }
|
||||
template <class T> const T& get() const { return std::static_pointer_cast<const Model<T>>(_concept)->_data; }
|
||||
|
||||
|
||||
// access potential sub varyings contained in this one.
|
||||
Varying operator[] (uint8_t index) const { return (*_concept)[index]; }
|
||||
uint8_t length() const { return (*_concept).length(); }
|
||||
|
||||
protected:
|
||||
class Concept {
|
||||
public:
|
||||
virtual ~Concept() = default;
|
||||
|
||||
virtual Varying operator[] (uint8_t index) const = 0;
|
||||
virtual uint8_t length() const = 0;
|
||||
};
|
||||
template <class T> class Model : public Concept {
|
||||
public:
|
||||
|
@ -50,6 +70,13 @@ protected:
|
|||
|
||||
Model(const Data& data) : _data(data) {}
|
||||
virtual ~Model() = default;
|
||||
|
||||
virtual Varying operator[] (uint8_t index) const {
|
||||
Varying var;
|
||||
varyingGet< T >(_data, index, var);
|
||||
return var;
|
||||
}
|
||||
virtual uint8_t length() const { return varyingLength<T>(_data); }
|
||||
|
||||
Data _data;
|
||||
};
|
||||
|
@ -57,6 +84,106 @@ protected:
|
|||
std::shared_ptr<Concept> _concept;
|
||||
};
|
||||
|
||||
using VaryingPairBase = std::pair<Varying, Varying>;
|
||||
|
||||
template <> void varyingGet(const VaryingPairBase& data, uint8_t index, Varying& var);
|
||||
template <> uint8_t varyingLength(const VaryingPairBase& data);
|
||||
|
||||
template < typename T0, typename T1 >
|
||||
class VaryingSet2 : public VaryingPairBase {
|
||||
public:
|
||||
using Parent = VaryingPairBase;
|
||||
typedef void is_proxy_tag;
|
||||
|
||||
VaryingSet2() : Parent(Varying(T0()), Varying(T1())) {}
|
||||
VaryingSet2(const VaryingSet2& pair) : Parent(pair.first, pair.second) {}
|
||||
VaryingSet2(const Varying& first, const Varying& second) : Parent(first, second) {}
|
||||
|
||||
const T0& get0() const { return first.get<T0>(); }
|
||||
T0& edit0() { return first.edit<T0>(); }
|
||||
|
||||
const T1& get1() const { return second.get<T1>(); }
|
||||
T1& edit1() { return second.edit<T1>(); }
|
||||
};
|
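An illustrative use of VaryingSet2 on its own: two typed payloads ride in one pair of Varyings and come back out through get0()/get1(), or get mutated through edit0()/edit1(). The payload types here are arbitrary placeholders:

#include <string>
#include "Task.h"

void exampleVaryingSet2() {
    render::VaryingSet2<int, std::string> pair(render::Varying(7), render::Varying(std::string("depth")));
    int count = pair.get0();                   // 7
    const std::string& label = pair.get1();    // "depth"
    pair.edit0() += 1;                         // mutate the payload in place
    (void)count; (void)label;
}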
||||
|
||||
|
||||
template <class T0, class T1, class T2>
|
||||
class VaryingSet3 : public std::tuple<Varying, Varying, Varying> {
|
||||
public:
|
||||
using Parent = std::tuple<Varying, Varying, Varying>;
|
||||
|
||||
VaryingSet3() : Parent(Varying(T0()), Varying(T1()), Varying(T2())) {}
|
||||
VaryingSet3(const VaryingSet3& src) : Parent(std::get<0>(src), std::get<1>(src), std::get<2>(src)) {}
|
||||
VaryingSet3(const Varying& first, const Varying& second, const Varying& third) : Parent(first, second, third) {}
|
||||
|
||||
const T0& get0() const { return std::get<0>((*this)).template get<T0>(); }
|
||||
T0& edit0() { return std::get<0>((*this)).template edit<T0>(); }
|
||||
|
||||
const T1& get1() const { return std::get<1>((*this)).template get<T1>(); }
|
||||
T1& edit1() { return std::get<1>((*this)).template edit<T1>(); }
|
||||
|
||||
const T2& get2() const { return std::get<2>((*this)).template get<T2>(); }
|
||||
T2& edit2() { return std::get<2>((*this)).template edit<T2>(); }
|
||||
};
|
||||
|
||||
template <class T0, class T1, class T2, class T3>
|
||||
class VaryingSet4 : public std::tuple<Varying, Varying, Varying, Varying> {
|
||||
public:
|
||||
using Parent = std::tuple<Varying, Varying, Varying, Varying>;
|
||||
|
||||
VaryingSet4() : Parent(Varying(T0()), Varying(T1()), Varying(T2()), Varying(T3())) {}
|
||||
VaryingSet4(const VaryingSet4& src) : Parent(std::get<0>(src), std::get<1>(src), std::get<2>(src), std::get<3>(src)) {}
|
||||
VaryingSet4(const Varying& first, const Varying& second, const Varying& third, const Varying& fourth) : Parent(first, second, third, fourth) {}
|
||||
|
||||
const T0& get0() const { return std::get<0>((*this)).template get<T0>(); }
|
||||
T0& edit0() { return std::get<0>((*this)).template edit<T0>(); }
|
||||
|
||||
const T1& get1() const { return std::get<1>((*this)).template get<T1>(); }
|
||||
T1& edit1() { return std::get<1>((*this)).template edit<T1>(); }
|
||||
|
||||
const T2& get2() const { return std::get<2>((*this)).template get<T2>(); }
|
||||
T2& edit2() { return std::get<2>((*this)).template edit<T2>(); }
|
||||
|
||||
const T3& get3() const { return std::get<3>((*this)).template get<T3>(); }
|
||||
T3& edit3() { return std::get<3>((*this)).template edit<T3>(); }
|
||||
};
|
||||
|
||||
|
||||
template <class T0, class T1, class T2, class T3, class T4>
|
||||
class VaryingSet5 : public std::tuple<Varying, Varying, Varying, Varying, Varying> {
|
||||
public:
|
||||
using Parent = std::tuple<Varying, Varying, Varying, Varying, Varying>;
|
||||
|
||||
VaryingSet5() : Parent(Varying(T0()), Varying(T1()), Varying(T2()), Varying(T3()), Varying(T4())) {}
|
||||
VaryingSet5(const VaryingSet5& src) : Parent(std::get<0>(src), std::get<1>(src), std::get<2>(src), std::get<3>(src), std::get<4>(src)) {}
|
||||
VaryingSet5(const Varying& first, const Varying& second, const Varying& third, const Varying& fourth, const Varying& fifth) : Parent(first, second, third, fourth, fifth) {}
|
||||
|
||||
const T0& get0() const { return std::get<0>((*this)).template get<T0>(); }
|
||||
T0& edit0() { return std::get<0>((*this)).template edit<T0>(); }
|
||||
|
||||
const T1& get1() const { return std::get<1>((*this)).template get<T1>(); }
|
||||
T1& edit1() { return std::get<1>((*this)).template edit<T1>(); }
|
||||
|
||||
const T2& get2() const { return std::get<2>((*this)).template get<T2>(); }
|
||||
T2& edit2() { return std::get<2>((*this)).template edit<T2>(); }
|
||||
|
||||
const T3& get3() const { return std::get<3>((*this)).template get<T3>(); }
|
||||
T3& edit3() { return std::get<3>((*this)).template edit<T3>(); }
|
||||
|
||||
const T4& get4() const { return std::get<4>((*this)).template get<T4>(); }
|
||||
T4& edit4() { return std::get<4>((*this)).template edit<T4>(); }
|
||||
};
|
||||
|
||||
template < class T, int NUM >
|
||||
class VaryingArray : public std::array<Varying, NUM> {
|
||||
public:
|
||||
VaryingArray() {
|
||||
for (size_t i = 0; i < NUM; i++) {
|
||||
(*this)[i] = Varying(T());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
class Job;
|
||||
class Task;
|
||||
class JobNoIO {};
|
||||
|
@ -137,6 +264,7 @@ public:
|
|||
JobConfig(bool enabled) : alwaysEnabled{ false }, enabled{ enabled } {}
|
||||
|
||||
bool isEnabled() { return alwaysEnabled || enabled; }
|
||||
void setEnabled(bool enable) { enabled = enable; }
|
||||
|
||||
bool alwaysEnabled{ true };
|
||||
bool enabled{ true };
|
||||
|
@ -265,7 +393,7 @@ public:
|
|||
|
||||
void run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
|
||||
renderContext->jobConfig = std::static_pointer_cast<Config>(_config);
|
||||
if (renderContext->jobConfig->alwaysEnabled || renderContext->jobConfig->enabled) {
|
||||
if (renderContext->jobConfig->alwaysEnabled || renderContext->jobConfig->isEnabled()) {
|
||||
jobRun(_data, sceneContext, renderContext, _input.get<I>(), _output.edit<O>());
|
||||
}
|
||||
renderContext->jobConfig.reset();
|
||||
|
@ -400,4 +528,5 @@ protected:
|
|||
|
||||
}
|
||||
|
||||
|
||||
#endif // hifi_render_Task_h
|
||||
|
|
22
libraries/render/src/render/blurGaussianDepthAwareH.slf
Normal file
|
@ -0,0 +1,22 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 6/7/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include BlurTask.slh@>
|
||||
<$declareBlurGaussianDepthAware()$>
|
||||
|
||||
in vec2 varTexCoord0;
|
||||
|
||||
out vec4 outFragColor;
|
||||
|
||||
void main(void) {
|
||||
outFragColor = pixelShaderGaussianDepthAware(varTexCoord0, vec2(1.0, 0.0), getViewportInvWidthHeight());
|
||||
}
|
||||
|
22
libraries/render/src/render/blurGaussianDepthAwareV.slf
Normal file
|
@ -0,0 +1,22 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 6/7/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include BlurTask.slh@>
|
||||
<$declareBlurGaussianDepthAware()$>
|
||||
|
||||
in vec2 varTexCoord0;
|
||||
|
||||
out vec4 outFragColor;
|
||||
|
||||
void main(void) {
|
||||
outFragColor = pixelShaderGaussianDepthAware(varTexCoord0, vec2(0.0, 1.0), getViewportInvWidthHeight());
|
||||
}
|
||||
|
23
libraries/render/src/render/blurGaussianH.slf
Normal file
|
@ -0,0 +1,23 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 6/7/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include BlurTask.slh@>
|
||||
<$declareBlurGaussian()$>
|
||||
|
||||
|
||||
in vec2 varTexCoord0;
|
||||
|
||||
out vec4 outFragColor;
|
||||
|
||||
void main(void) {
|
||||
outFragColor = pixelShaderGaussian(varTexCoord0, vec2(1.0, 0.0), getViewportInvWidthHeight());
|
||||
}
|
||||
|
22
libraries/render/src/render/blurGaussianV.slf
Normal file
|
@ -0,0 +1,22 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// Created by Sam Gateau on 6/7/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
<@include BlurTask.slh@>
|
||||
<$declareBlurGaussian()$>
|
||||
|
||||
|
||||
in vec2 varTexCoord0;
|
||||
|
||||
out vec4 outFragColor;
|
||||
|
||||
void main(void) {
|
||||
outFragColor = pixelShaderGaussian(varTexCoord0, vec2(0.0, 1.0), getViewportInvWidthHeight());
|
||||
}
|
||||
|
|
@ -40,7 +40,7 @@ Item {
|
|||
Label {
|
||||
text: sliderControl.value.toFixed(root.integral ? 0 : 2)
|
||||
anchors.left: root.left
|
||||
anchors.leftMargin: 140
|
||||
anchors.leftMargin: 200
|
||||
anchors.top: root.top
|
||||
anchors.topMargin: 7
|
||||
}
|
||||
|
@ -56,7 +56,7 @@ Item {
|
|||
Slider {
|
||||
id: sliderControl
|
||||
stepSize: root.integral ? 1.0 : 0.0
|
||||
width: 192
|
||||
width: 150
|
||||
height: 20
|
||||
anchors.right: root.right
|
||||
anchors.rightMargin: 8
|
||||
|
|
73
scripts/developer/utilities/render/currentZone.js
Normal file
|
@ -0,0 +1,73 @@
|
|||
//
|
||||
// currentZone.js
|
||||
// examples/utilities/tools/render
|
||||
//
|
||||
// Created by Sam Gateau on 6/18/2016.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// Set up the qml ui
|
||||
/*var qml = Script.resolvePath('framebuffer.qml');
|
||||
var window = new OverlayWindow({
|
||||
title: 'Framebuffer Debug',
|
||||
source: qml,
|
||||
width: 400, height: 400,
|
||||
});
|
||||
window.setPosition(25, 50);
|
||||
window.closed.connect(function() { Script.stop(); });
|
||||
*/
|
||||
|
||||
|
||||
|
||||
function findCurrentZones() {
|
||||
var foundEntitiesArray = Entities.findEntities(MyAvatar.position, 2.0);
|
||||
//print(foundEntitiesArray.length);
|
||||
var zones = [];
|
||||
|
||||
foundEntitiesArray.forEach(function(foundID){
|
||||
var properties = Entities.getEntityProperties(foundID);
|
||||
if (properties.type == "Zone") {
|
||||
zones.push(foundID);
|
||||
}
|
||||
});
|
||||
return zones;
|
||||
}
|
||||
|
||||
|
||||
var currentZone;
|
||||
var currentZoneProperties;
|
||||
|
||||
function setCurrentZone(newCurrentZone) {
|
||||
if (currentZone == newCurrentZone) {
|
||||
return;
|
||||
}
|
||||
|
||||
currentZone = newCurrentZone;
|
||||
currentZoneProperties = Entities.getEntityProperties(currentZone);
|
||||
|
||||
print(JSON.stringify(currentZoneProperties));
|
||||
}
|
||||
|
||||
var checkCurrentZone = function() {
|
||||
|
||||
var currentZones = findCurrentZones();
|
||||
if (currentZones.length > 0) {
|
||||
if (currentZone != currentZones[0]) {
|
||||
print("New Zone");
|
||||
setCurrentZone(currentZones[0]);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
var ticker = Script.setInterval(checkCurrentZone, 2000);
|
||||
|
||||
//checkCurrentZone();
|
||||
|
||||
function onQuit() {
|
||||
Script.clearInterval(ticker);
|
||||
print("Quit Zone");
|
||||
}
|
||||
Script.scriptEnding.connect(onQuit);
|
20
scripts/developer/utilities/render/debugDeferredLighting.js
Normal file
|
@ -0,0 +1,20 @@
|
|||
//
|
||||
// debugDeferredLighting.js
|
||||
//
|
||||
// Created by Sam Gateau on 6/6/2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// Set up the qml ui
|
||||
var qml = Script.resolvePath('deferredLighting.qml');
|
||||
var window = new OverlayWindow({
|
||||
title: 'Deferred Lighting Pass',
|
||||
source: qml,
|
||||
width: 400, height: 100,
|
||||
});
|
||||
window.setPosition(250, 800);
|
||||
window.closed.connect(function() { Script.stop(); });
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
//
|
||||
// debugSubsurfaceScattering.js
|
||||
//
|
||||
// Created by Sam Gateau on 6/6/2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// Set up the qml ui
|
||||
var qml = Script.resolvePath('subsurfaceScattering.qml');
|
||||
var window = new OverlayWindow({
|
||||
title: 'Subsurface Scattering',
|
||||
source: qml,
|
||||
width: 400, height: 350,
|
||||
});
|
||||
window.setPosition(250, 950);
|
||||
window.closed.connect(function() { Script.stop(); });
|
||||
|
||||
var moveDebugCursor = false;
|
||||
Controller.mousePressEvent.connect(function (e) {
|
||||
if (e.isMiddleButton) {
|
||||
moveDebugCursor = true;
|
||||
setDebugCursor(e.x, e.y);
|
||||
}
|
||||
});
|
||||
Controller.mouseReleaseEvent.connect(function() { moveDebugCursor = false; });
|
||||
Controller.mouseMoveEvent.connect(function (e) { if (moveDebugCursor) setDebugCursor(e.x, e.y); });
|
||||
|
||||
|
||||
function setDebugCursor(x, y) {
|
||||
var nx = (x / Window.innerWidth);
|
||||
var ny = 1.0 - (y / (Window.innerHeight - 32));
|
||||
|
||||
Render.getConfig("DebugScattering").debugCursorTexcoord = { x: nx, y: ny };
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
//
|
||||
// debugSurfaceGeometryPass.js
|
||||
//
|
||||
// Created by Sam Gateau on 6/6/2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// Set up the qml ui
|
||||
var qml = Script.resolvePath('surfaceGeometryPass.qml');
|
||||
var window = new OverlayWindow({
|
||||
title: 'Surface Geometry Pass',
|
||||
source: qml,
|
||||
width: 400, height: 300,
|
||||
});
|
||||
window.setPosition(250, 400);
|
||||
window.closed.connect(function() { Script.stop(); });
|
||||
|
20
scripts/developer/utilities/render/debugToneMapping.js
Normal file
|
@ -0,0 +1,20 @@
|
|||
//
|
||||
// debugToneMapping.js
|
||||
//
|
||||
// Created by Sam Gateau on 6/30/2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// Set up the qml ui
|
||||
var qml = Script.resolvePath('toneMapping.qml');
|
||||
var window = new OverlayWindow({
|
||||
title: 'Tone Mapping',
|
||||
source: qml,
|
||||
width: 400, height: 200,
|
||||
});
|
||||
window.setPosition(250, 1000);
|
||||
window.closed.connect(function() { Script.stop(); });
|
||||
|
69
scripts/developer/utilities/render/deferredLighting.qml
Normal file
|
@ -0,0 +1,69 @@
|
|||
//
|
||||
// deferredLighting.qml
|
||||
//
|
||||
// Created by Sam Gateau on 6/6/2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import "configSlider"
|
||||
|
||||
Row {
|
||||
spacing: 8
|
||||
|
||||
|
||||
Column {
|
||||
spacing: 10
|
||||
Repeater {
|
||||
model: [
|
||||
"Unlit:LightingModel:enableUnlit",
|
||||
"Shaded:LightingModel:enableShaded",
|
||||
"Emissive:LightingModel:enableEmissive",
|
||||
"Lightmap:LightingModel:enableLightmap",
|
||||
]
|
||||
CheckBox {
|
||||
text: modelData.split(":")[0]
|
||||
checked: Render.getConfig(modelData.split(":")[1])
|
||||
onCheckedChanged: { Render.getConfig(modelData.split(":")[1])[modelData.split(":")[2]] = checked }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Column {
|
||||
spacing: 10
|
||||
Repeater {
|
||||
model: [
|
||||
"Scattering:LightingModel:enableScattering",
|
||||
"Diffuse:LightingModel:enableDiffuse",
|
||||
"Specular:LightingModel:enableSpecular",
|
||||
]
|
||||
CheckBox {
|
||||
text: modelData.split(":")[0]
|
||||
checked: Render.getConfig(modelData.split(":")[1])
|
||||
onCheckedChanged: { Render.getConfig(modelData.split(":")[1])[modelData.split(":")[2]] = checked }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Column {
|
||||
spacing: 10
|
||||
Repeater {
|
||||
model: [
|
||||
"Ambient:LightingModel:enableAmbientLight",
|
||||
"Directional:LightingModel:enableDirectionalLight",
|
||||
"Point:LightingModel:enablePointLight",
|
||||
"Spot:LightingModel:enableSpotLight"
|
||||
]
|
||||
CheckBox {
|
||||
text: modelData.split(":")[0]
|
||||
checked: Render.getConfig(modelData.split(":")[1])
|
||||
onCheckedChanged: { Render.getConfig(modelData.split(":")[1])[modelData.split(":")[2]] = checked }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@@ -10,6 +10,7 @@
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "configSlider"

Column {
    spacing: 8

@@ -22,7 +23,19 @@ Column {
            debug.config.mode = mode;
        }

        Label { text: qsTr("Debug Buffer") }
        function setX(x) {
            print(x)

            debug.config.size = Vec4({ x: x, y: -1, z: 1, w: 1 });
        }
        Slider {
            minimumValue: -1.0
            value: debug.config.size.x
            onValueChanged: {
                debug.setX( value);
            }
        }

        ExclusiveGroup { id: bufferGroup }
        Repeater {
            model: [

@@ -36,9 +49,15 @@ Column {
                "Unlit",
                "Occlusion",
                "Lightmap",
                "Scattering",
                "Lighting",
                "Shadow",
                "Pyramid Depth",
                "Curvature",
                "NormalCurvature",
                "DiffusedCurvature",
                "DiffusedNormalCurvature",
                "Debug Scattering",
                "Ambient Occlusion",
                "Ambient Occlusion Blurred",
                "Custom Shader"
36  scripts/developer/utilities/render/globalLight.qml  Normal file
@@ -0,0 +1,36 @@
//
//  globalLight.qml
//  examples/utilities/render
//
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "configSlider"

Column {
    id: root
    spacing: 8
    property var currentZoneID
    property var zoneProperties

    Component.onCompleted: {
        Entities.getProperties
        sceneOctree.enabled = true;
        itemSelection.enabled = true;
        sceneOctree.showVisibleCells = false;
        sceneOctree.showEmptyCells = false;
        itemSelection.showInsideItems = false;
        itemSelection.showInsideSubcellItems = false;
        itemSelection.showPartialItems = false;
        itemSelection.showPartialSubcellItems = false;
    }
    Component.onDestruction: {
        sceneOctree.enabled = false;
        itemSelection.enabled = false;
        Render.getConfig("FetchSceneSelection").freezeFrustum = false;
        Render.getConfig("CullSceneSelection").freezeFrustum = false;
    }
81  scripts/developer/utilities/render/subsurfaceScattering.qml  Normal file
@@ -0,0 +1,81 @@
//
//  subsurfaceScattering.qml
//
//  Created by Sam Gateau on 6/6/2016
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "configSlider"

Column {
    spacing: 8
    Column {
        id: scattering
        spacing: 10

        Column {
            CheckBox {
                text: "Scattering"
                checked: Render.getConfig("Scattering").enableScattering
                onCheckedChanged: { Render.getConfig("Scattering").enableScattering = checked }
            }

            CheckBox {
                text: "Show Scattering BRDF"
                checked: Render.getConfig("Scattering").showScatteringBRDF
                onCheckedChanged: { Render.getConfig("Scattering").showScatteringBRDF = checked }
            }
            CheckBox {
                text: "Show Curvature"
                checked: Render.getConfig("Scattering").showCurvature
                onCheckedChanged: { Render.getConfig("Scattering").showCurvature = checked }
            }
            CheckBox {
                text: "Show Diffused Normal"
                checked: Render.getConfig("Scattering").showDiffusedNormal
                onCheckedChanged: { Render.getConfig("Scattering").showDiffusedNormal = checked }
            }
            Repeater {
                model: [ "Scattering Bent Red:Scattering:bentRed:2.0",
                    "Scattering Bent Green:Scattering:bentGreen:2.0",
                    "Scattering Bent Blue:Scattering:bentBlue:2.0",
                    "Scattering Bent Scale:Scattering:bentScale:5.0",
                    "Scattering Curvature Offset:Scattering:curvatureOffset:1.0",
                    "Scattering Curvature Scale:Scattering:curvatureScale:2.0",
                ]
                ConfigSlider {
                    label: qsTr(modelData.split(":")[0])
                    integral: false
                    config: Render.getConfig(modelData.split(":")[1])
                    property: modelData.split(":")[2]
                    max: modelData.split(":")[3]
                    min: 0.0
                }
            }
            CheckBox {
                text: "Scattering Profile"
                checked: Render.getConfig("DebugScattering").showProfile
                onCheckedChanged: { Render.getConfig("DebugScattering").showProfile = checked }
            }
            CheckBox {
                text: "Scattering Table"
                checked: Render.getConfig("DebugScattering").showLUT
                onCheckedChanged: { Render.getConfig("DebugScattering").showLUT = checked }
            }
            CheckBox {
                text: "Cursor Pixel"
                checked: Render.getConfig("DebugScattering").showCursorPixel
                onCheckedChanged: { Render.getConfig("DebugScattering").showCursorPixel = checked }
            }
            CheckBox {
                text: "Skin Specular Beckmann"
                checked: Render.getConfig("DebugScattering").showSpecularTable
                onCheckedChanged: { Render.getConfig("DebugScattering").showSpecularTable = checked }
            }
        }
    }
}
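The slider entries above extend the checkbox encoding to four fields, "Label:ConfigName:propertyName:max", with min fixed at 0.0 by the QML. A hedged sketch of the equivalent write a slider move performs; setScatteringParam is an illustrative name, while the config and property names come from the QML above:

// Hedged sketch: apply a normalized slider value (0..1) to one "Label:Config:property:max" entry.
function setScatteringParam(entry, normalizedValue) {
    var parts = entry.split(":");       // e.g. ["Scattering Bent Scale", "Scattering", "bentScale", "5.0"]
    var max = parseFloat(parts[3]);     // per-entry maximum; min is 0.0 in the QML above
    Render.getConfig(parts[1])[parts[2]] = normalizedValue * max;
}

// Example: setScatteringParam("Scattering Curvature Scale:Scattering:curvatureScale:2.0", 0.5);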
59  scripts/developer/utilities/render/surfaceGeometryPass.qml  Normal file
@@ -0,0 +1,59 @@
//
//  surfaceGeometryPass.qml
//
//  Created by Sam Gateau on 6/6/2016
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "configSlider"

Column {
    spacing: 8
    Column {
        id: surfaceGeometry
        spacing: 10

        Column {
            Repeater {
                model: [ "Depth Threshold:depthThreshold:0.1", "Basis Scale:basisScale:2.0", "Curvature Scale:curvatureScale:100.0" ]
                ConfigSlider {
                    label: qsTr(modelData.split(":")[0])
                    integral: false
                    config: Render.getConfig("SurfaceGeometry")
                    property: modelData.split(":")[1]
                    max: modelData.split(":")[2]
                    min: 0.0
                }
            }
        }

        Column {
            CheckBox {
                text: "Diffuse Curvature 1"
                checked: true
                onCheckedChanged: { Render.getConfig("DiffuseCurvature").enabled = checked }
            }
            Repeater {
                model: [ "Blur Scale:DiffuseCurvature:filterScale:2.0", "Blur Depth Threshold:DiffuseCurvature:depthThreshold:1.0", "Blur Scale2:DiffuseCurvature2:filterScale:2.0", "Blur Depth Threshold 2:DiffuseCurvature2:depthThreshold:1.0"]
                ConfigSlider {
                    label: qsTr(modelData.split(":")[0])
                    integral: false
                    config: Render.getConfig(modelData.split(":")[1])
                    property: modelData.split(":")[2]
                    max: modelData.split(":")[3]
                    min: 0.0
                }
            }

            CheckBox {
                text: "Diffuse Curvature 2"
                checked: true
                onCheckedChanged: { Render.getConfig("DiffuseCurvature2").enabled = checked }
            }
        }
    }
}
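The two "Diffuse Curvature" checkboxes and the blur sliders above all target the DiffuseCurvature and DiffuseCurvature2 configs. A hedged sketch of the same writes driven from a script; setCurvatureBlur is an illustrative helper, while the config and property names are taken from the QML above:

// Hedged sketch: enable/disable both diffuse-curvature blur passes and set their filter scale.
function setCurvatureBlur(enabled, filterScale) {
    ["DiffuseCurvature", "DiffuseCurvature2"].forEach(function(name) {
        var config = Render.getConfig(name);
        config.enabled = enabled;           // same flag the checkboxes toggle
        config.filterScale = filterScale;   // 0.0 .. 2.0, matching the slider range above
    });
}

// Example: setCurvatureBlur(true, 1.5);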
Some files were not shown because too many files have changed in this diff.