<!
//  DeferredTransform.slh
//  libraries/render-utils/src
//
//  Created by Sam Gateau on 6/2/16.
//  Copyright 2016 High Fidelity, Inc.
//  Copyright 2024 Overte e.V.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
!>

<@if not DEFERRED_TRANSFORM_SLH@>
<@def DEFERRED_TRANSFORM_SLH@>

<@include gpu/ShaderConstants.h@>
<@include render-utils/ShaderConstants.h@>

<@func declareDeferredFrameTransform()@>

<@include DeferredTransform_shared.slh@>

#define DeferredFrameTransform _DeferredFrameTransform
#define TransformCamera _TransformCamera

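// Per-frame transform state shared by the deferred passes: frameTransform exposes the
// cameras (one per stereo side) and the resolution / stereo / depth parameters in 'infos'
// that the helper functions below read from.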
LAYOUT_STD140(binding=RENDER_UTILS_BUFFER_DEFERRED_FRAME_TRANSFORM) uniform deferredFrameTransformBuffer {
    DeferredFrameTransform frameTransform;
};

vec2 getWidthHeight(int resolutionLevel) {
    return vec2(ivec2(frameTransform.infos.pixelInfo.zw) >> resolutionLevel);
}

vec2 getInvWidthHeight() {
    return frameTransform.infos.invPixelInfo.xy;
}

mat4 getProjection(int side) {
    return frameTransform.cameras[side]._projection;
}

mat4 getProjectionInverse(int side) {
    return frameTransform.cameras[side]._projectionInverse;
}

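// For a standard perspective projection, P[1][1] is the vertical scale 1 / tan(fovY / 2);
// getProjScale turns it into a per-pixel scale at the given resolution level.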
float getProjScaleEye() {
    return getProjection(0)[1][1];
}

float getProjScale(int resolutionLevel) {
    return getWidthHeight(resolutionLevel).y * getProjScaleEye() * 0.5;
}

mat4 getProjectionMono() {
    return frameTransform.infos.projectionMono;
}

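// The near distance is recovered from the projection matrix itself: the near clip plane
// satisfies z_clip = -w_clip, i.e. (P[2][2] + P[2][3]) * Zeye + P[3][2] = 0, and Zeye = -near
// at that plane, hence near = planeD / planeC below.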
// positive near distance of the projection
float getProjectionNear() {
    mat4 projection = getProjection(0);
    float planeC = projection[2][3] + projection[2][2];
    float planeD = projection[3][2];
    return planeD / planeC;
}

// positive far distance of the projection
float getPosLinearDepthFar() {
    return -frameTransform.infos.depthInfo.z;
}

mat4 getViewInverse(int side) {
    return frameTransform.cameras[side]._viewInverse;
}

mat4 getView(int side) {
    return frameTransform.cameras[side]._view;
}

mat4 getPreviousView(int side) {
    return frameTransform.cameras[side]._previousView;
}

mat4 getPreviousViewInverse(int side) {
    return frameTransform.cameras[side]._previousViewInverse;
}

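// Side-by-side stereo helpers: stereoInfo.x is non-zero when stereo rendering is active,
// and stereoInfo.y holds the width in pixels of a single eye's viewport.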
bool isStereo() {
    return frameTransform.infos.stereoInfo.x > 0.0f;
}

float getStereoSideWidth(int resolutionLevel) {
    return float(int(frameTransform.infos.stereoInfo.y) >> resolutionLevel);
}

float getStereoSideHeight(int resolutionLevel) {
    return float(int(frameTransform.infos.pixelInfo.w) >> resolutionLevel);
}

vec2 getSideImageSize(int resolutionLevel) {
    return vec2(float(int(frameTransform.infos.stereoInfo.y) >> resolutionLevel), float(int(frameTransform.infos.pixelInfo.w) >> resolutionLevel));
}

int getStereoSideFromPixel(int xPos, int resolutionLevel) {
    int sideWidth = int(getStereoSideWidth(resolutionLevel));
    return int(xPos >= sideWidth && isStereo());
}

int getStereoSideFromPixel(int xPos) {
    return getStereoSideFromPixel(xPos, 0);
}

int getStereoSideFromFragCoord() {
    return getStereoSideFromPixel(int(gl_FragCoord.x), 0);
}

int getStereoSideFromUV(float uPos) {
    return int(uPos >= 0.5 && isStereo());
}

vec2 getStereoSideSize(int resolutionLevel) {
    return vec2(getStereoSideWidth(resolutionLevel), getStereoSideHeight(resolutionLevel));
}

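// Packed per-fragment stereo info (as consumed by getPixelPosTexcoordPosAndSide):
//   .x = side index (0 = left, 1 = right)
//   .y = pixel offset to subtract from x to get a side-local position
//   .z = width in pixels of one side
//   .w = stereo flag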
ivec4 getStereoSideInfoFromWidth(int xPos, int sideWidth) {
    return ivec4((1 - int(xPos < sideWidth)) * ivec2(1, sideWidth), sideWidth, isStereo());
}

ivec4 getStereoSideInfo(int xPos, int resolutionLevel) {
    int sideWidth = int(getStereoSideWidth(resolutionLevel));
    return getStereoSideInfoFromWidth(xPos, sideWidth);
}

int getStereoSide(ivec4 sideInfo) {
    return sideInfo.x;
}

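// depthInfo packs the coefficients of the mapping between depth-buffer values (Zdb)
// and eye-space depth (Zeye); the two functions below are algebraic inverses of each other.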
float evalZeyeFromZdb(float depth) {
    return frameTransform.infos.depthInfo.x / (depth * frameTransform.infos.depthInfo.y + frameTransform.infos.depthInfo.z);
}

float evalZdbFromZeye(float Zeye) {
    return (frameTransform.infos.depthInfo.x - Zeye * frameTransform.infos.depthInfo.z) / (Zeye * frameTransform.infos.depthInfo.y);
}

vec3 evalEyeNormal(vec3 C) {
    return normalize(cross(dFdx(C), dFdy(C)));
}

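// Unproject a depth-buffer sample back to eye space: texcoord and Zdb are remapped from
// [0,1] to normalized device coordinates, multiplied by the inverse projection, then the
// perspective divide is applied.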
vec3 evalEyePositionFromZdb(int side, float Zdb, vec2 texcoord) {
    // compute the view space position using the depth
    vec3 clipPos;
    clipPos.xyz = vec3(texcoord.xy, Zdb) * 2.0 - 1.0;
    vec4 eyePos = getProjectionInverse(side) * vec4(clipPos.xyz, 1.0);
    return eyePos.xyz / eyePos.w;
}

vec3 evalEyePositionFromZeye(int side, float Zeye, vec2 texcoord) {
    float Zdb = evalZdbFromZeye(Zeye);
    return evalEyePositionFromZdb(side, Zdb, texcoord);
}

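// From a raw fragment coordinate, fill in the side-local pixel position, the matching
// normalized texcoord and the packed stereo side info; returns the original pixel position.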
ivec2 getPixelPosTexcoordPosAndSide(in vec2 glFragCoord, out ivec2 pixelPos, out vec2 texcoordPos, out ivec4 stereoSide) {
    ivec2 fragPos = ivec2(glFragCoord.xy);

    stereoSide = getStereoSideInfo(fragPos.x, 0);

    pixelPos = fragPos;
    pixelPos.x -= stereoSide.y;

    texcoordPos = (vec2(pixelPos) + 0.5) * getInvWidthHeight();

    return fragPos;
}

<@endfunc@>

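// Typical usage (illustrative sketch, not prescriptive): a shader includes this file,
// invokes the declareDeferredFrameTransform() scribe function at global scope, then calls
// the helpers; 'depthMap' below is a hypothetical depth sampler bound by the including shader:
//
//     ivec2 pixelPos;
//     vec2 texcoordPos;
//     ivec4 stereoSide;
//     getPixelPosTexcoordPosAndSide(gl_FragCoord.xy, pixelPos, texcoordPos, stereoSide);
//     float Zeye = evalZeyeFromZdb(texture(depthMap, texcoordPos).x);
//     vec3 eyePos = evalEyePositionFromZeye(getStereoSide(stereoSide), Zeye, texcoordPos);
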
<@endif@>