Used unjittered projection matrix for a sharper and more stable TAA

Olivier Prat 2018-04-17 15:13:03 +02:00
parent d0d974d4f5
commit 6ec9378739
7 changed files with 43 additions and 11 deletions
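For context on the change: TAA jitters the projection matrix by a sub-pixel offset each frame, and this commit records that offset on the gpu::Context so later passes can rebuild the unjittered matrix by subtracting it back out of entries [2][0] and [2][1] (glm matrices are column-major, so those are the third-column entries that multiply eye-space z). A minimal sketch of that relationship, assuming glm; the helper names addProjectionJitter / removeProjectionJitter are illustrative and not part of the commit:

#include <glm/glm.hpp>

// Sketch only: mirrors the arithmetic used in DeferredFrameTransform::update below.
// 'jitter' is assumed to be the same (jx, jy) pair passed to Context::setProjectionJitter.
glm::mat4 addProjectionJitter(glm::mat4 proj, const glm::vec2& jitter) {
    proj[2][0] += jitter.x; // shifts clip-space x in proportion to eye-space z
    proj[2][1] += jitter.y; // shifts clip-space y in proportion to eye-space z
    return proj;
}

glm::mat4 removeProjectionJitter(glm::mat4 proj, const glm::vec2& jitter) {
    proj[2][0] -= jitter.x; // exact inverse of the addition above
    proj[2][1] -= jitter.y;
    return proj;
}

Because the jitter enters those two entries additively, subtracting the recorded values recovers the pre-jitter projection (up to floating-point rounding when the frustum is asymmetric).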

View file

@@ -174,6 +174,11 @@ void Context::getStereoViews(mat4* eyeViews) const {
     }
 }
 
+void Context::setProjectionJitter(float jx, float jy) {
+    _projectionJitter.x = jx;
+    _projectionJitter.y = jy;
+}
+
 void Context::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) {
     _backend->downloadFramebuffer(srcFramebuffer, region, destImage);
 }

View file

@@ -208,6 +208,8 @@ public:
     void setStereoViews(const mat4 eyeViews[2]);
     void getStereoProjections(mat4* eyeProjections) const;
     void getStereoViews(mat4* eyeViews) const;
+    void setProjectionJitter(float jx, float jy);
+    gpu::Vec2 getProjectionJitter() const { return _projectionJitter; }
 
     // Downloading the Framebuffer is a synchronous action that is not efficient.
     // It's here for convenience to easily capture a snapshot
@@ -254,6 +256,7 @@ protected:
     FramePointer _currentFrame;
     RangeTimerPointer _frameRangeTimer;
     StereoState _stereo;
+    gpu::Vec2 _projectionJitter{ 0.0f, 0.0f };
 
     // Sampled at the end of every frame, the stats of all the counters
     mutable ContextStats _frameStats;

View file

@@ -541,6 +541,7 @@ void JitterSample::run(const render::RenderContextPointer& renderContext) {
         args->_context->setStereoProjections(projMats);
     }
+    args->_context->setProjectionJitter(jx, jy);
 }

View file

@@ -38,9 +38,6 @@ void DeferredFrameTransform::update(RenderArgs* args) {
     args->getViewFrustum().evalProjectionMatrix(frameTransformBuffer.projectionMono);
-    frameTransformBuffer.previousProjection[0] = frameTransformBuffer.projection[0];
-    frameTransformBuffer.previousProjection[1] = frameTransformBuffer.projection[1];
 
     // Running in stereo ?
     bool isStereo = args->isStereo();
     if (!isStereo) {
@@ -48,6 +45,11 @@ void DeferredFrameTransform::update(RenderArgs* args) {
         frameTransformBuffer.stereoInfo = glm::vec4(0.0f, (float)args->_viewport.z, 0.0f, 0.0f);
         frameTransformBuffer.invpixelInfo = glm::vec4(1.0f / args->_viewport.z, 1.0f / args->_viewport.w, 0.0f, 0.0f);
         frameTransformBuffer.invProjection[0] = glm::inverse(frameTransformBuffer.projection[0]);
+        frameTransformBuffer.projectionUnjittered[0] = frameTransformBuffer.projection[0];
+        frameTransformBuffer.projectionUnjittered[0][2][0] -= args->_context->getProjectionJitter().x;
+        frameTransformBuffer.projectionUnjittered[0][2][1] -= args->_context->getProjectionJitter().y;
+        frameTransformBuffer.invProjectionUnjittered[0] = glm::inverse(frameTransformBuffer.projectionUnjittered[0]);
     } else {
         mat4 projMats[2];
@@ -60,6 +62,11 @@ void DeferredFrameTransform::update(RenderArgs* args) {
             auto sideViewMat = projMats[i] * eyeViews[i];
             frameTransformBuffer.projection[i] = sideViewMat;
             frameTransformBuffer.invProjection[i] = glm::inverse(sideViewMat);
+            frameTransformBuffer.projectionUnjittered[i] = frameTransformBuffer.projection[i];
+            frameTransformBuffer.projectionUnjittered[i][2][0] -= args->_context->getProjectionJitter().x;
+            frameTransformBuffer.projectionUnjittered[i][2][1] -= args->_context->getProjectionJitter().y;
+            frameTransformBuffer.invProjectionUnjittered[i] = glm::inverse(frameTransformBuffer.projectionUnjittered[i]);
         }
         frameTransformBuffer.stereoInfo = glm::vec4(1.0f, (float)(args->_viewport.z >> 1), 0.0f, 1.0f);
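A standalone sanity check of the subtraction above (a sketch, not part of the commit; the perspective parameters and jitter values are arbitrary):

#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/epsilon.hpp>

int main() {
    // Arbitrary symmetric projection and an arbitrary sub-pixel jitter, assumed to be
    // in the same units the commit stores via Context::setProjectionJitter.
    glm::mat4 proj = glm::perspective(glm::radians(60.0f), 16.0f / 9.0f, 0.1f, 100.0f);
    glm::vec2 jitter(0.5f / 1920.0f, 0.5f / 1080.0f);

    glm::mat4 jittered = proj;
    jittered[2][0] += jitter.x;      // what the jittered projection carries
    jittered[2][1] += jitter.y;

    glm::mat4 unjittered = jittered;
    unjittered[2][0] -= jitter.x;    // same subtraction as DeferredFrameTransform::update
    unjittered[2][1] -= jitter.y;

    bool same = true;
    for (int c = 0; c < 4; ++c) {
        same = same && glm::all(glm::epsilonEqual(unjittered[c], proj[c], 1e-6f));
    }
    std::printf("unjittered matches original: %s\n", same ? "yes" : "no");
    return 0;
}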

View file

@@ -47,15 +47,16 @@ protected:
     glm::mat4 projection[2];
     // Inverse proj matrix or Left and Right proj matrix going from Mono Eye space to side clip space
     glm::mat4 invProjection[2];
-    // Mono proj matrix or Left and Right proj matrix going from Mono Eye space to side clip space of
-    // the previous frame
-    glm::mat4 previousProjection[2];
     // The mono projection for sure
     glm::mat4 projectionMono;
     // Inv View matrix from eye space (mono) to world space
     glm::mat4 invView;
     // View matrix from world space to eye space (mono)
     glm::mat4 view;
+    // Mono proj matrix or Left and Right proj matrix going from Mono Eye space to side clip space without jittering
+    glm::mat4 projectionUnjittered[2];
+    // Inverse proj matrix or Left and Right proj matrix going from Mono Eye space to side clip space without jittering
+    glm::mat4 invProjectionUnjittered[2];
 
     FrameTransform() {}
 };

View file

@@ -32,10 +32,11 @@ struct DeferredFrameTransform {
     vec4 _stereoInfo;
     mat4 _projection[2];
     mat4 _invProjection[2];
-    mat4 _previousProjection[2];
     mat4 _projectionMono;
     mat4 _viewInverse;
     mat4 _view;
+    mat4 _projectionUnJittered[2];
+    mat4 _invProjectionUnJittered[2];
 };
 
 uniform deferredFrameTransformBuffer {
@@ -63,6 +64,12 @@ mat4 getProjection(int side) {
 mat4 getProjectionMono() {
     return frameTransform._projectionMono;
 }
+mat4 getUnjitteredProjection(int side) {
+    return frameTransform._projectionUnJittered[side];
+}
+mat4 getUnjitteredInvProjection(int side) {
+    return frameTransform._invProjectionUnJittered[side];
+}
 
 // positive near distance of the projection
 float getProjectionNear() {
@@ -139,6 +146,14 @@ vec3 evalEyePositionFromZdb(int side, float Zdb, vec2 texcoord) {
     return eyePos.xyz / eyePos.w;
 }
 
+vec3 evalUnjitteredEyePositionFromZdb(int side, float Zdb, vec2 texcoord) {
+    // compute the view space position using the depth
+    vec3 clipPos;
+    clipPos.xyz = vec3(texcoord.xy, Zdb) * 2.0 - 1.0;
+    vec4 eyePos = frameTransform._invProjectionUnJittered[side] * vec4(clipPos.xyz, 1.0);
+    return eyePos.xyz / eyePos.w;
+}
+
 vec3 evalEyePositionFromZeye(int side, float Zeye, vec2 texcoord) {
     float Zdb = evalZdbFromZeye(Zeye);
     return evalEyePositionFromZdb(side, Zdb, texcoord);

View file

@@ -28,11 +28,11 @@ void main(void) {
     float Zdb = texelFetch(depthMap, ivec2(gl_FragCoord.xy), 0).x;
 
     // The position of the pixel fragment in Eye space then in world space
-    vec3 eyePos = evalEyePositionFromZdb(stereoSide.x, Zdb, texcoordPos);
+    vec3 eyePos = evalUnjitteredEyePositionFromZdb(stereoSide.x, Zdb, texcoordPos);
     vec3 worldPos = (getViewInverse() * vec4(eyePos, 1.0)).xyz;
     vec3 prevEyePos = (getPreviousView() * vec4(worldPos, 1.0)).xyz;
-    vec4 prevClipPos = (frameTransform._projection[stereoSide.x] * vec4(prevEyePos, 1.0));
+    vec4 prevClipPos = (getUnjitteredProjection(stereoSide.x) * vec4(prevEyePos, 1.0));
     vec2 prevUV = 0.5 * (prevClipPos.xy / prevClipPos.w) + vec2(0.5);
 
     //vec2 imageSize = getWidthHeight(0);
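The reprojection this pass now performs, restated as CPU-side reference math (a sketch in C++/glm; the matrix parameters stand in for the values the shader reads from the frame transform buffer, and all names are illustrative):

#include <glm/glm.hpp>

// Maps the current pixel (uv, depth-buffer Zdb) back to last frame's UV, using
// unjittered matrices on both ends so the TAA jitter does not leak into the result.
glm::vec2 reprojectToPreviousUV(glm::vec2 uv, float Zdb,
                                const glm::mat4& invProjUnjittered,
                                const glm::mat4& viewInverse,
                                const glm::mat4& previousView,
                                const glm::mat4& projUnjittered) {
    // Current pixel back to eye space via the unjittered inverse projection
    glm::vec4 clipPos(glm::vec3(uv, Zdb) * 2.0f - 1.0f, 1.0f);
    glm::vec4 eyePos = invProjUnjittered * clipPos;
    glm::vec3 eye = glm::vec3(eyePos) / eyePos.w;

    // Eye -> world -> previous-frame eye space
    glm::vec3 world = glm::vec3(viewInverse * glm::vec4(eye, 1.0f));
    glm::vec3 prevEye = glm::vec3(previousView * glm::vec4(world, 1.0f));

    // Previous-frame eye space -> clip -> UV, again with an unjittered projection
    glm::vec4 prevClip = projUnjittered * glm::vec4(prevEye, 1.0f);
    return 0.5f * (glm::vec2(prevClip) / prevClip.w) + glm::vec2(0.5f);
}

Note that the same current-frame unjittered projection is reused for the previous frame's clip-space transform, which is presumably why the previousProjection fields are removed elsewhere in this commit; the velocity value ultimately written by the pass lies outside the lines shown here.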