Adding the full code path, starting to debug the problems in HMD

samcake 2017-09-05 18:09:46 -07:00
parent bff5d654a6
commit e70f261de4
13 changed files with 232 additions and 95 deletions

View file

@@ -356,7 +356,7 @@ void OpenGLDisplayPlugin::customizeContext() {
auto presentThread = DependencyManager::get<PresentThread>();
Q_ASSERT(thread() == presentThread->thread());
getGLBackend()->setCameraCorrection(mat4());
getGLBackend()->setCameraCorrection(mat4(), true);
for (auto& cursorValue : _cursorsData) {
auto& cursorData = cursorValue.second;

View file

@@ -779,9 +779,12 @@ void GLBackend::recycle() const {
}
void GLBackend::setCameraCorrection(const Mat4& correction) {
void GLBackend::setCameraCorrection(const Mat4& correction, bool reset) {
auto invCorrection = glm::inverse(correction);
_transform._correction.prevCorrection = (reset ? correction : _transform._correction.correction);
_transform._correction.prevCorrectionInverse = (reset ? invCorrection : _transform._correction.correctionInverse);
_transform._correction.correction = correction;
_transform._correction.correctionInverse = glm::inverse(correction);
_transform._correction.correctionInverse = invCorrection;
_pipeline._cameraCorrectionBuffer._buffer->setSubData(0, _transform._correction);
_pipeline._cameraCorrectionBuffer._buffer->flush();
}
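For readability, here is a standalone sketch (not part of the commit) of the update rule setCameraCorrection now applies, written directly against glm with the GLBackend/Buffer plumbing omitted. The note on what reset is for is an assumption based on the reset=true call site in OpenGLDisplayPlugin::customizeContext() above.

#include <glm/glm.hpp>

struct CameraCorrectionSketch {
    glm::mat4 correction{ 1.0f };
    glm::mat4 correctionInverse{ 1.0f };
    glm::mat4 prevCorrection{ 1.0f };
    glm::mat4 prevCorrectionInverse{ 1.0f };
};

void setCameraCorrectionSketch(CameraCorrectionSketch& c, const glm::mat4& correction, bool reset = false) {
    const glm::mat4 invCorrection = glm::inverse(correction);
    // With reset == true, "previous" collapses onto the new value, presumably so the
    // first frame after a context rebuild reports no correction delta; otherwise the
    // current values roll into the prev slots before being overwritten.
    c.prevCorrection        = reset ? correction    : c.correction;
    c.prevCorrectionInverse = reset ? invCorrection : c.correctionInverse;
    c.correction            = correction;
    c.correctionInverse     = invCorrection;
}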

View file

@@ -68,7 +68,7 @@ public:
virtual ~GLBackend();
void setCameraCorrection(const Mat4& correction);
void setCameraCorrection(const Mat4& correction, bool reset = false);
void render(const Batch& batch) final override;
// This call synchronizes the full Backend cache with the current GLState
@@ -302,9 +302,12 @@ protected:
// Allows for correction of the camera pose to account for changes
// between the time when a batch was recorded and the time(s) when it is
// executed
// Prev is the previous correction used in the previous frame
struct CameraCorrection {
Mat4 correction;
Mat4 correctionInverse;
mat4 correction;
mat4 correctionInverse;
mat4 prevCorrection;
mat4 prevCorrectionInverse;
};
struct TransformStageState {

View file

@@ -288,6 +288,9 @@ void Antialiasing::configure(const Config& config) {
_params.edit().setUnjitter(config.unjitter);
_params.edit().setConstrainColor(config.constrainColor);
_params.edit().setConstrainColor9Taps(config.constrainColor9Taps);
_params.edit().setClipHistoryColor(config.clipHistoryColor);
_params.edit().setFeedbackColor(config.feedbackColor);
_params.edit().debugShowVelocityThreshold = config.debugShowVelocityThreshold;
@@ -393,34 +396,22 @@ int JitterSampleConfig::cycleStopPauseRun() {
_state = (_state + 1) % 3;
switch (_state) {
case 0: {
stop = true;
freeze = false;
setIndex(-1);
return none();
break;
}
case 1: {
stop = false;
freeze = true;
setIndex(0);
return pause();
break;
}
case 2:
default: {
stop = false;
freeze = false;
setIndex(0);
return play();
break;
}
}
return _state;
}
int JitterSampleConfig::pause() {
freeze = true;
emit dirty();
return _index;
}
int JitterSampleConfig::prev() {
setIndex(_index - 1);
return _index;
@@ -431,12 +422,33 @@ int JitterSampleConfig::next() {
return _index;
}
int JitterSampleConfig::play() {
int JitterSampleConfig::none() {
_state = 0;
stop = true;
freeze = false;
emit dirty();
return _index;
setIndex(-1);
return _state;
}
int JitterSampleConfig::pause() {
_state = 1;
stop = false;
freeze = true;
setIndex(0);
return _state;
}
int JitterSampleConfig::play() {
_state = 2;
stop = false;
freeze = false;
setIndex(0);
return _state;
}
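// Editor's sketch, not from the commit: a minimal standalone version of the
// three-state cycle that cycleStopPauseRun() drives through none()/pause()/play(),
// mirroring the values visible in the diff above. The type name is hypothetical.
struct JitterCycleSketch {
    int state{ 0 };        // 0: none, 1: pause, 2: play
    bool stop{ true };
    bool freeze{ false };
    int index{ -1 };

    int cycle() {
        state = (state + 1) % 3;
        switch (state) {
        case 0:  stop = true;  freeze = false; index = -1; break; // none(): jitter off
        case 1:  stop = false; freeze = true;  index = 0;  break; // pause(): frozen at sample 0
        default: stop = false; freeze = false; index = 0;  break; // play(): jitter sequence runs
        }
        return state;
    }
};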
template <int B> class Halton {
public:
float eval(int index) {

View file

@@ -36,9 +36,10 @@ public:
public slots:
int cycleStopPauseRun();
int pause();
int prev();
int next();
int none();
int pause();
int play();
int getIndex() const { return _index; }
@@ -89,6 +90,9 @@ class AntialiasingConfig : public render::Job::Config {
Q_PROPERTY(bool unjitter MEMBER unjitter NOTIFY dirty)
Q_PROPERTY(bool constrainColor MEMBER constrainColor NOTIFY dirty)
Q_PROPERTY(bool constrainColor9Taps MEMBER constrainColor9Taps NOTIFY dirty)
Q_PROPERTY(bool clipHistoryColor MEMBER clipHistoryColor NOTIFY dirty)
Q_PROPERTY(bool feedbackColor MEMBER feedbackColor NOTIFY dirty)
Q_PROPERTY(bool debug MEMBER debug NOTIFY dirty)
Q_PROPERTY(float debugX MEMBER debugX NOTIFY dirty)
@@ -115,6 +119,9 @@ public:
bool unjitter{ true };
bool constrainColor{ true };
bool constrainColor9Taps{ true };
bool clipHistoryColor{ true };
bool feedbackColor{ true };
bool debug { false };
bool showCursorPixel { false };
@@ -144,6 +151,15 @@ struct TAAParams {
void setConstrainColor(bool enabled) { SET_BIT(flags.y, 1, enabled); }
bool isConstrainColor() const { return (bool)GET_BIT(flags.y, 1); }
void setConstrainColor9Taps(bool enabled) { SET_BIT(flags.y, 2, enabled); }
bool isConstrainColor9Taps() const { return (bool)GET_BIT(flags.y, 2); }
void setClipHistoryColor(bool enabled) { SET_BIT(flags.y, 3, enabled); }
bool isClipHistoryColor() const { return (bool)GET_BIT(flags.y, 3); }
void setFeedbackColor(bool enabled) { SET_BIT(flags.y, 4, enabled); }
bool isFeedbackColor() const { return (bool)GET_BIT(flags.y, 4); }
void setDebug(bool enabled) { SET_BIT(flags.x, 0, enabled); }
bool isDebug() const { return (bool) GET_BIT(flags.x, 0); }
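The two new TAAParams accessors pack the clipHistoryColor and feedbackColor toggles into bits 3 and 4 of flags.y, which is exactly what the matching taa_clipHistoryColor()/taa_feedbackColor() predicates added to the shader read back. As a hedged sketch (the real SET_BIT/GET_BIT macros live elsewhere in this header and may differ), the packing works roughly like this:

// Hypothetical stand-ins for the SET_BIT / GET_BIT helpers used by TAAParams;
// the actual macros are not shown in this diff.
#define SKETCH_SET_BIT(bits, bitIndex, enable) \
    ((bits) = ((bits) & ~(1 << (bitIndex))) | (((enable) ? 1 : 0) << (bitIndex)))
#define SKETCH_GET_BIT(bits, bitIndex) (((bits) >> (bitIndex)) & 1)

// With that packing, flags.y carries: bit 1 = constrainColor, bit 2 = constrainColor9Taps,
// bit 3 = clipHistoryColor, bit 4 = feedbackColor, matching the taa_*() queries below.
// Example: SKETCH_SET_BIT(flags.y, 3, true);  bool on = SKETCH_GET_BIT(flags.y, 3);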

View file

@@ -16,6 +16,9 @@
struct CameraCorrection {
mat4 _correction;
mat4 _correctionInverse;
mat4 _prevCorrection;
mat4 _prevCorrectionInverse;
};
uniform cameraCorrectionBuffer {

View file

@@ -159,7 +159,7 @@ const gpu::PipelinePointer& VelocityBufferPass::getCameraMotionPipeline() {
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
// Stencil test the curvature pass for object pixels only, not the background
PrepareStencil::testShape(*state);
// PrepareStencil::testShape(*state);
state->setColorWriteMask(true, true, false, false);

View file

@@ -45,9 +45,17 @@ void main() {
vec3 nextColor = mix(historyColor, sourceColor, params.blend);
if (taa_constrainColor()) {
nextColor = taa_temporalReprojection(sourceColor, historyColor, fragUV, fragVel, nearFragUV.z, fragJitterPix);
if (taa_constrainColor()) {
mat3 colorMinMaxAvg = taa_evalNeighbourColorRegion(sourceColor, fragUV, fragVel, nearFragUV.z, fragJitterPix);
// clamp history to neighbourhood of current sample
historyColor = taa_clampColor(colorMinMaxAvg[0], colorMinMaxAvg[1], colorMinMaxAvg[2], historyColor);
nextColor = historyColor;
}
if (taa_feedbackColor()) {
nextColor = taa_evalFeedbackColor(sourceColor, historyColor, params.blend);
}
outFragColor = vec4(taa_resolveColor(nextColor), 1.0);
}

View file

@@ -82,6 +82,15 @@ bool taa_unjitter() {
bool taa_constrainColor() {
return GET_BIT(params.flags.y, 1);
}
bool taa_constrainColor9Taps() {
return GET_BIT(params.flags.y, 2);
}
bool taa_clipHistoryColor() {
return GET_BIT(params.flags.y, 3);
}
bool taa_feedbackColor() {
return GET_BIT(params.flags.y, 4);
}
vec2 taa_getDebugCursorTexcoord() {
return params.pixelInfo_orbZoom.xy;
@@ -203,11 +212,14 @@ float Luminance(vec3 rgb) {
return rgb.x/4.0 + rgb.y/2.0 + rgb.z/4.0;
}
mat3 taa_evalNeighbourColorRegion(vec2 fragUV, vec2 fragVelocity, float fragZe, vec2 fragJitterPix) {
#define MINMAX_3X3_ROUNDED 1
mat3 taa_evalNeighbourColorRegion(vec3 sourceColor, vec2 fragUV, vec2 fragVelocity, float fragZe, vec2 fragJitterPix) {
vec2 imageSize = getWidthHeight(0);
vec2 texelSize = getInvWidthHeight();
vec3 cmin, cmax, cavg;
#if MINMAX_3X3 || MINMAX_3X3_ROUNDED
if (taa_constrainColor9Taps()) {
vec2 du = vec2(texelSize.x, 0.0);
vec2 dv = vec2(0.0, texelSize.y);
@@ -222,11 +234,13 @@ mat3 taa_evalNeighbourColorRegion(vec2 fragUV, vec2 fragVelocity, float fragZe,
vec3 cbc = taa_fetchSourceMap(fragUV + dv).rgb;
vec3 cbr = taa_fetchSourceMap(fragUV + dv + du).rgb;
vec3 cmin = min(ctl, min(ctc, min(ctr, min(cml, min(cmc, min(cmr, min(cbl, min(cbc, cbr))))))));
vec3 cmax = max(ctl, max(ctc, max(ctr, max(cml, max(cmc, max(cmr, max(cbl, max(cbc, cbr))))))));
cmin = min(ctl, min(ctc, min(ctr, min(cml, min(cmc, min(cmr, min(cbl, min(cbc, cbr))))))));
cmax = max(ctl, max(ctc, max(ctr, max(cml, max(cmc, max(cmr, max(cbl, max(cbc, cbr))))))));
#if MINMAX_3X3_ROUNDED || USE_YCOCG || USE_CLIPPING
vec3 cavg = (ctl + ctc + ctr + cml + cmc + cmr + cbl + cbc + cbr) / 9.0;
cavg = (ctl + ctc + ctr + cml + cmc + cmr + cbl + cbc + cbr) / 9.0;
#else
cavg = (cmin + cmax ) * 0.5;
#endif
#if MINMAX_3X3_ROUNDED
@@ -237,12 +251,10 @@ mat3 taa_evalNeighbourColorRegion(vec2 fragUV, vec2 fragVelocity, float fragZe,
cmax = 0.5 * (cmax + cmax5);
cavg = 0.5 * (cavg + cavg5);
#endif
#else
} else {
const float _SubpixelThreshold = 0.5;
const float _GatherBase = 0.5;
const float _GatherSubpixelMotion = 0.1666;
const float _FeedbackMin = 0.1;
const float _FeedbackMax = 0.9;
vec2 texel_vel = fragVelocity * imageSize;
float texel_vel_mag = length(texel_vel) * -fragZe;
@@ -256,19 +268,22 @@ mat3 taa_evalNeighbourColorRegion(vec2 fragUV, vec2 fragVelocity, float fragZe,
vec3 c01 = taa_fetchSourceMap(fragUV + ss_offset01).rgb;
vec3 c11 = taa_fetchSourceMap(fragUV + ss_offset11).rgb;
vec3 cmin = min(c00, min(c10, min(c01, c11)));
vec3 cmax = max(c00, max(c10, max(c01, c11)));
cmin = min(c00, min(c10, min(c01, c11)));
cmax = max(c00, max(c10, max(c01, c11)));
cavg = (cmin + cmax ) * 0.5;
#if USE_YCOCG || USE_CLIPPING
vec3 cavg = (c00 + c10 + c01 + c11) / 4.0;
cavg = (c00 + c10 + c01 + c11) / 4.0;
#else
cavg = (cmin + cmax ) * 0.5;
#endif
#endif
}
// shrink chroma min-max
// shrink chroma min-max
#if USE_YCOCG
vec2 chroma_extent = vec2(0.25 * 0.5 * (cmax.r - cmin.r));
vec2 chroma_center = texel0.gb;
colorMinMaxAvg[0].yz = chroma_center - chroma_extent;
vec2 chroma_center = sourceColor.gb;
cmin.yz = chroma_center - chroma_extent;
cmax.yz = chroma_center + chroma_extent;
cavg.yz = chroma_center;
#endif
@@ -276,47 +291,77 @@ mat3 taa_evalNeighbourColorRegion(vec2 fragUV, vec2 fragVelocity, float fragZe,
return mat3(cmin, cmax, cavg);
}
#define USE_OPTIMIZATIONS 1
vec3 taa_temporalReprojection(vec3 sourceColor, vec3 historyColor, vec2 fragUV, vec2 fragVelocity, float fragZe, vec2 fragJitterPix)
{
vec3 texel0 = (sourceColor);
vec3 texel1 = (historyColor);
vec3 taa_clampColor(vec3 colorMin, vec3 colorMax, vec3 colorAvg, vec3 color) {
if (taa_clipHistoryColor()) {
vec3 p = clamp(colorAvg, colorMin, colorMax);
vec3 q = color;
const float eps = 0.00001;
vec2 imageSize = getWidthHeight(0);
vec2 texelSize = getInvWidthHeight();
#if USE_OPTIMIZATIONS
// note: only clips towards aabb center (but fast!)
vec3 p_clip = 0.5 * (colorMax + colorMin);
vec3 e_clip = 0.5 * (colorMax - colorMin) + vec3(eps);
if (taa_unjitter()) {
fragUV -= fragJitterPix * texelSize;
}
vec3 v_clip = q - p_clip;
vec3 v_unit = v_clip.xyz / e_clip;
vec3 a_unit = abs(v_unit);
float ma_unit = max(a_unit.x, max(a_unit.y, a_unit.z));
mat3 colorMinMaxAvg = taa_evalNeighbourColorRegion(fragUV, fragVelocity, fragZe, fragJitterPix);
// clamp to neighbourhood of current sample
#if USE_CLIPPING
texel1 = clip_aabb(colorMinMaxAvg[0], colorMinMaxAvg[1], clamp(colorMinMaxAvg[2], colorMinMaxAvg[0], colorMinMaxAvg[1]), texel1);
#else
texel1 = clamp(texel1, colorMinMaxAvg[0], colorMinMaxAvg[1]);
#endif
if (ma_unit > 1.0)
return p_clip + v_clip / ma_unit;
else
return q;// point inside aabb
#else
vec3 r = q - p;
vec3 rmax = colorMax - p.xyz;
vec3 rmin = colorMin - p.xyz;
// feedback weight from unbiased luminance diff (t.lottes)
if (r.x > rmax.x + eps)
r *= (rmax.x / r.x);
if (r.y > rmax.y + eps)
r *= (rmax.y / r.y);
if (r.z > rmax.z + eps)
r *= (rmax.z / r.z);
if (r.x < rmin.x - eps)
r *= (rmin.x / r.x);
if (r.y < rmin.y - eps)
r *= (rmin.y / r.y);
if (r.z < rmin.z - eps)
r *= (rmin.z / r.z);
return p + r;
#endif
} else {
return clamp(color, colorMin, colorMax);
}
}
vec3 taa_evalFeedbackColor(vec3 sourceColor, vec3 historyColor, float blendFactor) {
const float _FeedbackMin = 0.1;
const float _FeedbackMax = 0.9;
// feedback weight from unbiased luminance diff (t.lottes)
#if USE_YCOCG
float lum0 = texel0.r;
float lum1 = texel1.r;
float lum0 = sourceColor.r;
float lum1 = historyColor.r;
#else
float lum0 = Luminance(texel0.rgb);
float lum1 = Luminance(texel1.rgb);
float lum0 = Luminance(sourceColor.rgb);
float lum1 = Luminance(historyColor.rgb);
#endif
float unbiased_diff = abs(lum0 - lum1) / max(lum0, max(lum1, 0.2));
float unbiased_weight = 1.0 - unbiased_diff;
float unbiased_weight_sqr = unbiased_weight * unbiased_weight;
float k_feedback = mix(_FeedbackMin, _FeedbackMax, unbiased_weight_sqr);
// output
vec3 nextColor = mix(texel1, texel0, k_feedback).xyz;
float unbiased_diff = abs(lum0 - lum1) / max(lum0, max(lum1, 0.2));
float unbiased_weight = 1.0 - unbiased_diff;
float unbiased_weight_sqr = unbiased_weight * unbiased_weight;
float k_feedback = mix(_FeedbackMin, _FeedbackMax, unbiased_weight_sqr);
vec3 nextColor = mix(historyColor, sourceColor, k_feedback * blendFactor).xyz;
return nextColor;
}
<$declareColorWheel()$>
vec3 taa_getVelocityColorRelative(float velocityPixLength) {
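Taken together, the two new helpers split the old temporal reprojection into two explicit steps: first constrain the history color to the neighbourhood of the current sample (clamp, or clip towards the AABB center when clipHistoryColor is on), then blend history and source with a feedback weight derived from the unbiased luminance difference (the t.lottes trick). A compact glm sketch of those two steps, mirroring the shader code above; names are illustrative, not from the codebase.

#include <glm/glm.hpp>

// Clip the history color towards the center of the neighbourhood AABB
// (the USE_OPTIMIZATIONS path of taa_clampColor above).
glm::vec3 clipTowardsAabbCenter(glm::vec3 cmin, glm::vec3 cmax, glm::vec3 history) {
    const float eps = 0.00001f;
    glm::vec3 center = 0.5f * (cmax + cmin);
    glm::vec3 extent = 0.5f * (cmax - cmin) + glm::vec3(eps);
    glm::vec3 v = history - center;
    glm::vec3 a = glm::abs(v / extent);
    float maxAxis = glm::max(a.x, glm::max(a.y, a.z));
    // Outside the box: pull the sample back to the box surface along the line to the center.
    return (maxAxis > 1.0f) ? center + v / maxAxis : history;
}

// Feedback blend weighted by the unbiased luminance difference, as in
// taa_evalFeedbackColor above (luma taken from the first channel, i.e. the YCoCg path).
glm::vec3 feedbackBlend(glm::vec3 source, glm::vec3 history, float blendFactor) {
    const float feedbackMin = 0.1f, feedbackMax = 0.9f;
    float lum0 = source.r, lum1 = history.r;
    float unbiasedDiff = glm::abs(lum0 - lum1) / glm::max(lum0, glm::max(lum1, 0.2f));
    float unbiasedWeight = 1.0f - unbiasedDiff;
    float kFeedback = glm::mix(feedbackMin, feedbackMax, unbiasedWeight * unbiasedWeight);
    return glm::mix(history, source, kFeedback * blendFactor);
}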

View file

@@ -34,9 +34,9 @@ void main(void) {
// The position of the pixel fragment in Eye space then in world space
vec3 eyePos = evalEyePositionFromZeye(stereoSide.x, Zeye, texcoordPos);
vec3 worldPos = (frameTransform._viewInverse * vec4(eyePos, 1.0)).xyz;
vec3 worldPos = (frameTransform._viewInverse * cameraCorrection._correction * vec4(eyePos, 1.0)).xyz;
vec3 prevEyePos = (frameTransform._prevView * vec4(worldPos, 1.0)).xyz;
vec3 prevEyePos = (cameraCorrection._prevCorrectionInverse * frameTransform._prevView * vec4(worldPos, 1.0)).xyz;
vec4 prevClipPos = (frameTransform._projection[stereoSide.x] * vec4(prevEyePos, 1.0));
vec2 prevUV = 0.5 * (prevClipPos.xy / prevClipPos.w) + vec2(0.5);
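This hunk is the core of the new code path: the current eye-space position is pushed to world space through the current camera correction, then re-viewed with the previous frame's view and the inverse of the previous correction to get the previous clip position and UV. A glm sketch of that chain under the same naming as the shader, with the uniform-buffer plumbing omitted:

#include <glm/glm.hpp>

// Inputs mirror the shader: eyePos is the reconstructed eye-space position of the
// fragment; the matrices come from the frame transform and camera correction buffers.
glm::vec2 prevFrameUV(const glm::vec3& eyePos,
                      const glm::mat4& viewInverse,           // frameTransform._viewInverse
                      const glm::mat4& correction,            // cameraCorrection._correction
                      const glm::mat4& prevView,              // frameTransform._prevView
                      const glm::mat4& prevCorrectionInverse, // cameraCorrection._prevCorrectionInverse
                      const glm::mat4& projection) {          // frameTransform._projection[side]
    glm::vec3 worldPos = glm::vec3(viewInverse * correction * glm::vec4(eyePos, 1.0f));
    glm::vec3 prevEyePos = glm::vec3(prevCorrectionInverse * prevView * glm::vec4(worldPos, 1.0f));
    glm::vec4 prevClipPos = projection * glm::vec4(prevEyePos, 1.0f);
    // Perspective divide and map from NDC [-1, 1] to UV [0, 1].
    return 0.5f * (glm::vec2(prevClipPos) / prevClipPos.w) + glm::vec2(0.5f);
}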

View file

@@ -45,8 +45,8 @@ Item {
HifiControls.Label {
id: labelValue
text: sliderControl.value.toFixed(root.integral ? 0 : 2)
anchors.left: root.left
anchors.leftMargin: 200
anchors.right: root.right
anchors.rightMargin: 8
anchors.top: root.top
anchors.topMargin: 15
}

View file

@@ -30,25 +30,32 @@ Rectangle {
spacing: 20
Column{
spacing: 10
HifiControls.ConfigSlider {
label: qsTr("Source blend")
integral: false
config: Render.getConfig("RenderMainView.Antialiasing")
property: "blend"
max: 1.0
min: 0.0
Row {
spacing: 10
property bool debugFXAA: false
HifiControls.Button {
text: {
if (debugFXAA) {
return "FXAA"
} else {
return "TAA"
}
}
onClicked: {
if (debugFXAA) {
Render.getConfig("RenderMainView.JitterCam").stop();
Render.getConfig("RenderMainView.Antialiasing").debugFXAAX = 0;
} else {
Render.getConfig("RenderMainView.JitterCam").run();
Render.getConfig("RenderMainView.Antialiasing").debugFXAAX = 1.0;
}
debugFXAA = !debugFXAA
}
}
}
ConfigSlider {
label: qsTr("Velocity scale")
integral: false
config: Render.getConfig("RenderMainView.Antialiasing")
property: "velocityScale"
max: 1.0
min: 0.0
}
Separator {}
Row {
spacing: 10
@@ -90,7 +97,7 @@ Rectangle {
}
}
Separator {}
Row {
Column {
spacing: 10
HifiControls.CheckBox {
boxSize: 20
@@ -98,6 +105,37 @@ Rectangle {
checked: Render.getConfig("RenderMainView.Antialiasing")["constrainColor"]
onCheckedChanged: { Render.getConfig("RenderMainView.Antialiasing")["constrainColor"] = checked }
}
HifiControls.CheckBox {
boxSize: 20
text: "Constrain color 9 Taps"
checked: Render.getConfig("RenderMainView.Antialiasing")["constrainColor9Taps"]
onCheckedChanged: { Render.getConfig("RenderMainView.Antialiasing")["constrainColor9Taps"] = checked }
}
HifiControls.CheckBox {
boxSize: 20
text: "Clip / Clamp History color"
checked: Render.getConfig("RenderMainView.Antialiasing")["clipHistoryColor"]
onCheckedChanged: { Render.getConfig("RenderMainView.Antialiasing")["clipHistoryColor"] = checked }
}
Row {
spacing: 10
HifiControls.CheckBox {
boxSize: 20
text: "Feedback history color"
checked: Render.getConfig("RenderMainView.Antialiasing")["feedbackColor"]
onCheckedChanged: { Render.getConfig("RenderMainView.Antialiasing")["feedbackColor"] = checked }
}
HifiControls.ConfigSlider {
label: qsTr("Source blend")
integral: false
config: Render.getConfig("RenderMainView.Antialiasing")
property: "blend"
max: 1.0
min: 0.0
}
}
}
Row {
@@ -156,6 +194,15 @@ Rectangle {
max: 32.0
min: 1.0
}
Separator {}
ConfigSlider {
label: qsTr("Velocity scale")
integral: false
config: Render.getConfig("RenderMainView.Antialiasing")
property: "velocityScale"
max: 1.0
min: 0.0
}
}
}
}