
Merge remote-tracking branch 'upstream/master' into tony/hmd-recenter-on-rotation

Anthony J. Thibault 2016-01-22 13:37:43 -08:00
commit 8bfa80d5fc
131 changed files with 1861 additions and 3127 deletions


@ -4,7 +4,7 @@ setup_hifi_project(Core Gui Network Script Quick Widgets WebSockets)
# link in the shared libraries
link_hifi_libraries(
audio avatars octree environment gpu model fbx entities
audio avatars octree gpu model fbx entities
networking animation recording shared script-engine embedded-webserver
controllers physics
)


@ -29,10 +29,6 @@
AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
QCoreApplication(argc, argv)
{
// to work around the Qt constant wireless scanning, set the env for polling interval very high
const QByteArray EXTREME_BEARER_POLL_TIMEOUT = QString::number(INT_MAX).toLocal8Bit();
qputenv("QT_BEARER_POLL_TIMEOUT", EXTREME_BEARER_POLL_TIMEOUT);
# ifndef WIN32
setvbuf(stdout, NULL, _IOLBF, 0);
# endif


@ -53,11 +53,19 @@ void AssetServer::run() {
const QString RESOURCES_PATH = "assets";
_resourcesDirectory = QDir(ServerPathUtils::getDataDirectory()).filePath(RESOURCES_PATH);
if (!_resourcesDirectory.exists()) {
qDebug() << "Creating resources directory";
_resourcesDirectory.mkpath(".");
bool noExistingAssets = !_resourcesDirectory.exists() \
|| _resourcesDirectory.entryList(QDir::Files).size() == 0;
if (noExistingAssets) {
qDebug() << "Asset resources directory not found, searching for existing asset resources";
QString oldDataDirectory = QCoreApplication::applicationDirPath();
auto oldResourcesDirectory = QDir(oldDataDirectory).filePath("resources/" + RESOURCES_PATH);
if (QDir(oldResourcesDirectory).exists()) {
qDebug() << "Existing assets found in " << oldResourcesDirectory << ", copying to " << _resourcesDirectory;
@ -68,11 +76,16 @@ void AssetServer::run() {
resourcesParentDirectory.mkpath(".");
}
QFile::copy(oldResourcesDirectory, _resourcesDirectory.absolutePath());
}
auto files = QDir(oldResourcesDirectory).entryList(QDir::Files);
qDebug() << "Creating resources directory";
_resourcesDirectory.mkpath(".");
for (auto& file : files) {
auto from = oldResourcesDirectory + QDir::separator() + file;
auto to = _resourcesDirectory.absoluteFilePath(file);
qDebug() << "\tCopying from " << from << " to " << to;
QFile::copy(from, to);
}
}
}
qDebug() << "Serving files from: " << _resourcesDirectory.path();


@ -9,11 +9,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AssignmentClientApp.h"
#include <QtCore/QDebug>
#include <SharedUtil.h>
#include "AssignmentClientApp.h"
int main(int argc, char* argv[]) {
disableQtBearerPoll(); // Fixes wifi ping spikes
AssignmentClientApp app(argc, argv);
int acReturn = app.exec();
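The disableQtBearerPoll() call added here (and in the domain-server and interface main() functions further down) replaces the QT_BEARER_POLL_TIMEOUT workaround removed from the AssignmentClientApp constructor above. A minimal sketch of what such a SharedUtil helper presumably looks like, inferred from those removed lines rather than taken from SharedUtil itself:

    // Presumed shape of the shared helper; the body mirrors the workaround
    // removed from AssignmentClientApp::AssignmentClientApp() above.
    #include <climits>

    #include <QtCore/QByteArray>
    #include <QtCore/QString>
    #include <QtCore/QtGlobal>

    void disableQtBearerPoll() {
        // Work around Qt's constant wireless scanning by pushing the bearer
        // polling interval out as far as it will go.
        const QByteArray EXTREME_BEARER_POLL_TIMEOUT = QString::number(INT_MAX).toLocal8Bit();
        qputenv("QT_BEARER_POLL_TIMEOUT", EXTREME_BEARER_POLL_TIMEOUT);
    }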


@ -21,7 +21,6 @@
#include <HTTPManager.h>
#include <ThreadedAssignment.h>
#include <EnvironmentData.h>
#include "OctreePersistThread.h"
#include "OctreeSendThread.h"


@ -65,7 +65,7 @@
{
"name": "viewpoint",
"label": "Viewpoint",
"placeholder": "/512,512,512"
"placeholder": "/0,0,0"
}
]
}


@ -65,10 +65,6 @@ DomainServer::DomainServer(int argc, char* argv[]) :
LogUtils::init();
Setting::init();
// to work around the Qt constant wireless scanning, set the env for polling interval very high
const QByteArray EXTREME_BEARER_POLL_TIMEOUT = QString::number(INT_MAX).toLocal8Bit();
qputenv("QT_BEARER_POLL_TIMEOUT", EXTREME_BEARER_POLL_TIMEOUT);
connect(this, &QCoreApplication::aboutToQuit, this, &DomainServer::aboutToQuit);
@ -1872,16 +1868,25 @@ void DomainServer::processPathQueryPacket(QSharedPointer<ReceivedMessage> messag
const QString PATHS_SETTINGS_KEYPATH_FORMAT = "%1.%2";
const QString PATH_VIEWPOINT_KEY = "viewpoint";
const QString INDEX_PATH = "/";
// check out paths in the _configMap to see if we have a match
const QVariant* pathMatch = valueForKeyPath(_settingsManager.getSettingsMap(),
QString(PATHS_SETTINGS_KEYPATH_FORMAT).arg(SETTINGS_PATHS_KEY)
.arg(pathQuery));
if (pathMatch) {
auto keypath = QString(PATHS_SETTINGS_KEYPATH_FORMAT).arg(SETTINGS_PATHS_KEY).arg(pathQuery);
const QVariant* pathMatch = valueForKeyPath(_settingsManager.getSettingsMap(), keypath);
if (pathMatch || pathQuery == INDEX_PATH) {
// we got a match, respond with the resulting viewpoint
auto nodeList = DependencyManager::get<LimitedNodeList>();
QString responseViewpoint = pathMatch->toMap()[PATH_VIEWPOINT_KEY].toString();
QString responseViewpoint;
// if we didn't match the path BUT this is for the index path then send back our default
if (pathMatch) {
responseViewpoint = pathMatch->toMap()[PATH_VIEWPOINT_KEY].toString();
} else {
const QString DEFAULT_INDEX_PATH = "/0,0,0/0,0,0,1";
responseViewpoint = DEFAULT_INDEX_PATH;
}
if (!responseViewpoint.isEmpty()) {
QByteArray viewpointUTF8 = responseViewpoint.toUtf8();
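The fallback viewpoint "/0,0,0/0,0,0,1" extends the new "/0,0,0" placeholder above with an orientation: a position at the domain origin followed by what appears to be an identity quaternion (x, y, z, w), so a query for the bare index path "/" now gets a usable viewpoint even when no paths are configured in settings.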


@ -23,6 +23,8 @@
#include "DomainServer.h"
int main(int argc, char* argv[]) {
disableQtBearerPoll(); // Fixes wifi ping spikes
#ifndef WIN32
setvbuf(stdout, NULL, _IOLBF, 0);
#endif


@ -1659,29 +1659,6 @@ PropertiesTool = function(opts) {
});
}
}
} else if (data.action == "centerAtmosphereToZone") {
if (selectionManager.hasSelection()) {
selectionManager.saveProperties();
for (var i = 0; i < selectionManager.selections.length; i++) {
var properties = selectionManager.savedProperties[selectionManager.selections[i]];
if (properties.type == "Zone") {
var centerOfZone = properties.boundingBox.center;
var atmosphereCenter = {
x: centerOfZone.x,
y: centerOfZone.y - properties.atmosphere.innerRadius,
z: centerOfZone.z
};
Entities.editEntity(selectionManager.selections[i], {
atmosphere: {
center: atmosphereCenter
},
});
}
}
pushCommandForSelections();
selectionManager._update();
}
}
}
});


@ -1,71 +0,0 @@
//
// changingAtmosphereExample.js
// examples
//
// Created by Brad Hefta-Gaub on 4/16/15.
// Copyright 2015 High Fidelity, Inc.
//
// This is an example script that demonstrates creating a zone using the atmosphere features that changes scatter properties
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var count = 0;
var stopAfter = 10000;
var zoneEntityA = Entities.addEntity({
type: "Zone",
position: { x: 1000, y: 1000, z: 1000},
dimensions: { x: 2000, y: 2000, z: 2000 },
keyLightColor: { red: 255, green: 0, blue: 0 },
stageSunModelEnabled: false,
shapeType: "sphere",
backgroundMode: "atmosphere",
atmosphere: {
center: { x: 1000, y: 0, z: 1000},
innerRadius: 1000.0,
outerRadius: 1025.0,
rayleighScattering: 0.0025, // Meaningful values 0 to ~0.01
mieScattering: 0.0010, // Meaningful values 0 to ~0.01
// First two, Meaningful values 0 to 1 each, blue, purple; third meaningful 0.3 to 1 - affects shape
scatteringWavelengths: { x: 0.650, y: 0.570, z: 0.475 },
hasStars: true
},
stage: {
latitude: 37.777,
longitude: 122.407,
altitude: 0.03,
day: 183,
hour: 5,
sunModelEnabled: true
}
});
// register the call back so it fires before each data send
Script.update.connect(function(deltaTime) {
// stop it...
if (count >= stopAfter) {
print("calling Script.stop()");
Script.stop();
}
count++;
var rayleighScattering = (count / 100000) % 0.01;
var mieScattering = (count / 100000) % 0.01;
var waveX = (count / 2000) % 1;
var waveZ = ((count / 2000) % 0.7) + 0.3;
Entities.editEntity(zoneEntityA, {
atmosphere: {
rayleighScattering: rayleighScattering,
mieScattering: mieScattering,
scatteringWavelengths: { x: waveX, y: waveX, z: waveZ }
},
});
});


@ -1,65 +0,0 @@
//
// zoneAtmosphereExample.js
// examples
//
// Created by Brad Hefta-Gaub on 4/16/15.
// Copyright 2015 High Fidelity, Inc.
//
// This is an example script that demonstrates creating a zone using the atmosphere features
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var count = 0;
var stopAfter = 10000;
var zoneEntityA = Entities.addEntity({
type: "Zone",
position: { x: 1000, y: 1000, z: 1000},
dimensions: { x: 2000, y: 2000, z: 2000 },
keyLightColor: { red: 255, green: 0, blue: 0 },
stageSunModelEnabled: false,
shapeType: "sphere",
backgroundMode: "atmosphere",
atmosphere: {
center: { x: 1000, y: 0, z: 1000},
innerRadius: 1000.0,
outerRadius: 1025.0,
rayleighScattering: 0.0025,
mieScattering: 0.0010,
scatteringWavelengths: { x: 0.650, y: 0.570, z: 0.475 },
hasStars: false
},
stage: {
latitude: 37.777,
longitude: 122.407,
altitude: 0.03,
day: 60,
hour: 0,
sunModelEnabled: true
}
});
// register the call back so it fires before each data send
Script.update.connect(function(deltaTime) {
// stop it...
if (count >= stopAfter) {
print("calling Script.stop()");
Script.stop();
}
count++;
var newHour = (count / 10) % 24;
var newIntensity = ((count / 10) % 24) / 24;
print("newHour:" + newHour);
print("newIntensity:" + newIntensity);
Entities.editEntity(zoneEntityA, {
stageHour: newHour,
keyLightIntensity: newIntensity
});
});


@ -360,20 +360,6 @@
var elZoneSkyboxColorBlue = document.getElementById("property-zone-skybox-color-blue");
var elZoneSkyboxURL = document.getElementById("property-zone-skybox-url");
var elZoneAtmosphereCenterX = document.getElementById("property-zone-atmosphere-center-x");
var elZoneAtmosphereCenterY = document.getElementById("property-zone-atmosphere-center-y");
var elZoneAtmosphereCenterZ = document.getElementById("property-zone-atmosphere-center-z");
var elCenterAtmosphereToZone = document.getElementById("center-atmosphere-in-zone");
var elZoneAtmosphereInnerRadius = document.getElementById("property-zone-atmosphere-inner-radius");
var elZoneAtmosphereOuterRadius = document.getElementById("property-zone-atmosphere-outer-radius");
var elZoneAtmosphereMieScattering = document.getElementById("property-zone-atmosphere-mie-scattering");
var elZoneAtmosphereRayleighScattering = document.getElementById("property-zone-atmosphere-rayleigh-scattering");
var elZoneAtmosphereScatteringWavelengthsX = document.getElementById("property-zone-atmosphere-scattering-wavelengths-x");
var elZoneAtmosphereScatteringWavelengthsY = document.getElementById("property-zone-atmosphere-scattering-wavelengths-y");
var elZoneAtmosphereScatteringWavelengthsZ = document.getElementById("property-zone-atmosphere-scattering-wavelengths-z");
var elZoneAtmosphereHasStars = document.getElementById("property-zone-atmosphere-has-stars");
var elPolyVoxSections = document.querySelectorAll(".poly-vox-section");
allSections.push(elPolyVoxSections);
var elVoxelVolumeSizeX = document.getElementById("property-voxel-volume-size-x");
@ -606,20 +592,7 @@
elZoneSkyboxColorBlue.value = properties.skybox.color.blue;
elZoneSkyboxURL.value = properties.skybox.url;
elZoneAtmosphereCenterX.value = properties.atmosphere.center.x;
elZoneAtmosphereCenterY.value = properties.atmosphere.center.y;
elZoneAtmosphereCenterZ.value = properties.atmosphere.center.z;
elZoneAtmosphereInnerRadius.value = properties.atmosphere.innerRadius;
elZoneAtmosphereOuterRadius.value = properties.atmosphere.outerRadius;
elZoneAtmosphereMieScattering.value = properties.atmosphere.mieScattering;
elZoneAtmosphereRayleighScattering.value = properties.atmosphere.rayleighScattering;
elZoneAtmosphereScatteringWavelengthsX.value = properties.atmosphere.scatteringWavelengths.x;
elZoneAtmosphereScatteringWavelengthsY.value = properties.atmosphere.scatteringWavelengths.y;
elZoneAtmosphereScatteringWavelengthsZ.value = properties.atmosphere.scatteringWavelengths.z;
elZoneAtmosphereHasStars.checked = properties.atmosphere.hasStars;
showElements(document.getElementsByClassName('skybox-section'), elZoneBackgroundMode.value == 'skybox');
showElements(document.getElementsByClassName('atmosphere-section'), elZoneBackgroundMode.value == 'atmosphere');
} else if (properties.type == "ParticleEffect") {
for (var i = 0; i < elParticleSections.length; i++) {
elParticleSections[i].style.display = 'block';
@ -874,25 +847,6 @@
elZoneSkyboxURL.addEventListener('change', createEmitGroupTextPropertyUpdateFunction('skybox','url'));
var zoneAtmosphereCenterChangeFunction = createEmitGroupVec3PropertyUpdateFunction(
'atmosphere','center', elZoneAtmosphereCenterX, elZoneAtmosphereCenterY, elZoneAtmosphereCenterZ);
elZoneAtmosphereCenterX.addEventListener('change', zoneAtmosphereCenterChangeFunction);
elZoneAtmosphereCenterY.addEventListener('change', zoneAtmosphereCenterChangeFunction);
elZoneAtmosphereCenterZ.addEventListener('change', zoneAtmosphereCenterChangeFunction);
elZoneAtmosphereInnerRadius.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('atmosphere','innerRadius'));
elZoneAtmosphereOuterRadius.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('atmosphere','outerRadius'));
elZoneAtmosphereMieScattering.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('atmosphere','mieScattering'));
elZoneAtmosphereRayleighScattering.addEventListener('change', createEmitGroupNumberPropertyUpdateFunction('atmosphere','rayleighScattering'));
var zoneAtmosphereScatterWavelengthsChangeFunction = createEmitGroupVec3PropertyUpdateFunction(
'atmosphere','scatteringWavelengths', elZoneAtmosphereScatteringWavelengthsX,
elZoneAtmosphereScatteringWavelengthsY, elZoneAtmosphereScatteringWavelengthsZ);
elZoneAtmosphereScatteringWavelengthsX.addEventListener('change', zoneAtmosphereScatterWavelengthsChangeFunction);
elZoneAtmosphereScatteringWavelengthsY.addEventListener('change', zoneAtmosphereScatterWavelengthsChangeFunction);
elZoneAtmosphereScatteringWavelengthsZ.addEventListener('change', zoneAtmosphereScatterWavelengthsChangeFunction);
elZoneAtmosphereHasStars.addEventListener('change', createEmitGroupCheckedPropertyUpdateFunction('atmosphere','hasStars'));
var voxelVolumeSizeChangeFunction = createEmitVec3PropertyUpdateFunction(
'voxelVolumeSize', elVoxelVolumeSizeX, elVoxelVolumeSizeY, elVoxelVolumeSizeZ);
elVoxelVolumeSizeX.addEventListener('change', voxelVolumeSizeChangeFunction);
@ -934,12 +888,6 @@
action: "reloadScript"
}));
});
elCenterAtmosphereToZone.addEventListener("click", function() {
EventBridge.emitWebEvent(JSON.stringify({
type: "action",
action: "centerAtmosphereToZone",
}));
});
elPreviewCameraButton.addEventListener("click", function() {
EventBridge.emitWebEvent(JSON.stringify({
type: "action",
@ -1601,7 +1549,6 @@
<select name="SelectBackgroundMode" id="property-zone-background-mode">
<option value='inherit'>Nothing</option>
<option value='skybox'>Skybox</option>
<option value='atmosphere'>Atmosphere</option>
</select>
</div>
</div>
@ -1626,61 +1573,6 @@
<input type="text" id="property-zone-skybox-url" class="url">
</div>
</div>
<div class="sub-section-header zone-section atmosphere-section">
<label>Atmosphere</label>
</div>
<div class="zone-section atmosphere-section property">
<div class="label">Atmosphere Center</div>
<div class="value">
<div class="input-area">X <br><input class="coord" type='number' id="property-zone-atmosphere-center-x"></div>
<div class="input-area">Y <br><input class="coord" type='number' id="property-zone-atmosphere-center-y"></div>
<div class="input-area">Z <br><input class="coord" type='number' id="property-zone-atmosphere-center-z"></div>
<div>
<input type="button" id="center-atmosphere-in-zone" value="Center to Zone">
</div>
</div>
</div>
<div class="zone-section atmosphere-section property">
<div class="label">Atmosphere Inner Radius</div>
<div class="value">
<input class="coord" type='number' id="property-zone-atmosphere-inner-radius" step="1">
</div>
</div>
<div class="zone-section atmosphere-section property">
<div class="label">Atmosphere Outer Radius</div>
<div class="value">
<input class="coord" type='number' id="property-zone-atmosphere-outer-radius" step="1">
</div>
</div>
<div class="zone-section atmosphere-section property">
<div class="label">Atmosphere Mie Scattering</div>
<div class="value">
<input class="coord no-spin" type='number' id="property-zone-atmosphere-mie-scattering" min="0" max="0.5" step="any">
</div>
</div>
<div class="zone-section atmosphere-section property">
<div class="label">Atmosphere Rayleigh Scattering</div>
<div class="value">
<input class="coord no-spin" type='number' id="property-zone-atmosphere-rayleigh-scattering" min="0" max="0.5" step="any">
</div>
</div>
<div class="zone-section atmosphere-section property">
<div class="label">Atmosphere Scattering Wavelenghts</div>
<div class="value">
<div class="input-area">X <br><input class="coord no-spin" type='number' id="property-zone-atmosphere-scattering-wavelengths-x" min="0" max="1" step="any"></div>
<div class="input-area">Y <br><input class="coord no-spin" type='number' id="property-zone-atmosphere-scattering-wavelengths-y" min="0" max="1" step="any"></div>
<div class="input-area">Z <br><input class="coord no-spin" type='number' id="property-zone-atmosphere-scattering-wavelengths-z" min="0" max="1" step="any"></div>
</div>
</div>
<div class="zone-section atmosphere-section property" style="display:none">
<span class="label">Atmosphere Has Stars</span>
<span class="value">
<input type='checkbox' id="property-zone-atmosphere-has-stars">
</span>
</div>
</div>
</body>
</html>


@ -12,7 +12,7 @@
Script.include("cookies.js");
var MENU = "Developer>Render>Debug Deferred Buffer";
var ACTIONS = ["Off", "Diffuse", "Alpha", "Specular", "Roughness", "Normal", "Depth", "Lighting", "Custom"];
var ACTIONS = ["Off", "Diffuse", "Metallic", "Roughness", "Normal", "Depth", "Lighting", "Shadow", "PyramidDepth", "AmbientOcclusion", "OcclusionBlurred", "Custom"];
var SETTINGS_KEY = "EngineDebugScript.DebugMode";
Number.prototype.clamp = function(min, max) {
@ -52,6 +52,7 @@ var overlaysCounter = new CounterWidget(panel, "Overlays", Render.overlay3D);
var resizing = false;
var previousMode = Settings.getValue(SETTINGS_KEY, -1);
previousMode = 8;
Menu.addActionGroup(MENU, ACTIONS, ACTIONS[previousMode + 1]);
Render.deferredDebugMode = previousMode;
Render.deferredDebugSize = { x: 0.0, y: -1.0, z: 1.0, w: 1.0 }; // Reset to default size
@ -98,12 +99,70 @@ panel.newSlider("Tone Mapping Exposure", -10, 10,
function() { return Render.tone.exposure; },
function (value) { return (value); });
panel.newSlider("Ambient Occlusion Resolution Level", 0.0, 4.0,
function (value) { Render.ambientOcclusion.resolutionLevel = value; },
function() { return Render.ambientOcclusion.resolutionLevel; },
function (value) { return (value); });
panel.newSlider("Ambient Occlusion Radius", 0.0, 2.0,
function (value) { Render.ambientOcclusion.radius = value; },
function() { return Render.ambientOcclusion.radius; },
function (value) { return (value.toFixed(2)); });
panel.newSlider("Ambient Occlusion Level", 0.0, 1.0,
function (value) { Render.ambientOcclusion.level = value; },
function() { return Render.ambientOcclusion.level; },
function (value) { return (value.toFixed(2)); });
panel.newSlider("Ambient Occlusion Num Samples", 1, 32,
function (value) { Render.ambientOcclusion.numSamples = value; },
function() { return Render.ambientOcclusion.numSamples; },
function (value) { return (value); });
panel.newSlider("Ambient Occlusion Num Spiral Turns", 0.0, 30.0,
function (value) { Render.ambientOcclusion.numSpiralTurns = value; },
function() { return Render.ambientOcclusion.numSpiralTurns; },
function (value) { return (value.toFixed(2)); });
panel.newCheckbox("Ambient Occlusion Dithering",
function (value) { Render.ambientOcclusion.ditheringEnabled = value; },
function() { return Render.ambientOcclusion.ditheringEnabled; },
function (value) { return (value); });
panel.newSlider("Ambient Occlusion Falloff Bias", 0.0, 0.2,
function (value) { Render.ambientOcclusion.falloffBias = value; },
function() { return Render.ambientOcclusion.falloffBias; },
function (value) { return (value.toFixed(2)); });
panel.newSlider("Ambient Occlusion Edge Sharpness", 0.0, 1.0,
function (value) { Render.ambientOcclusion.edgeSharpness = value; },
function() { return Render.ambientOcclusion.edgeSharpness; },
function (value) { return (value.toFixed(2)); });
panel.newSlider("Ambient Occlusion Blur Radius", 0.0, 6.0,
function (value) { Render.ambientOcclusion.blurRadius = value; },
function() { return Render.ambientOcclusion.blurRadius; },
function (value) { return (value); });
panel.newSlider("Ambient Occlusion Blur Deviation", 0.0, 3.0,
function (value) { Render.ambientOcclusion.blurDeviation = value; },
function() { return Render.ambientOcclusion.blurDeviation; },
function (value) { return (value.toFixed(2)); });
panel.newSlider("Ambient Occlusion GPU time", 0.0, 10.0,
function (value) {},
function() { return Render.ambientOcclusion.gpuTime; },
function (value) { return (value.toFixed(2) + " ms"); });
var tickTackPeriod = 500;
function updateCounters() {
opaquesCounter.update();
transparentsCounter.update();
overlaysCounter.update();
panel.update("Ambient Occlusion GPU time");
}
Script.setInterval(updateCounters, tickTackPeriod);


@ -129,7 +129,7 @@ if (WIN32)
endif()
# link required hifi libraries
link_hifi_libraries(shared octree environment gpu gl procedural model render
link_hifi_libraries(shared octree gpu gl procedural model render
recording fbx networking model-networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer ui auto-updater


@ -99,8 +99,7 @@
<p>
Use your best headphones<br/>
and microphone for high<br/>
fidelity audio. Chat via text by<br/>
pressing the \ key.
fidelity audio.
</p>
</div>
<div class="grid-unit">


@ -16,12 +16,7 @@ FocusScope {
property bool desktopRoot: true
// The VR version of the primary menu
property var rootMenu: Menu {
id: rootMenu; objectName: "rootMenu"
Component.onCompleted: {
console.log("ROOT_MENU " + rootMenu);
}
}
property var rootMenu: Menu { objectName: "rootMenu" }
QtObject {
id: d
@ -210,7 +205,13 @@ FocusScope {
// Debugging help for figuring out focus issues
property var offscreenWindow;
onOffscreenWindowChanged: offscreenWindow.activeFocusItemChanged.connect(onWindowFocusChanged);
onOffscreenWindowChanged: {
offscreenWindow.activeFocusItemChanged.connect(onWindowFocusChanged);
focusHack.start();
}
FocusHack { id: focusHack; }
function onWindowFocusChanged() {
console.log("Focus item is " + offscreenWindow.activeFocusItem);
var focusedItem = offscreenWindow.activeFocusItem ;
@ -223,11 +224,14 @@ FocusScope {
focusDebugger.height = rect.height
}
}
Rectangle {
id: focusDebugger;
z: 9999; visible: false; color: "red"
ColorAnimation on color { from: "#7fffff00"; to: "#7f0000ff"; duration: 1000; loops: 9999 }
}
}


@ -0,0 +1,26 @@
import QtQuick 2.5
FocusScope {
id: root
TextInput {
id: textInput;
focus: true
width: 10; height: 10
onActiveFocusChanged: root.destroy()
}
Timer {
id: focusTimer
running: false
interval: 100
onTriggered: textInput.forceActiveFocus()
}
function start() {
focusTimer.running = true;
}
}


@ -8,23 +8,20 @@ import "../windows"
import "../styles"
// Work in progress....
Window {
ModalWindow {
id: root
HifiConstants { id: hifi }
signal selectedFile(var file);
signal canceled();
anchors.centerIn: parent
resizable: true
width: 640
height: 480
modality: Qt.ApplicationModal
property string settingsName: ""
property alias folder: folderModel.folder
property alias filterModel: selectionType.model
Rectangle {
anchors.fill: parent
color: "white"


@ -7,19 +7,14 @@ import "../styles"
import "../windows"
// FIXME respect default button functionality
// FIXME force active focus at all times (modal dialog)
Window {
ModalWindow {
id: root
HifiConstants { id: hifi }
implicitWidth: 640
implicitHeight: 320
destroyOnCloseButton: true
destroyOnInvisible: true
visible: true
modality: Qt.ApplicationModal
anchors.centerIn: parent
frame: ModalFrame {}
signal selected(int button);


@ -17,9 +17,10 @@ Frame {
color: "#7f7f7f7f";
radius: 3;
MouseArea {
enabled: window.visible
anchors.fill: parent
acceptedButtons: Qt.AllButtons
onClicked: { }
onClicked: {}
onDoubleClicked: {}
onPressAndHold: {}
onReleased: {}


@ -0,0 +1,12 @@
import QtQuick 2.5
import "."
Window {
id: root
anchors.centerIn: parent
modality: Qt.ApplicationModal
frame: ModalFrame{}
}


@ -69,28 +69,9 @@ Fadable {
// Default to a standard frame. Can be overriden to provide custom
// frame styles, like a full desktop frame to simulate a modal window
property var frame;
property var frame: DefaultFrame { }
Component {
id: defaultFrameBuilder;
DefaultFrame { anchors.fill: parent }
}
Component {
id: modalFrameBuilder;
ModalFrame { anchors.fill: parent }
}
Component.onCompleted: {
if (!frame) {
if (modality === Qt.NonModal) {
frame = defaultFrameBuilder.createObject(window);
} else {
frame = modalFrameBuilder.createObject(window);
}
}
raise();
}
Component.onCompleted: raise();
children: [ frame, content, activator ]


@ -95,6 +95,7 @@
#include <PathUtils.h>
#include <PerfStat.h>
#include <PhysicsEngine.h>
#include <PhysicsHelpers.h>
#include <plugins/PluginContainer.h>
#include <plugins/PluginManager.h>
#include <RenderableWebEntityItem.h>
@ -422,9 +423,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
auto controllerScriptingInterface = DependencyManager::get<controller::ScriptingInterface>().data();
_controllerScriptingInterface = dynamic_cast<ControllerScriptingInterface*>(controllerScriptingInterface);
// to work around the Qt constant wireless scanning, set the env for polling interval very high
const QByteArray EXTREME_BEARER_POLL_TIMEOUT = QString::number(INT_MAX).toLocal8Bit();
qputenv("QT_BEARER_POLL_TIMEOUT", EXTREME_BEARER_POLL_TIMEOUT);
_entityClipboard->createRootElement();
@ -1883,12 +1881,6 @@ void Application::keyPressEvent(QKeyEvent* event) {
break;
}
case Qt::Key_A:
if (isShifted) {
Menu::getInstance()->triggerOption(MenuOption::Atmosphere);
}
break;
case Qt::Key_Backslash:
Menu::getInstance()->triggerOption(MenuOption::Chat);
break;
@ -2696,8 +2688,6 @@ void Application::init() {
// Make sure Login state is up to date
DependencyManager::get<DialogsManager>()->toggleLoginDialog();
_environment.init();
DependencyManager::get<DeferredLightingEffect>()->init();
DependencyManager::get<AvatarManager>()->init();
@ -3136,7 +3126,7 @@ void Application::update(float deltaTime) {
PerformanceTimer perfTimer("havestChanges");
if (_physicsEngine->hasOutgoingChanges()) {
getEntities()->getTree()->withWriteLock([&] {
_entitySimulation.handleOutgoingChanges(_physicsEngine->getOutgoingChanges(), _physicsEngine->getSessionID());
_entitySimulation.handleOutgoingChanges(_physicsEngine->getOutgoingChanges(), Physics::getSessionUUID());
avatarManager->handleOutgoingChanges(_physicsEngine->getOutgoingChanges());
});
@ -3618,10 +3608,6 @@ public:
typedef Payload::DataPointer Pointer;
Stars _stars;
Environment* _environment;
BackgroundRenderData(Environment* environment) : _environment(environment) {
}
static render::ItemID _item; // unique WorldBoxRenderData
};
@ -3663,63 +3649,8 @@ namespace render {
"Application::payloadRender<BackgroundRenderData>() ... stars...");
// should be the first rendering pass - w/o depth buffer / lighting
// compute starfield alpha based on distance from atmosphere
float alpha = 1.0f;
bool hasStars = true;
if (Menu::getInstance()->isOptionChecked(MenuOption::Atmosphere)) {
// TODO: handle this correctly for zones
const EnvironmentData& closestData = background->_environment->getClosestData(args->_viewFrustum->getPosition()); // was theCamera instead of _viewFrustum
if (closestData.getHasStars()) {
const float APPROXIMATE_DISTANCE_FROM_HORIZON = 0.1f;
const float DOUBLE_APPROXIMATE_DISTANCE_FROM_HORIZON = 0.2f;
glm::vec3 sunDirection = (args->_viewFrustum->getPosition()/*getAvatarPosition()*/ - closestData.getSunLocation())
/ closestData.getAtmosphereOuterRadius();
float height = glm::distance(args->_viewFrustum->getPosition()/*theCamera.getPosition()*/, closestData.getAtmosphereCenter());
if (height < closestData.getAtmosphereInnerRadius()) {
// If we're inside the atmosphere, then determine if our keyLight is below the horizon
alpha = 0.0f;
if (sunDirection.y > -APPROXIMATE_DISTANCE_FROM_HORIZON) {
float directionY = glm::clamp(sunDirection.y,
-APPROXIMATE_DISTANCE_FROM_HORIZON, APPROXIMATE_DISTANCE_FROM_HORIZON)
+ APPROXIMATE_DISTANCE_FROM_HORIZON;
alpha = (directionY / DOUBLE_APPROXIMATE_DISTANCE_FROM_HORIZON);
}
} else if (height < closestData.getAtmosphereOuterRadius()) {
alpha = (height - closestData.getAtmosphereInnerRadius()) /
(closestData.getAtmosphereOuterRadius() - closestData.getAtmosphereInnerRadius());
if (sunDirection.y > -APPROXIMATE_DISTANCE_FROM_HORIZON) {
float directionY = glm::clamp(sunDirection.y,
-APPROXIMATE_DISTANCE_FROM_HORIZON, APPROXIMATE_DISTANCE_FROM_HORIZON)
+ APPROXIMATE_DISTANCE_FROM_HORIZON;
alpha = (directionY / DOUBLE_APPROXIMATE_DISTANCE_FROM_HORIZON);
}
}
} else {
hasStars = false;
}
}
// finally render the starfield
if (hasStars) {
background->_stars.render(args, alpha);
}
// draw the sky dome
if (/*!selfAvatarOnly &&*/ Menu::getInstance()->isOptionChecked(MenuOption::Atmosphere)) {
PerformanceTimer perfTimer("atmosphere");
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"Application::displaySide() ... atmosphere...");
background->_environment->renderAtmospheres(batch, *(args->_viewFrustum));
}
static const float alpha = 1.0f;
background->_stars.render(args, alpha);
}
}
break;
@ -3759,12 +3690,10 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
// Background rendering decision
if (BackgroundRenderData::_item == 0) {
auto backgroundRenderData = make_shared<BackgroundRenderData>(&_environment);
auto backgroundRenderData = make_shared<BackgroundRenderData>();
auto backgroundRenderPayload = make_shared<BackgroundRenderData::Payload>(backgroundRenderData);
BackgroundRenderData::_item = _main3DScene->allocateID();
pendingChanges.resetItem(BackgroundRenderData::_item, backgroundRenderPayload);
} else {
}
// Assuming nothing get's rendered through that
@ -3804,7 +3733,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
DependencyManager::get<DeferredLightingEffect>()->setAmbientLightMode(getRenderAmbientLight());
auto skyStage = DependencyManager::get<SceneScriptingInterface>()->getSkyStage();
DependencyManager::get<DeferredLightingEffect>()->setGlobalLight(skyStage->getSunLight()->getDirection(), skyStage->getSunLight()->getColor(), skyStage->getSunLight()->getIntensity(), skyStage->getSunLight()->getAmbientIntensity());
DependencyManager::get<DeferredLightingEffect>()->setGlobalAtmosphere(skyStage->getAtmosphere());
auto skybox = model::SkyboxPointer();
if (skyStage->getBackgroundMode() == model::SunSkyStage::SKY_BOX) {
@ -3844,6 +3772,8 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
auto engineContext = _renderEngine->getRenderContext();
renderInterface->setItemCounts(engineContext->getItemsConfig());
renderInterface->setJobGPUTimes(engineContext->getAmbientOcclusion().gpuTime);
}
activeRenderingThread = nullptr;
@ -4296,6 +4226,9 @@ bool Application::acceptURL(const QString& urlString, bool defaultUpload) {
}
void Application::setSessionUUID(const QUuid& sessionUUID) {
// HACK: until we swap the library dependency order between physics and entities
// we cache the sessionID in two distinct places for physics.
Physics::setSessionUUID(sessionUUID); // TODO: remove this one
_physicsEngine->setSessionUUID(sessionUUID);
}
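Physics::setSessionUUID() and Physics::getSessionUUID() are the second cache the HACK comment refers to; Application::update() above and EntityItem below read the UUID back through it. A plausible sketch of those static accessors, with the storage assumed rather than copied from the actual physics library:

    // Hypothetical sketch only -- the function names come from the calls in
    // this diff; the file-local QUuid is an assumption about how they are backed.
    #include <QtCore/QUuid>

    namespace Physics {
        static QUuid _sessionUUID; // assumed process-wide cache

        void setSessionUUID(const QUuid& sessionUUID) {
            _sessionUUID = sessionUUID;
        }

        const QUuid& getSessionUUID() {
            return _sessionUUID;
        }
    }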


@ -44,7 +44,6 @@
#include "avatar/MyAvatar.h"
#include "Bookmarks.h"
#include "Camera.h"
#include "Environment.h"
#include "FileLogger.h"
#include "gpu/Context.h"
#include "Menu.h"
@ -172,8 +171,6 @@ public:
virtual QThread* getMainThread() { return thread(); }
virtual PickRay computePickRay(float x, float y) const;
virtual glm::vec3 getAvatarPosition() const;
virtual void overrideEnvironmentData(const EnvironmentData& newData) { _environment.override(newData); }
virtual void endOverrideEnvironmentData() { _environment.endOverride(); }
virtual qreal getDevicePixelRatio();
void setActiveDisplayPlugin(const QString& pluginName);
@ -440,8 +437,6 @@ private:
float _rotateMirror;
float _raiseMirror;
Environment _environment;
QSet<int> _keysPressed;
bool _enableProcessOctreeThread;


@ -323,7 +323,6 @@ Menu::Menu() {
// Developer > Render >>>
MenuWrapper* renderOptionsMenu = developerMenu->addMenu("Render");
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Atmosphere, 0, true);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::WorldAxes);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::DebugAmbientOcclusion);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Antialiasing);


@ -154,7 +154,6 @@ namespace MenuOption {
const QString AnimDebugDrawPosition= "Debug Draw Position";
const QString Antialiasing = "Antialiasing";
const QString AssetMigration = "ATP Asset Migration";
const QString Atmosphere = "Atmosphere";
const QString Attachments = "Attachments...";
const QString AudioNetworkStats = "Audio Network Stats";
const QString AudioNoiseReduction = "Audio Noise Reduction";
@ -187,7 +186,7 @@ namespace MenuOption {
const QString CopyPath = "Copy Path to Clipboard";
const QString CoupleEyelids = "Couple Eyelids";
const QString CrashInterface = "Crash Interface";
const QString DebugAmbientOcclusion = "Debug Ambient Occlusion";
const QString DebugAmbientOcclusion = "Ambient Occlusion";
const QString DecreaseAvatarSize = "Decrease Avatar Size";
const QString DeleteBookmark = "Delete Bookmark...";
const QString DisableActivityLogger = "Disable Activity Logger";


@ -146,6 +146,6 @@ QUuid AvatarMotionState::getSimulatorID() const {
// virtual
void AvatarMotionState::computeCollisionGroupAndMask(int16_t& group, int16_t& mask) const {
group = BULLET_COLLISION_GROUP_OTHER_AVATAR;
mask = PhysicsEngine::getCollisionMask(group);
mask = Physics::getDefaultCollisionMask(group);
}


@ -43,7 +43,6 @@
#include "Application.h"
#include "devices/Faceshift.h"
#include "AvatarManager.h"
#include "Environment.h"
#include "Menu.h"
#include "MyAvatar.h"
#include "Physics.h"


@ -25,6 +25,8 @@
#include "MainWindow.h"
int main(int argc, const char* argv[]) {
disableQtBearerPoll(); // Fixes wifi ping spikes
QString applicationName = "High Fidelity Interface - " + qgetenv("USERNAME");
bool instanceMightBeRunning = true;
@ -81,7 +83,6 @@ int main(int argc, const char* argv[]) {
#endif
}
QElapsedTimer startupTime;
startupTime.start();


@ -184,13 +184,15 @@ bool HTTPManager::bindSocket() {
return true;
} else {
qCritical() << "Failed to open HTTP server socket:" << errorString() << " can't continue";
QMetaObject::invokeMethod(this, "queuedExit", Qt::QueuedConnection);
QString errorMessage = "Failed to open HTTP server socket: " + errorString() + ", can't continue";
QMetaObject::invokeMethod(this, "queuedExit", Qt::QueuedConnection, Q_ARG(QString, errorMessage));
return false;
}
}
void HTTPManager::queuedExit() {
void HTTPManager::queuedExit(QString errorMessage) {
if (!errorMessage.isEmpty()) {
qCCritical(embeddedwebserver) << qPrintable(errorMessage);
}
QCoreApplication::exit(SOCKET_ERROR_EXIT_CODE);
}
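The error string is now built at the bindSocket() call site and logged inside queuedExit() via qCCritical(embeddedwebserver); because the slot is invoked with Qt::QueuedConnection and Q_ARG, the message is reported on HTTPManager's thread once its event loop runs, and QCoreApplication::exit() only happens after the current call stack has unwound.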


@ -39,7 +39,7 @@ public:
private slots:
void isTcpServerListening();
void queuedExit();
void queuedExit(QString errorMessage);
private:
bool bindSocket();


@ -277,8 +277,7 @@ void EntityTreeRenderer::applyZonePropertiesToScene(std::shared_ptr<ZoneEntityIt
_hasPreviousZone = false;
}
_viewState->endOverrideEnvironmentData();
skyStage->setBackgroundMode(model::SunSkyStage::SKY_DOME); // let the application atmosphere through
skyStage->setBackgroundMode(model::SunSkyStage::SKY_DOME); // let the application background through
return; // Early exit
}
@ -308,28 +307,6 @@ void EntityTreeRenderer::applyZonePropertiesToScene(std::shared_ptr<ZoneEntityIt
sceneTime->setDay(zone->getStageProperties().calculateDay());
switch (zone->getBackgroundMode()) {
case BACKGROUND_MODE_ATMOSPHERE: {
EnvironmentData data = zone->getEnvironmentData();
glm::vec3 keyLightDirection = sceneKeyLight->getDirection();
glm::vec3 inverseKeyLightDirection = keyLightDirection * -1.0f;
// NOTE: is this right? It seems like the "sun" should be based on the center of the
// atmosphere, not where the camera is.
glm::vec3 keyLightLocation = _viewState->getAvatarPosition() +
(inverseKeyLightDirection * data.getAtmosphereOuterRadius());
data.setSunLocation(keyLightLocation);
const float KEY_LIGHT_INTENSITY_TO_SUN_BRIGHTNESS_RATIO = 20.0f;
float sunBrightness = sceneKeyLight->getIntensity() * KEY_LIGHT_INTENSITY_TO_SUN_BRIGHTNESS_RATIO;
data.setSunBrightness(sunBrightness);
_viewState->overrideEnvironmentData(data);
skyStage->setBackgroundMode(model::SunSkyStage::SKY_DOME);
_pendingSkyboxTexture = false;
_skyboxTexture.clear();
break;
}
case BACKGROUND_MODE_SKYBOX: {
auto skybox = std::dynamic_pointer_cast<ProceduralSkybox>(skyStage->getSkybox());
skybox->setColor(zone->getSkyboxProperties().getColorVec3());
@ -360,13 +337,13 @@ void EntityTreeRenderer::applyZonePropertiesToScene(std::shared_ptr<ZoneEntityIt
}
}
_viewState->endOverrideEnvironmentData();
skyStage->setBackgroundMode(model::SunSkyStage::SKY_BOX);
break;
}
case BACKGROUND_MODE_INHERIT:
_viewState->endOverrideEnvironmentData();
skyStage->setBackgroundMode(model::SunSkyStage::SKY_DOME); // let the application atmosphere through
default:
skyStage->setBackgroundMode(model::SunSkyStage::SKY_DOME); // let the application background through
_pendingSkyboxTexture = false;
_skyboxTexture.clear();
break;


@ -1,6 +1,6 @@
set(TARGET_NAME entities)
setup_hifi_library(Network Script)
link_hifi_libraries(avatars shared audio octree gpu model fbx networking animation environment)
link_hifi_libraries(avatars shared audio octree gpu model fbx networking animation)
target_bullet()


@ -1,230 +0,0 @@
//
// AtmospherePropertyGroup.cpp
// libraries/entities/src
//
// Created by Brad Hefta-Gaub on 12/4/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <OctreePacketData.h>
#include "AtmospherePropertyGroup.h"
#include "EntityItemProperties.h"
#include "EntityItemPropertiesMacros.h"
const glm::vec3 AtmospherePropertyGroup::DEFAULT_CENTER = glm::vec3(0.0f, -1000.0f, 0.0f);
const float AtmospherePropertyGroup::DEFAULT_INNER_RADIUS = 1000.0f;
const float AtmospherePropertyGroup::DEFAULT_OUTER_RADIUS = 1025.0f;
const float AtmospherePropertyGroup::DEFAULT_RAYLEIGH_SCATTERING = 0.0025f;
const float AtmospherePropertyGroup::DEFAULT_MIE_SCATTERING = 0.0010f;
const glm::vec3 AtmospherePropertyGroup::DEFAULT_SCATTERING_WAVELENGTHS = glm::vec3(0.650f, 0.570f, 0.475f);
const bool AtmospherePropertyGroup::DEFAULT_HAS_STARS = true;
void AtmospherePropertyGroup::copyToScriptValue(const EntityPropertyFlags& desiredProperties, QScriptValue& properties, QScriptEngine* engine, bool skipDefaults, EntityItemProperties& defaultEntityProperties) const {
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ATMOSPHERE_CENTER, Atmosphere, atmosphere, Center, center);
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ATMOSPHERE_INNER_RADIUS, Atmosphere, atmosphere, InnerRadius, innerRadius);
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ATMOSPHERE_OUTER_RADIUS, Atmosphere, atmosphere, OuterRadius, outerRadius);
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ATMOSPHERE_MIE_SCATTERING, Atmosphere, atmosphere, MieScattering, mieScattering);
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, Atmosphere, atmosphere, RayleighScattering, rayleighScattering);
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, Atmosphere, atmosphere, ScatteringWavelengths, scatteringWavelengths);
COPY_GROUP_PROPERTY_TO_QSCRIPTVALUE(PROP_ATMOSPHERE_HAS_STARS, Atmosphere, atmosphere, HasStars, hasStars);
}
void AtmospherePropertyGroup::copyFromScriptValue(const QScriptValue& object, bool& _defaultSettings) {
COPY_GROUP_PROPERTY_FROM_QSCRIPTVALUE(atmosphere, center, glmVec3, setCenter);
COPY_GROUP_PROPERTY_FROM_QSCRIPTVALUE(atmosphere, innerRadius, float, setInnerRadius);
COPY_GROUP_PROPERTY_FROM_QSCRIPTVALUE(atmosphere, outerRadius, float, setOuterRadius);
COPY_GROUP_PROPERTY_FROM_QSCRIPTVALUE(atmosphere, mieScattering, float, setMieScattering);
COPY_GROUP_PROPERTY_FROM_QSCRIPTVALUE(atmosphere, rayleighScattering, float, setRayleighScattering);
COPY_GROUP_PROPERTY_FROM_QSCRIPTVALUE(atmosphere, scatteringWavelengths, glmVec3, setScatteringWavelengths);
COPY_GROUP_PROPERTY_FROM_QSCRIPTVALUE(atmosphere, hasStars, bool, setHasStars);
}
void AtmospherePropertyGroup::debugDump() const {
qDebug() << " AtmospherePropertyGroup: ---------------------------------------------";
qDebug() << " Center:" << getCenter() << " has changed:" << centerChanged();
qDebug() << " Inner Radius:" << getInnerRadius() << " has changed:" << innerRadiusChanged();
qDebug() << " Outer Radius:" << getOuterRadius() << " has changed:" << outerRadiusChanged();
qDebug() << " Mie Scattering:" << getMieScattering() << " has changed:" << mieScatteringChanged();
qDebug() << " Rayleigh Scattering:" << getRayleighScattering() << " has changed:" << rayleighScatteringChanged();
qDebug() << " Scattering Wavelengths:" << getScatteringWavelengths() << " has changed:" << scatteringWavelengthsChanged();
qDebug() << " Has Stars:" << getHasStars() << " has changed:" << hasStarsChanged();
}
void AtmospherePropertyGroup::listChangedProperties(QList<QString>& out) {
if (centerChanged()) {
out << "center";
}
if (innerRadiusChanged()) {
out << "innerRadius";
}
if (outerRadiusChanged()) {
out << "outerRadius";
}
if (mieScatteringChanged()) {
out << "mieScattering";
}
if (rayleighScatteringChanged()) {
out << "rayleighScattering";
}
if (scatteringWavelengthsChanged()) {
out << "scatteringWavelengths";
}
if (hasStarsChanged()) {
out << "hasStars";
}
}
bool AtmospherePropertyGroup::appendToEditPacket(OctreePacketData* packetData,
EntityPropertyFlags& requestedProperties,
EntityPropertyFlags& propertyFlags,
EntityPropertyFlags& propertiesDidntFit,
int& propertyCount,
OctreeElement::AppendState& appendState) const {
bool successPropertyFits = true;
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_CENTER, getCenter());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_INNER_RADIUS, getInnerRadius());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_OUTER_RADIUS, getOuterRadius());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_MIE_SCATTERING, getMieScattering());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, getRayleighScattering());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, getScatteringWavelengths());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_HAS_STARS, getHasStars());
return true;
}
bool AtmospherePropertyGroup::decodeFromEditPacket(EntityPropertyFlags& propertyFlags, const unsigned char*& dataAt , int& processedBytes) {
int bytesRead = 0;
bool overwriteLocalData = true;
bool somethingChanged = false;
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_CENTER, glm::vec3, setCenter);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_INNER_RADIUS, float, setInnerRadius);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_OUTER_RADIUS, float, setOuterRadius);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_MIE_SCATTERING, float, setMieScattering);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, float, setRayleighScattering);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, glm::vec3, setScatteringWavelengths);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_HAS_STARS, bool, setHasStars);
DECODE_GROUP_PROPERTY_HAS_CHANGED(PROP_ATMOSPHERE_CENTER, Center);
DECODE_GROUP_PROPERTY_HAS_CHANGED(PROP_ATMOSPHERE_INNER_RADIUS, InnerRadius);
DECODE_GROUP_PROPERTY_HAS_CHANGED(PROP_ATMOSPHERE_OUTER_RADIUS, OuterRadius);
DECODE_GROUP_PROPERTY_HAS_CHANGED(PROP_ATMOSPHERE_MIE_SCATTERING, MieScattering);
DECODE_GROUP_PROPERTY_HAS_CHANGED(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, RayleighScattering);
DECODE_GROUP_PROPERTY_HAS_CHANGED(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, ScatteringWavelengths);
DECODE_GROUP_PROPERTY_HAS_CHANGED(PROP_ATMOSPHERE_HAS_STARS, HasStars);
processedBytes += bytesRead;
Q_UNUSED(somethingChanged);
return true;
}
void AtmospherePropertyGroup::markAllChanged() {
_centerChanged = true;
_innerRadiusChanged = true;
_outerRadiusChanged = true;
_mieScatteringChanged = true;
_rayleighScatteringChanged = true;
_scatteringWavelengthsChanged = true;
_hasStarsChanged = true;
}
EntityPropertyFlags AtmospherePropertyGroup::getChangedProperties() const {
EntityPropertyFlags changedProperties;
CHECK_PROPERTY_CHANGE(PROP_ATMOSPHERE_CENTER, center);
CHECK_PROPERTY_CHANGE(PROP_ATMOSPHERE_INNER_RADIUS, innerRadius);
CHECK_PROPERTY_CHANGE(PROP_ATMOSPHERE_OUTER_RADIUS, outerRadius);
CHECK_PROPERTY_CHANGE(PROP_ATMOSPHERE_MIE_SCATTERING, mieScattering);
CHECK_PROPERTY_CHANGE(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, rayleighScattering);
CHECK_PROPERTY_CHANGE(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, scatteringWavelengths);
CHECK_PROPERTY_CHANGE(PROP_ATMOSPHERE_HAS_STARS, hasStars);
return changedProperties;
}
void AtmospherePropertyGroup::getProperties(EntityItemProperties& properties) const {
COPY_ENTITY_GROUP_PROPERTY_TO_PROPERTIES(Atmosphere, Center, getCenter);
COPY_ENTITY_GROUP_PROPERTY_TO_PROPERTIES(Atmosphere, InnerRadius, getInnerRadius);
COPY_ENTITY_GROUP_PROPERTY_TO_PROPERTIES(Atmosphere, OuterRadius, getOuterRadius);
COPY_ENTITY_GROUP_PROPERTY_TO_PROPERTIES(Atmosphere, MieScattering, getMieScattering);
COPY_ENTITY_GROUP_PROPERTY_TO_PROPERTIES(Atmosphere, MieScattering, getMieScattering);
COPY_ENTITY_GROUP_PROPERTY_TO_PROPERTIES(Atmosphere, RayleighScattering, getRayleighScattering);
COPY_ENTITY_GROUP_PROPERTY_TO_PROPERTIES(Atmosphere, ScatteringWavelengths, getScatteringWavelengths);
COPY_ENTITY_GROUP_PROPERTY_TO_PROPERTIES(Atmosphere, HasStars, getHasStars);
}
bool AtmospherePropertyGroup::setProperties(const EntityItemProperties& properties) {
bool somethingChanged = false;
SET_ENTITY_GROUP_PROPERTY_FROM_PROPERTIES(Atmosphere, Center, center, setCenter);
SET_ENTITY_GROUP_PROPERTY_FROM_PROPERTIES(Atmosphere, InnerRadius, innerRadius, setInnerRadius);
SET_ENTITY_GROUP_PROPERTY_FROM_PROPERTIES(Atmosphere, OuterRadius, outerRadius, setOuterRadius);
SET_ENTITY_GROUP_PROPERTY_FROM_PROPERTIES(Atmosphere, MieScattering, mieScattering, setMieScattering);
SET_ENTITY_GROUP_PROPERTY_FROM_PROPERTIES(Atmosphere, RayleighScattering, rayleighScattering, setRayleighScattering);
SET_ENTITY_GROUP_PROPERTY_FROM_PROPERTIES(Atmosphere, ScatteringWavelengths, scatteringWavelengths, setScatteringWavelengths);
SET_ENTITY_GROUP_PROPERTY_FROM_PROPERTIES(Atmosphere, HasStars, hasStars, setHasStars);
return somethingChanged;
}
EntityPropertyFlags AtmospherePropertyGroup::getEntityProperties(EncodeBitstreamParams& params) const {
EntityPropertyFlags requestedProperties;
requestedProperties += PROP_ATMOSPHERE_CENTER;
requestedProperties += PROP_ATMOSPHERE_INNER_RADIUS;
requestedProperties += PROP_ATMOSPHERE_OUTER_RADIUS;
requestedProperties += PROP_ATMOSPHERE_MIE_SCATTERING;
requestedProperties += PROP_ATMOSPHERE_RAYLEIGH_SCATTERING;
requestedProperties += PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS;
requestedProperties += PROP_ATMOSPHERE_HAS_STARS;
return requestedProperties;
}
void AtmospherePropertyGroup::appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
EntityTreeElementExtraEncodeData* entityTreeElementExtraEncodeData,
EntityPropertyFlags& requestedProperties,
EntityPropertyFlags& propertyFlags,
EntityPropertyFlags& propertiesDidntFit,
int& propertyCount,
OctreeElement::AppendState& appendState) const {
bool successPropertyFits = true;
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_CENTER, getCenter());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_INNER_RADIUS, getInnerRadius());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_OUTER_RADIUS, getOuterRadius());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_MIE_SCATTERING, getMieScattering());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, getRayleighScattering());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, getScatteringWavelengths());
APPEND_ENTITY_PROPERTY(PROP_ATMOSPHERE_HAS_STARS, getHasStars());
}
int AtmospherePropertyGroup::readEntitySubclassDataFromBuffer(const unsigned char* data, int bytesLeftToRead,
ReadBitstreamToTreeParams& args,
EntityPropertyFlags& propertyFlags, bool overwriteLocalData,
bool& somethingChanged) {
int bytesRead = 0;
const unsigned char* dataAt = data;
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_CENTER, glm::vec3, setCenter);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_INNER_RADIUS, float, setInnerRadius);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_OUTER_RADIUS, float, setOuterRadius);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_MIE_SCATTERING, float, setMieScattering);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, float, setRayleighScattering);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, glm::vec3, setScatteringWavelengths);
READ_ENTITY_PROPERTY(PROP_ATMOSPHERE_HAS_STARS, bool, setHasStars);
return bytesRead;
}


@ -1,109 +0,0 @@
//
// AtmospherePropertyGroup.h
// libraries/entities/src
//
// Created by Brad Hefta-Gaub on 12/4/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AtmospherePropertyGroup_h
#define hifi_AtmospherePropertyGroup_h
#include <QtScript/QScriptEngine>
#include "PropertyGroup.h"
#include "EntityItemPropertiesMacros.h"
class EntityItemProperties;
class EncodeBitstreamParams;
class OctreePacketData;
class EntityTreeElementExtraEncodeData;
class ReadBitstreamToTreeParams;
#include <stdint.h>
#include <glm/glm.hpp>
/*
#include <glm/gtx/extented_min_max.hpp>
#include <QtCore/QObject>
#include <QVector>
#include <QString>
#include <AACube.h>
#include <FBXReader.h> // for SittingPoint
#include <PropertyFlags.h>
#include <OctreeConstants.h>
#include <ShapeInfo.h>
#include "EntityItemID.h"
#include "AtmospherePropertyGroupMacros.h"
#include "EntityTypes.h"
*/
class AtmospherePropertyGroup : public PropertyGroup {
public:
// EntityItemProperty related helpers
virtual void copyToScriptValue(const EntityPropertyFlags& desiredProperties, QScriptValue& properties, QScriptEngine* engine, bool skipDefaults, EntityItemProperties& defaultEntityProperties) const;
virtual void copyFromScriptValue(const QScriptValue& object, bool& _defaultSettings);
virtual void debugDump() const;
virtual void listChangedProperties(QList<QString>& out);
virtual bool appendToEditPacket(OctreePacketData* packetData,
EntityPropertyFlags& requestedProperties,
EntityPropertyFlags& propertyFlags,
EntityPropertyFlags& propertiesDidntFit,
int& propertyCount,
OctreeElement::AppendState& appendState) const;
virtual bool decodeFromEditPacket(EntityPropertyFlags& propertyFlags, const unsigned char*& dataAt , int& processedBytes);
virtual void markAllChanged();
virtual EntityPropertyFlags getChangedProperties() const;
// EntityItem related helpers
// methods for getting/setting all properties of an entity
virtual void getProperties(EntityItemProperties& propertiesOut) const;
/// returns true if something changed
virtual bool setProperties(const EntityItemProperties& properties);
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const;
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
EntityTreeElementExtraEncodeData* entityTreeElementExtraEncodeData,
EntityPropertyFlags& requestedProperties,
EntityPropertyFlags& propertyFlags,
EntityPropertyFlags& propertiesDidntFit,
int& propertyCount,
OctreeElement::AppendState& appendState) const;
virtual int readEntitySubclassDataFromBuffer(const unsigned char* data, int bytesLeftToRead,
ReadBitstreamToTreeParams& args,
EntityPropertyFlags& propertyFlags, bool overwriteLocalData,
bool& somethingChanged);
static const glm::vec3 DEFAULT_CENTER;
static const float DEFAULT_INNER_RADIUS;
static const float DEFAULT_OUTER_RADIUS;
static const float DEFAULT_RAYLEIGH_SCATTERING;
static const float DEFAULT_MIE_SCATTERING;
static const glm::vec3 DEFAULT_SCATTERING_WAVELENGTHS;
static const bool DEFAULT_HAS_STARS;
DEFINE_PROPERTY_REF(PROP_ATMOSPHERE_CENTER, Center, center, glm::vec3, DEFAULT_CENTER);
DEFINE_PROPERTY(PROP_ATMOSPHERE_INNER_RADIUS, InnerRadius, innerRadius, float, DEFAULT_INNER_RADIUS);
DEFINE_PROPERTY(PROP_ATMOSPHERE_OUTER_RADIUS, OuterRadius, outerRadius, float, DEFAULT_OUTER_RADIUS);
DEFINE_PROPERTY(PROP_ATMOSPHERE_MIE_SCATTERING, MieScattering, mieScattering, float, DEFAULT_MIE_SCATTERING);
DEFINE_PROPERTY(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, RayleighScattering, rayleighScattering, float, DEFAULT_RAYLEIGH_SCATTERING);
DEFINE_PROPERTY_REF(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, ScatteringWavelengths, scatteringWavelengths, glm::vec3, DEFAULT_SCATTERING_WAVELENGTHS);
DEFINE_PROPERTY(PROP_ATMOSPHERE_HAS_STARS, HasStars, hasStars, bool, DEFAULT_HAS_STARS);
};
#endif // hifi_AtmospherePropertyGroup_h


@ -1507,6 +1507,33 @@ void EntityItem::updateCreated(uint64_t value) {
}
}
void EntityItem::computeCollisionGroupAndFinalMask(int16_t& group, int16_t& mask) const {
// TODO: detect attachment status and adopt group of wearer
if (_collisionless) {
group = BULLET_COLLISION_GROUP_COLLISIONLESS;
mask = 0;
} else {
if (_dynamic) {
group = BULLET_COLLISION_GROUP_DYNAMIC;
} else if (isMoving() || hasActions()) {
group = BULLET_COLLISION_GROUP_KINEMATIC;
} else {
group = BULLET_COLLISION_GROUP_STATIC;
}
uint8_t userMask = getCollisionMask();
if ((bool)(userMask & USER_COLLISION_GROUP_MY_AVATAR) !=
(bool)(userMask & USER_COLLISION_GROUP_OTHER_AVATAR)) {
// asymmetric avatar collision mask bits
if (!getSimulatorID().isNull() && (!getSimulatorID().isNull()) && getSimulatorID() != Physics::getSessionUUID()) {
// someone else owns the simulation, so we toggle the avatar bits (swap interpretation)
userMask ^= USER_COLLISION_MASK_AVATARS | ~userMask;
}
}
mask = Physics::getDefaultCollisionMask(group) & (int16_t)(userMask);
}
}
void EntityItem::setSimulationOwner(const QUuid& id, quint8 priority) {
if (wantTerseEditLogging() && (id != _simulationOwner.getID() || priority != _simulationOwner.getPriority())) {
qCDebug(entities) << "sim ownership for" << getDebugName() << "is now" << id << priority;


@ -275,9 +275,10 @@ public:
void setCollisionless(bool value) { _collisionless = value; }
uint8_t getCollisionMask() const { return _collisionMask; }
uint8_t getFinalCollisionMask() const { return _collisionless ? 0 : _collisionMask; }
void setCollisionMask(uint8_t value) { _collisionMask = value; }
void computeCollisionGroupAndFinalMask(int16_t& group, int16_t& mask) const;
bool getDynamic() const { return _dynamic; }
void setDynamic(bool value) { _dynamic = value; }
@ -370,8 +371,8 @@ public:
bool clearActions(EntitySimulation* simulation);
void setActionData(QByteArray actionData);
const QByteArray getActionData() const;
bool hasActions() { return !_objectActions.empty(); }
QList<QUuid> getActionIDs() { return _objectActions.keys(); }
bool hasActions() const { return !_objectActions.empty(); }
QList<QUuid> getActionIDs() const { return _objectActions.keys(); }
QVariantMap getActionArguments(const QUuid& actionID) const;
void deserializeActions();


@ -25,7 +25,6 @@
#include "PolyLineEntityItem.h"
AnimationPropertyGroup EntityItemProperties::_staticAnimation;
AtmospherePropertyGroup EntityItemProperties::_staticAtmosphere;
SkyboxPropertyGroup EntityItemProperties::_staticSkybox;
StagePropertyGroup EntityItemProperties::_staticStage;
KeyLightPropertyGroup EntityItemProperties::_staticKeyLight;
@ -79,7 +78,6 @@ void EntityItemProperties::debugDump() const {
qCDebug(entities) << " _compoundShapeURL=" << _compoundShapeURL;
getAnimation().debugDump();
getAtmosphere().debugDump();
getSkybox().debugDump();
getKeyLight().debugDump();
@ -162,8 +160,8 @@ QString EntityItemProperties::getCollisionMaskAsString() const {
void EntityItemProperties::setCollisionMaskFromString(const QString& maskString) {
QVector<QStringRef> groups = maskString.splitRef(',');
uint8_t mask = 0x00;
for (auto group : groups) {
mask |= getCollisionGroupAsBitMask(group);
for (auto groupName : groups) {
mask |= getCollisionGroupAsBitMask(groupName);
}
_collisionMask = mask;
_collisionMaskChanged = true;
@ -186,39 +184,26 @@ void EntityItemProperties::setShapeTypeFromString(const QString& shapeName) {
}
}
const char* backgroundModeNames[] = {"inherit", "atmosphere", "skybox" };
QHash<QString, BackgroundMode> stringToBackgroundModeLookup;
void addBackgroundMode(BackgroundMode type) {
stringToBackgroundModeLookup[backgroundModeNames[type]] = type;
}
void buildStringToBackgroundModeLookup() {
addBackgroundMode(BACKGROUND_MODE_INHERIT);
addBackgroundMode(BACKGROUND_MODE_ATMOSPHERE);
addBackgroundMode(BACKGROUND_MODE_SKYBOX);
}
using BackgroundPair = std::pair<const BackgroundMode, const QString>;
const std::array<BackgroundPair, BACKGROUND_MODE_ITEM_COUNT> BACKGROUND_MODES = {
BackgroundPair { BACKGROUND_MODE_INHERIT, { "inherit" } },
BackgroundPair { BACKGROUND_MODE_SKYBOX, { "skybox" } }
};
QString EntityItemProperties::getBackgroundModeAsString() const {
if (_backgroundMode < sizeof(backgroundModeNames) / sizeof(char *))
return QString(backgroundModeNames[_backgroundMode]);
return QString(backgroundModeNames[BACKGROUND_MODE_INHERIT]);
return BACKGROUND_MODES[_backgroundMode].second;
}
QString EntityItemProperties::getBackgroundModeString(BackgroundMode mode) {
if (mode < sizeof(backgroundModeNames) / sizeof(char *))
return QString(backgroundModeNames[mode]);
return QString(backgroundModeNames[BACKGROUND_MODE_INHERIT]);
return BACKGROUND_MODES[mode].second;
}
void EntityItemProperties::setBackgroundModeFromString(const QString& backgroundMode) {
if (stringToBackgroundModeLookup.empty()) {
buildStringToBackgroundModeLookup();
}
auto backgroundModeItr = stringToBackgroundModeLookup.find(backgroundMode.toLower());
if (backgroundModeItr != stringToBackgroundModeLookup.end()) {
_backgroundMode = backgroundModeItr.value();
auto result = std::find_if(BACKGROUND_MODES.begin(), BACKGROUND_MODES.end(), [&](const BackgroundPair& pair) {
return (pair.second == backgroundMode);
});
if (result != BACKGROUND_MODES.end()) {
_backgroundMode = result->first;
_backgroundModeChanged = true;
}
}
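A quick round-trip sketch of the new table-driven conversion above (the props instance is illustrative). Note that the lookup is now case-sensitive, and the removed "atmosphere" name no longer matches anything, so such a request leaves the mode unchanged:
EntityItemProperties props;
props.setBackgroundModeFromString("skybox");        // found in BACKGROUND_MODES
QString name = props.getBackgroundModeAsString();   // "skybox"
props.setBackgroundModeFromString("atmosphere");    // not in the table -> ignored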
@ -326,7 +311,6 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
changedProperties += _animation.getChangedProperties();
changedProperties += _keyLight.getChangedProperties();
changedProperties += _atmosphere.getChangedProperties();
changedProperties += _skybox.getChangedProperties();
changedProperties += _stage.getChangedProperties();
@ -475,7 +459,6 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER(PROP_BACKGROUND_MODE, backgroundMode, getBackgroundModeAsString());
_stage.copyToScriptValue(_desiredProperties, properties, engine, skipDefaults, defaultEntityProperties);
_atmosphere.copyToScriptValue(_desiredProperties, properties, engine, skipDefaults, defaultEntityProperties);
_skybox.copyToScriptValue(_desiredProperties, properties, engine, skipDefaults, defaultEntityProperties);
}
@ -668,7 +651,6 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object, bool
_animation.copyFromScriptValue(object, _defaultSettings);
_keyLight.copyFromScriptValue(object, _defaultSettings);
_atmosphere.copyFromScriptValue(object, _defaultSettings);
_skybox.copyFromScriptValue(object, _defaultSettings);
_stage.copyFromScriptValue(object, _defaultSettings);
@ -854,14 +836,6 @@ void EntityItemProperties::entityPropertyFlagsFromScriptValue(const QScriptValue
ADD_GROUP_PROPERTY_TO_MAP(PROP_ANIMATION_LAST_FRAME, Animation, animation, LastFrame, lastFrame);
ADD_GROUP_PROPERTY_TO_MAP(PROP_ANIMATION_HOLD, Animation, animation, Hold, hold);
ADD_GROUP_PROPERTY_TO_MAP(PROP_ATMOSPHERE_CENTER, Atmosphere, atmosphere, Center, center);
ADD_GROUP_PROPERTY_TO_MAP(PROP_ATMOSPHERE_INNER_RADIUS, Atmosphere, atmosphere, InnerRadius, innerRadius);
ADD_GROUP_PROPERTY_TO_MAP(PROP_ATMOSPHERE_OUTER_RADIUS, Atmosphere, atmosphere, OuterRadius, outerRadius);
ADD_GROUP_PROPERTY_TO_MAP(PROP_ATMOSPHERE_MIE_SCATTERING, Atmosphere, atmosphere, MieScattering, mieScattering);
ADD_GROUP_PROPERTY_TO_MAP(PROP_ATMOSPHERE_RAYLEIGH_SCATTERING, Atmosphere, atmosphere, RayleighScattering, rayleighScattering);
ADD_GROUP_PROPERTY_TO_MAP(PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS, Atmosphere, atmosphere, ScatteringWavelengths, scatteringWavelengths);
ADD_GROUP_PROPERTY_TO_MAP(PROP_ATMOSPHERE_HAS_STARS, Atmosphere, atmosphere, HasStars, hasStars);
ADD_GROUP_PROPERTY_TO_MAP(PROP_SKYBOX_COLOR, Skybox, skybox, Color, color);
ADD_GROUP_PROPERTY_TO_MAP(PROP_SKYBOX_URL, Skybox, skybox, URL, url);
@ -1110,9 +1084,6 @@ bool EntityItemProperties::encodeEntityEditPacket(PacketType command, EntityItem
APPEND_ENTITY_PROPERTY(PROP_BACKGROUND_MODE, (uint32_t)properties.getBackgroundMode());
_staticAtmosphere.setProperties(properties);
_staticAtmosphere.appendToEditPacket(packetData, requestedProperties, propertyFlags, propertiesDidntFit, propertyCount, appendState);
_staticSkybox.setProperties(properties);
_staticSkybox.appendToEditPacket(packetData, requestedProperties, propertyFlags, propertiesDidntFit, propertyCount, appendState);
}
@ -1397,7 +1368,6 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_SHAPE_TYPE, ShapeType, setShapeType);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_COMPOUND_SHAPE_URL, QString, setCompoundShapeURL);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_BACKGROUND_MODE, BackgroundMode, setBackgroundMode);
properties.getAtmosphere().decodeFromEditPacket(propertyFlags, dataAt , processedBytes);
properties.getSkybox().decodeFromEditPacket(propertyFlags, dataAt , processedBytes);
}
@ -1549,7 +1519,6 @@ void EntityItemProperties::markAllChanged() {
_backgroundModeChanged = true;
_animation.markAllChanged();
_atmosphere.markAllChanged();
_skybox.markAllChanged();
_stage.markAllChanged();
@ -1910,7 +1879,6 @@ QList<QString> EntityItemProperties::listChangedProperties() {
getAnimation().listChangedProperties(out);
getKeyLight().listChangedProperties(out);
getAtmosphere().listChangedProperties(out);
getSkybox().listChangedProperties(out);
getStage().listChangedProperties(out);

View file

@ -30,7 +30,6 @@
#include <ShapeInfo.h>
#include "AnimationPropertyGroup.h"
#include "AtmospherePropertyGroup.h"
#include "EntityItemID.h"
#include "EntityItemPropertiesDefaults.h"
#include "EntityItemPropertiesMacros.h"
@ -171,7 +170,6 @@ public:
DEFINE_PROPERTY_REF(PROP_NAME, Name, name, QString, ENTITY_ITEM_DEFAULT_NAME);
DEFINE_PROPERTY_REF_ENUM(PROP_BACKGROUND_MODE, BackgroundMode, backgroundMode, BackgroundMode, BACKGROUND_MODE_INHERIT);
DEFINE_PROPERTY_GROUP(Stage, stage, StagePropertyGroup);
DEFINE_PROPERTY_GROUP(Atmosphere, atmosphere, AtmospherePropertyGroup);
DEFINE_PROPERTY_GROUP(Skybox, skybox, SkyboxPropertyGroup);
DEFINE_PROPERTY_GROUP(Animation, animation, AnimationPropertyGroup);
DEFINE_PROPERTY_REF(PROP_SOURCE_URL, SourceUrl, sourceUrl, QString, "");
@ -419,7 +417,6 @@ inline QDebug operator<<(QDebug debug, const EntityItemProperties& properties) {
DEBUG_PROPERTY_IF_CHANGED(debug, properties, JointTranslations, jointTranslations, "");
properties.getAnimation().debugDump();
properties.getAtmosphere().debugDump();
properties.getSkybox().debugDump();
properties.getStage().debugDump();

View file

@ -197,13 +197,6 @@ enum EntityPropertyList {
PROP_STAGE_DAY = PROP_LINEAR_ATTENUATION_UNUSED,
PROP_STAGE_HOUR = PROP_QUADRATIC_ATTENUATION_UNUSED,
PROP_STAGE_AUTOMATIC_HOURDAY = PROP_ANIMATION_FRAME_INDEX,
PROP_ATMOSPHERE_CENTER = PROP_MAX_PARTICLES,
PROP_ATMOSPHERE_INNER_RADIUS = PROP_LIFESPAN,
PROP_ATMOSPHERE_OUTER_RADIUS = PROP_EMIT_RATE,
PROP_ATMOSPHERE_MIE_SCATTERING = PROP_EMIT_SPEED,
PROP_ATMOSPHERE_RAYLEIGH_SCATTERING = PROP_EMIT_STRENGTH,
PROP_ATMOSPHERE_SCATTERING_WAVELENGTHS = PROP_EMIT_ACCELERATION,
PROP_ATMOSPHERE_HAS_STARS = PROP_PARTICLE_RADIUS,
PROP_BACKGROUND_MODE = PROP_MODEL_URL,
PROP_SKYBOX_COLOR = PROP_ANIMATION_URL,
PROP_SKYBOX_URL = PROP_ANIMATION_FPS,

View file

@ -218,7 +218,13 @@ bool EntityTree::updateEntityWithElement(EntityItemPointer entity, const EntityI
QString collisionSoundURLBefore = entity->getCollisionSoundURL();
uint32_t preFlags = entity->getDirtyFlags();
UpdateEntityOperator theOperator(getThisPointer(), containingElement, entity, properties.getQueryAACube());
AACube newQueryAACube;
if (properties.queryAACubeChanged()) {
newQueryAACube = properties.getQueryAACube();
} else {
newQueryAACube = entity->getQueryAACube();
}
UpdateEntityOperator theOperator(getThisPointer(), containingElement, entity, newQueryAACube);
recurseTreeWithOperator(&theOperator);
entity->setProperties(properties);

View file

@ -309,29 +309,29 @@ OctreeElement::AppendState EntityTreeElement::appendElementData(OctreePacketData
}
// Now check the size of the entity, it's possible that a "too small to see" entity is included in a
// larger octree cell because of it's position (for example if it crosses the boundary of a cell it
// pops to the next higher cell. So we want to check to see that the entity is large enough to be seen
// larger octree cell because of its position (for example if it crosses the boundary of a cell it
// pops to the next higher cell. So we want to check to see that the entity is large enough to be seen
// before we consider including it.
if (includeThisEntity) {
AABox entityBounds = entity->getAABox(success);
if (success) {
auto renderAccuracy = params.viewFrustum->calculateRenderAccuracy(entityBounds,
params.octreeElementSizeScale, params.boundaryLevelAdjust);
if (!success) {
// if this entity is a child of an avatar, the entity-server won't be able to determine its
// AABox. If this happens, fall back to the queryAACube.
entityBounds = AABox(entityCube);
}
auto renderAccuracy = params.viewFrustum->calculateRenderAccuracy(entityBounds,
params.octreeElementSizeScale,
params.boundaryLevelAdjust);
if (renderAccuracy <= 0.0f) {
includeThisEntity = false; // too small, don't include it
if (renderAccuracy <= 0.0f) {
includeThisEntity = false; // too small, don't include it
#ifdef WANT_LOD_DEBUGGING
qDebug() << "skipping entity - TOO SMALL - \n"
<< "......id:" << entity->getID() << "\n"
<< "....name:" << entity->getName() << "\n"
<< "..bounds:" << entityBounds << "\n"
<< "....cell:" << getAACube();
#endif
}
} else {
includeThisEntity = false; // couldn't get box, don't include it
#ifdef WANT_LOD_DEBUGGING
qDebug() << "skipping entity - TOO SMALL - \n"
<< "......id:" << entity->getID() << "\n"
<< "....name:" << entity->getName() << "\n"
<< "..bounds:" << entityBounds << "\n"
<< "....cell:" << getAACube();
#endif
}
}
}

View file

@ -41,26 +41,6 @@ ZoneEntityItem::ZoneEntityItem(const EntityItemID& entityItemID) : EntityItem(en
_backgroundMode = BACKGROUND_MODE_INHERIT;
}
EnvironmentData ZoneEntityItem::getEnvironmentData() const {
EnvironmentData result;
result.setAtmosphereCenter(_atmosphereProperties.getCenter());
result.setAtmosphereInnerRadius(_atmosphereProperties.getInnerRadius());
result.setAtmosphereOuterRadius(_atmosphereProperties.getOuterRadius());
result.setRayleighScattering(_atmosphereProperties.getRayleighScattering());
result.setMieScattering(_atmosphereProperties.getMieScattering());
result.setScatteringWavelengths(_atmosphereProperties.getScatteringWavelengths());
result.setHasStars(_atmosphereProperties.getHasStars());
// NOTE: The sunLocation and SunBrightness will be overwritten in the EntityTreeRenderer to use the
// keyLight details from the scene interface
//result.setSunLocation(1000, 900, 1000));
//result.setSunBrightness(20.0f);
return result;
}
EntityItemProperties ZoneEntityItem::getProperties(EntityPropertyFlags desiredProperties) const {
EntityItemProperties properties = EntityItem::getProperties(desiredProperties); // get the properties from our base class
@ -73,7 +53,6 @@ EntityItemProperties ZoneEntityItem::getProperties(EntityPropertyFlags desiredPr
COPY_ENTITY_PROPERTY_TO_PROPERTIES(compoundShapeURL, getCompoundShapeURL);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(backgroundMode, getBackgroundMode);
_atmosphereProperties.getProperties(properties);
_skyboxProperties.getProperties(properties);
return properties;
@ -91,10 +70,9 @@ bool ZoneEntityItem::setProperties(const EntityItemProperties& properties) {
SET_ENTITY_PROPERTY_FROM_PROPERTIES(compoundShapeURL, setCompoundShapeURL);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(backgroundMode, setBackgroundMode);
bool somethingChangedInAtmosphere = _atmosphereProperties.setProperties(properties);
bool somethingChangedInSkybox = _skyboxProperties.setProperties(properties);
somethingChanged = somethingChanged || somethingChangedInKeyLight || somethingChangedInStage || somethingChangedInAtmosphere || somethingChangedInSkybox;
somethingChanged = somethingChanged || somethingChangedInKeyLight || somethingChangedInStage || somethingChangedInSkybox;
if (somethingChanged) {
bool wantDebug = false;
@ -133,12 +111,6 @@ int ZoneEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data,
READ_ENTITY_PROPERTY(PROP_COMPOUND_SHAPE_URL, QString, setCompoundShapeURL);
READ_ENTITY_PROPERTY(PROP_BACKGROUND_MODE, BackgroundMode, setBackgroundMode);
int bytesFromAtmosphere = _atmosphereProperties.readEntitySubclassDataFromBuffer(dataAt, (bytesLeftToRead - bytesRead), args,
propertyFlags, overwriteLocalData, somethingChanged);
bytesRead += bytesFromAtmosphere;
dataAt += bytesFromAtmosphere;
int bytesFromSkybox = _skyboxProperties.readEntitySubclassDataFromBuffer(dataAt, (bytesLeftToRead - bytesRead), args,
propertyFlags, overwriteLocalData, somethingChanged);
bytesRead += bytesFromSkybox;
@ -158,7 +130,6 @@ EntityPropertyFlags ZoneEntityItem::getEntityProperties(EncodeBitstreamParams& p
requestedProperties += PROP_COMPOUND_SHAPE_URL;
requestedProperties += PROP_BACKGROUND_MODE;
requestedProperties += _stageProperties.getEntityProperties(params);
requestedProperties += _atmosphereProperties.getEntityProperties(params);
requestedProperties += _skyboxProperties.getEntityProperties(params);
return requestedProperties;
@ -185,9 +156,6 @@ void ZoneEntityItem::appendSubclassData(OctreePacketData* packetData, EncodeBits
APPEND_ENTITY_PROPERTY(PROP_COMPOUND_SHAPE_URL, getCompoundShapeURL());
APPEND_ENTITY_PROPERTY(PROP_BACKGROUND_MODE, (uint32_t)getBackgroundMode()); // could this be a uint16??
_atmosphereProperties.appendSubclassData(packetData, params, modelTreeElementExtraEncodeData, requestedProperties,
propertyFlags, propertiesDidntFit, propertyCount, appendState);
_skyboxProperties.appendSubclassData(packetData, params, modelTreeElementExtraEncodeData, requestedProperties,
propertyFlags, propertiesDidntFit, propertyCount, appendState);
@ -203,7 +171,6 @@ void ZoneEntityItem::debugDump() const {
_keyLightProperties.debugDump();
_stageProperties.debugDump();
_atmosphereProperties.debugDump();
_skyboxProperties.debugDump();
}

View file

@ -12,10 +12,7 @@
#ifndef hifi_ZoneEntityItem_h
#define hifi_ZoneEntityItem_h
#include <EnvironmentData.h>
#include "KeyLightPropertyGroup.h"
#include "AtmospherePropertyGroup.h"
#include "EntityItem.h"
#include "EntityTree.h"
#include "SkyboxPropertyGroup.h"
@ -70,8 +67,6 @@ public:
void setBackgroundMode(BackgroundMode value) { _backgroundMode = value; }
BackgroundMode getBackgroundMode() const { return _backgroundMode; }
EnvironmentData getEnvironmentData() const;
const AtmospherePropertyGroup& getAtmosphereProperties() const { return _atmosphereProperties; }
const SkyboxPropertyGroup& getSkyboxProperties() const { return _skyboxProperties; }
const StagePropertyGroup& getStageProperties() const { return _stageProperties; }
@ -95,7 +90,6 @@ protected:
BackgroundMode _backgroundMode = BACKGROUND_MODE_INHERIT;
StagePropertyGroup _stageProperties;
AtmospherePropertyGroup _atmosphereProperties;
SkyboxPropertyGroup _skyboxProperties;
static bool _drawZoneBoundaries;

View file

@ -1,3 +0,0 @@
set(TARGET_NAME environment)
setup_hifi_library()
link_hifi_libraries(shared networking)

View file

@ -1,118 +0,0 @@
//
// EnvironmentData.cpp
// libraries/environment/src
//
// Created by Andrzej Kapolka on 5/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <cstring>
#include "EnvironmentData.h"
// initial values from Sean O'Neil's GPU Gems entry (http://http.developer.nvidia.com/GPUGems2/gpugems2_chapter16.html),
// GameEngine.cpp
EnvironmentData::EnvironmentData(int id) :
_id(id),
_flat(true),
_gravity(0.0f),
_atmosphereCenter(0, -1000, 0),
_atmosphereInnerRadius(1000.0),
_atmosphereOuterRadius(1025.0),
_rayleighScattering(0.0025f),
_mieScattering(0.0010f),
_scatteringWavelengths(0.650f, 0.570f, 0.475f),
_sunLocation(1000, 900, 1000),
_sunBrightness(20.0f),
_hasStars(true) {
}
glm::vec3 EnvironmentData::getAtmosphereCenter(const glm::vec3& cameraPosition) const {
return _atmosphereCenter + (_flat ? glm::vec3(cameraPosition.x, 0.0f, cameraPosition.z) : glm::vec3());
}
glm::vec3 EnvironmentData::getSunLocation(const glm::vec3& cameraPosition) const {
return _sunLocation;
}
size_t EnvironmentData::getBroadcastData(unsigned char* destinationBuffer) const {
unsigned char* bufferStart = destinationBuffer;
memcpy(destinationBuffer, &_id, sizeof(_id));
destinationBuffer += sizeof(_id);
memcpy(destinationBuffer, &_flat, sizeof(_flat));
destinationBuffer += sizeof(_flat);
memcpy(destinationBuffer, &_gravity, sizeof(_gravity));
destinationBuffer += sizeof(_gravity);
memcpy(destinationBuffer, &_atmosphereCenter, sizeof(_atmosphereCenter));
destinationBuffer += sizeof(_atmosphereCenter);
memcpy(destinationBuffer, &_atmosphereInnerRadius, sizeof(_atmosphereInnerRadius));
destinationBuffer += sizeof(_atmosphereInnerRadius);
memcpy(destinationBuffer, &_atmosphereOuterRadius, sizeof(_atmosphereOuterRadius));
destinationBuffer += sizeof(_atmosphereOuterRadius);
memcpy(destinationBuffer, &_rayleighScattering, sizeof(_rayleighScattering));
destinationBuffer += sizeof(_rayleighScattering);
memcpy(destinationBuffer, &_mieScattering, sizeof(_mieScattering));
destinationBuffer += sizeof(_mieScattering);
memcpy(destinationBuffer, &_scatteringWavelengths, sizeof(_scatteringWavelengths));
destinationBuffer += sizeof(_scatteringWavelengths);
memcpy(destinationBuffer, &_sunLocation, sizeof(_sunLocation));
destinationBuffer += sizeof(_sunLocation);
memcpy(destinationBuffer, &_sunBrightness, sizeof(_sunBrightness));
destinationBuffer += sizeof(_sunBrightness);
return destinationBuffer - bufferStart;
}
size_t EnvironmentData::parseData(const unsigned char* sourceBuffer, int numBytes) {
const unsigned char* startPosition = sourceBuffer;
memcpy(&_id, sourceBuffer, sizeof(_id));
sourceBuffer += sizeof(_id);
memcpy(&_flat, sourceBuffer, sizeof(_flat));
sourceBuffer += sizeof(_flat);
memcpy(&_gravity, sourceBuffer, sizeof(_gravity));
sourceBuffer += sizeof(_gravity);
memcpy(&_atmosphereCenter, sourceBuffer, sizeof(_atmosphereCenter));
sourceBuffer += sizeof(_atmosphereCenter);
memcpy(&_atmosphereInnerRadius, sourceBuffer, sizeof(_atmosphereInnerRadius));
sourceBuffer += sizeof(_atmosphereInnerRadius);
memcpy(&_atmosphereOuterRadius, sourceBuffer, sizeof(_atmosphereOuterRadius));
sourceBuffer += sizeof(_atmosphereOuterRadius);
memcpy(&_rayleighScattering, sourceBuffer, sizeof(_rayleighScattering));
sourceBuffer += sizeof(_rayleighScattering);
memcpy(&_mieScattering, sourceBuffer, sizeof(_mieScattering));
sourceBuffer += sizeof(_mieScattering);
memcpy(&_scatteringWavelengths, sourceBuffer, sizeof(_scatteringWavelengths));
sourceBuffer += sizeof(_scatteringWavelengths);
memcpy(&_sunLocation, sourceBuffer, sizeof(_sunLocation));
sourceBuffer += sizeof(_sunLocation);
memcpy(&_sunBrightness, sourceBuffer, sizeof(_sunBrightness));
sourceBuffer += sizeof(_sunBrightness);
return sourceBuffer - startPosition;
}

View file

@ -1,83 +0,0 @@
//
// EnvironmentData.h
// libraries/environment/src
//
// Created by Andrzej Kapolka on 5/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_EnvironmentData_h
#define hifi_EnvironmentData_h
#include <glm/glm.hpp>
class EnvironmentData {
public:
EnvironmentData(int id = 0);
void setID(int id) { _id = id; }
int getID() const { return _id; }
void setFlat(bool flat) { _flat = flat; }
bool isFlat() const { return _flat; }
void setGravity(float gravity) { _gravity = gravity; }
float getGravity() const { return _gravity; }
void setHasStars(bool value) { _hasStars = value; }
bool getHasStars() const { return _hasStars; }
void setAtmosphereCenter(const glm::vec3& center) { _atmosphereCenter = center; }
void setAtmosphereInnerRadius(float radius) { _atmosphereInnerRadius = radius; }
void setAtmosphereOuterRadius(float radius) { _atmosphereOuterRadius = radius; }
const glm::vec3& getAtmosphereCenter() const { return _atmosphereCenter; }
float getAtmosphereInnerRadius() const { return _atmosphereInnerRadius; }
float getAtmosphereOuterRadius() const { return _atmosphereOuterRadius; }
void setRayleighScattering(float scattering) { _rayleighScattering = scattering; }
void setMieScattering(float scattering) { _mieScattering = scattering; }
float getRayleighScattering() const { return _rayleighScattering; }
float getMieScattering() const { return _mieScattering; }
void setScatteringWavelengths(const glm::vec3& wavelengths) { _scatteringWavelengths = wavelengths; }
const glm::vec3& getScatteringWavelengths() const { return _scatteringWavelengths; }
void setSunLocation(const glm::vec3& location) { _sunLocation = location; }
void setSunBrightness(float brightness) { _sunBrightness = brightness; }
const glm::vec3& getSunLocation() const { return _sunLocation; }
float getSunBrightness() const { return _sunBrightness; }
glm::vec3 getAtmosphereCenter(const glm::vec3& cameraPosition) const;
glm::vec3 getSunLocation(const glm::vec3& cameraPosition) const;
size_t getBroadcastData(unsigned char* destinationBuffer) const;
size_t parseData(const unsigned char* sourceBuffer, int numBytes);
private:
int _id;
bool _flat;
float _gravity;
glm::vec3 _atmosphereCenter;
float _atmosphereInnerRadius;
float _atmosphereOuterRadius;
float _rayleighScattering;
float _mieScattering;
glm::vec3 _scatteringWavelengths;
glm::vec3 _sunLocation;
float _sunBrightness;
bool _hasStars;
};
#endif // hifi_EnvironmentData_h

View file

@ -98,7 +98,11 @@ void GLBackend::do_getQuery(Batch& batch, size_t paramOffset) {
glGetQueryObjectui64vEXT(glquery->_qo, GL_QUERY_RESULT, &glquery->_result);
#endif
#else
glGetQueryObjectui64v(glquery->_qo, GL_QUERY_RESULT, &glquery->_result);
glGetQueryObjectui64v(glquery->_qo, GL_QUERY_RESULT_AVAILABLE, &glquery->_result);
if (glquery->_result == GL_TRUE) {
glGetQueryObjectui64v(glquery->_qo, GL_QUERY_RESULT, &glquery->_result);
query->triggerReturnHandler(glquery->_result);
}
#endif
(void)CHECK_GL_ERROR();
}
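This is the standard non-blocking readback pattern for GL timer queries: poll availability first so the CPU never stalls on an in-flight query. A stripped-down sketch of the same idea, assuming a current GL context and a queryId created with glGenQueries() and bracketed by glBeginQuery()/glEndQuery(GL_TIME_ELAPSED); onQueryResult() stands in for whatever consumes the value:
GLuint64 available = 0;
glGetQueryObjectui64v(queryId, GL_QUERY_RESULT_AVAILABLE, &available);
if (available) {
    GLuint64 elapsedNanoseconds = 0;
    glGetQueryObjectui64v(queryId, GL_QUERY_RESULT, &elapsedNanoseconds);
    onQueryResult(elapsedNanoseconds);   // hand the result to whoever scheduled the query
}
// otherwise leave the query alone and poll again on a later frame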

View file

@ -146,6 +146,68 @@ public:
case gpu::RGB:
case gpu::RGBA:
texel.internalFormat = GL_RED;
switch (dstFormat.getType()) {
case gpu::UINT32: {
texel.internalFormat = GL_R32UI;
break;
}
case gpu::INT32: {
texel.internalFormat = GL_R32I;
break;
}
case gpu::NUINT32: {
texel.internalFormat = GL_RED;
break;
}
case gpu::NINT32: {
texel.internalFormat = GL_RED_SNORM;
break;
}
case gpu::FLOAT: {
texel.internalFormat = GL_R32F;
break;
}
case gpu::UINT16: {
texel.internalFormat = GL_R16UI;
break;
}
case gpu::INT16: {
texel.internalFormat = GL_R16I;
break;
}
case gpu::NUINT16: {
texel.internalFormat = GL_R16;
break;
}
case gpu::NINT16: {
texel.internalFormat = GL_R16_SNORM;
break;
}
case gpu::HALF: {
texel.internalFormat = GL_R16F;
break;
}
case gpu::UINT8: {
texel.internalFormat = GL_R8UI;
break;
}
case gpu::INT8: {
texel.internalFormat = GL_R8I;
break;
}
case gpu::NUINT8: {
texel.internalFormat = GL_R8;
break;
}
case gpu::NINT8: {
texel.internalFormat = GL_R8_SNORM;
break;
}
case gpu::NUM_TYPES: { // quiet compiler
Q_UNREACHABLE();
}
}
break;
case gpu::DEPTH:
texel.format = GL_DEPTH_COMPONENT; // It's depth component to load it

View file

@ -10,11 +10,13 @@
//
#include "Query.h"
#include <QDebug>
#include "GPULogging.h"
#include "Batch.h"
using namespace gpu;
Query::Query()
Query::Query(const Handler& returnHandler) :
_returnHandler(returnHandler)
{
}
@ -22,6 +24,48 @@ Query::~Query()
{
}
double Query::getElapsedTime() {
return 0.0;
double Query::getElapsedTime() const {
return ((double) _queryResult) * 0.000001;
}
void Query::triggerReturnHandler(uint64_t queryResult) {
_queryResult = queryResult;
if (_returnHandler) {
_returnHandler(*this);
}
}
RangeTimer::RangeTimer() {
for (int i = 0; i < QUERY_QUEUE_SIZE; i++) {
_timerQueries.push_back(std::make_shared<gpu::Query>([&, i] (const Query& query) {
_tailIndex ++;
auto elapsedTime = query.getElapsedTime();
_movingAverage.addSample(elapsedTime);
}));
}
}
void RangeTimer::begin(gpu::Batch& batch) {
_headIndex++;
batch.beginQuery(_timerQueries[rangeIndex(_headIndex)]);
}
void RangeTimer::end(gpu::Batch& batch) {
if (_headIndex < 0) {
return;
}
batch.endQuery(_timerQueries[rangeIndex(_headIndex)]);
if (_tailIndex < 0) {
_tailIndex = _headIndex;
}
// Pull the previous tail query, hoping to see it return
if (_tailIndex != _headIndex) {
batch.getQuery(_timerQueries[rangeIndex(_tailIndex)]);
}
}
double RangeTimer::getAverage() const {
return _movingAverage.average;
}

View file

@ -13,26 +13,59 @@
#include <assert.h>
#include <memory>
#include <functional>
#include <vector>
#include <SimpleMovingAverage.h>
#include "Format.h"
namespace gpu {
class Batch;
class Query {
public:
Query();
using Handler = std::function<void(const Query&)>;
Query(const Handler& returnHandler);
~Query();
uint32 queryResult;
double getElapsedTime();
double getElapsedTime() const;
const GPUObjectPointer gpuObject {};
void triggerReturnHandler(uint64_t queryResult);
protected:
Handler _returnHandler;
uint64_t _queryResult = 0;
};
typedef std::shared_ptr<Query> QueryPointer;
typedef std::vector< QueryPointer > Queries;
// The gpu RangeTimer returns an estimate of the time taken by a chunk of work delimited by the
// begin and end calls, repeated over several frames.
// The result is always a late average of the time spent on that same task a few cycles ago.
class RangeTimer {
public:
RangeTimer();
void begin(gpu::Batch& batch);
void end(gpu::Batch& batch);
double getAverage() const;
protected:
static const int QUERY_QUEUE_SIZE { 4 };
gpu::Queries _timerQueries;
int _headIndex = -1;
int _tailIndex = -1;
MovingAverage<double, QUERY_QUEUE_SIZE * 2> _movingAverage;
int rangeIndex(int index) const { return (index % QUERY_QUEUE_SIZE); }
};
};
#endif
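A minimal usage sketch of the new RangeTimer from a render job that already records into a gpu::Batch; everything outside the gpu API below is illustrative:
class MyRenderJob {
public:
    void run(gpu::Batch& batch) {
        _gpuTimer.begin(batch);   // beginQuery on the next slot of the internal ring
        // ... record the GPU work to be measured ...
        _gpuTimer.end(batch);     // endQuery, plus getQuery on the oldest pending slot

        // Deliberately a few frames late: only queries that have already returned
        // through their handlers feed the moving average. The value is the raw
        // timer result scaled by 1e-6 (milliseconds if the backend reports nanoseconds).
        double avgGpuTime = _gpuTimer.getAverage();
        (void)avgGpuTime;
    }
private:
    gpu::RangeTimer _gpuTimer;
};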

View file

@ -1,245 +0,0 @@
<!
// Atmospheric.slh
//
// Created by Sam Gateau on 3/9/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
!>
<@if not MODEL_ATMOSPHERE_SLH@>
<@def MODEL_ATMOSPHERE_SLH@>
<!
// Code is a modified version of:
// http://http.developer.nvidia.com/GPUGems/gpugems_app01.html
// Atmospheric scattering fragment shader
//
// Author: Sean O'Neil
//
// Copyright (c) 2004 Sean O'Neil
//
// For licensing information, see http://http.developer.nvidia.com/GPUGems/gpugems_app01.html:
//
// NVIDIA Statement on the Software
//
// The source code provided is freely distributable, so long as the NVIDIA header remains unaltered and user modifications are
// detailed.
//
// No Warranty
//
// THE SOFTWARE AND ANY OTHER MATERIALS PROVIDED BY NVIDIA ON THE ENCLOSED CD-ROM ARE PROVIDED "AS IS." NVIDIA DISCLAIMS ALL
// WARRANTIES, EXPRESS, IMPLIED OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
//
// Limitation of Liability
//
// NVIDIA SHALL NOT BE LIABLE TO ANY USER, DEVELOPER, DEVELOPER'S CUSTOMERS, OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH OR
// UNDER DEVELOPER FOR ANY LOSS OF PROFITS, INCOME, SAVINGS, OR ANY OTHER CONSEQUENTIAL, INCIDENTAL, SPECIAL, PUNITIVE, DIRECT
// OR INDIRECT DAMAGES (WHETHER IN AN ACTION IN CONTRACT, TORT OR BASED ON A WARRANTY), EVEN IF NVIDIA HAS BEEN ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGES. THESE LIMITATIONS SHALL APPLY NOTWITHSTANDING ANY FAILURE OF THE ESSENTIAL PURPOSE OF ANY
// LIMITED REMEDY. IN NO EVENT SHALL NVIDIA'S AGGREGATE LIABILITY TO DEVELOPER OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH
// OR UNDER DEVELOPER EXCEED THE AMOUNT OF MONEY ACTUALLY PAID BY DEVELOPER TO NVIDIA FOR THE SOFTWARE OR ANY OTHER MATERIALS.
//
!>
struct Atmosphere {
vec4 _invWaveLength;
vec4 _radiuses;
vec4 _scales;
vec4 _scatterings;
vec4 _control;
};
const int numSamples = 2;
vec3 getAtmosphereInvWaveLength(Atmosphere a) { return a._invWaveLength.xyz; } // 1 / pow(wavelength, 4) for the red, green, and blue channels
float getAtmosphereInnerRadius(Atmosphere a) { return a._radiuses.x; } // The inner (planetary) radius
float getAtmosphereOuterRadius(Atmosphere a) { return a._radiuses.y; } // The outer (atmosphere) radius
float getAtmosphereScale(Atmosphere a) { return a._scales.x; } // 1 / (outerRadius - innerRadius)
float getAtmosphereScaleDepth(Atmosphere a) { return a._scales.y; } // The scale depth (i.e. the altitude at which the atmosphere's average density is found)
float getAtmosphereScaleOverScaleDepth(Atmosphere a) { return a._scales.z; } // scale / scaleDepth
vec4 getAtmosphereScattering(Atmosphere a) { return a._scatterings; } // The full Mie and Rayleigh scattering coefficients
float getAtmosphereKrESun(Atmosphere a) { return a._scatterings.x; } // Kr * ESun
float getAtmosphereKmESun(Atmosphere a) { return a._scatterings.y; } // Km * ESun
float getAtmosphereKr4PI(Atmosphere a) { return a._scatterings.z; } // Kr * 4 * PI
float getAtmosphereKm4PI(Atmosphere a) { return a._scatterings.w; } // Km * 4 * PI
float getAtmosphereNumSamples(Atmosphere a) { return a._control.x; } // numSamples
vec2 getAtmosphereGAndG2(Atmosphere a) { return a._control.yz; } // g and g2
float atmosphereScale(float scaleDepth, float fCos)
{
float x = 1.0 - fCos;
return scaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
vec4 evalAtmosphereContribution(Atmosphere atmospheric, vec3 position, vec3 cameraPos, vec3 lightPos) {
float fInnerRadius = getAtmosphereInnerRadius(atmospheric);
float fSamples = getAtmosphereNumSamples(atmospheric);
vec3 v3InvWavelength = getAtmosphereInvWaveLength(atmospheric);
vec4 scatteringCoefs = getAtmosphereScattering(atmospheric);
float fKrESun = scatteringCoefs.x;
float fKmESun = scatteringCoefs.y;
float fKr4PI = scatteringCoefs.z;
float fKm4PI = scatteringCoefs.w;
vec2 gAndg2 = getAtmosphereGAndG2(atmospheric);
float g = gAndg2.x;
float g2 = gAndg2.y;
float fScale = getAtmosphereScale(atmospheric);
float fScaleDepth = getAtmosphereScaleDepth(atmospheric);
float fScaleOverScaleDepth = getAtmosphereScaleOverScaleDepth(atmospheric);
// Get the ray from the camera to the vertex, and its length (which is the far point of the ray passing through the atmosphere)
vec3 v3Pos = position;
vec3 v3Ray = v3Pos - cameraPos;
float fFar = length(v3Ray);
v3Ray /= fFar;
// Calculate the ray's starting position, then calculate its scattering offset
vec3 v3Start = cameraPos;
float fHeight = length(v3Start);
float fDepthStart = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fStartAngle = dot(v3Ray, v3Start) / fHeight;
float fStartOffset = fDepthStart * atmosphereScale(fScaleDepth, fStartAngle);
// Initialize the scattering loop variables
//gl_FrontColor = vec4(0.0, 0.0, 0.0, 0.0);
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
vec3 v3SampleRay = v3Ray * fSampleLength;
vec3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays
vec3 v3FrontColor = vec3(0.0, 0.0, 0.0);
// int nSamples = numSamples;
int nSamples = int(fSamples);
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fLightAngle = dot(lightPos, v3SamplePoint) / fHeight;
float fCameraAngle = dot((v3Ray), v3SamplePoint) / fHeight * 0.99;
float fScatter = (fStartOffset + fDepth * (atmosphereScale(fScaleDepth, fLightAngle) - atmosphereScale(fScaleDepth, fCameraAngle)));
vec3 v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepth * fScaledLength);
v3SamplePoint += v3SampleRay;
}
// Finally, scale the Mie and Rayleigh colors and set up the varying variables for the pixel shader
vec3 secondaryFrontColor = v3FrontColor * fKmESun;
vec3 frontColor = v3FrontColor * (v3InvWavelength * fKrESun);
vec3 v3Direction = cameraPos - v3Pos;
float fCos = dot(lightPos, v3Direction) / length(v3Direction);
float fMiePhase = 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos*fCos) / pow(1.0 + g2 - 2.0*g*fCos, 1.5);
vec4 finalColor;
finalColor.rgb = frontColor.rgb + fMiePhase * secondaryFrontColor.rgb;
finalColor.a = finalColor.b;
finalColor.rgb = pow(finalColor.rgb, vec3(1.0/2.2));
return finalColor;
}
<@if GLPROFILE == PC_GL@>
uniform atmosphereBuffer {
Atmosphere _atmosphere;
};
Atmosphere getAtmosphere() {
return _atmosphere;
}
<@else@>
uniform vec4 atmosphereBuffer[9];
Atmosphere getAtmosphere() {
Atmosphere atmosphere;
atmosphere._invWaveLength = atmosphereBuffer[0];
atmosphere._radiuses = atmosphereBuffer[1];
atmosphere._scales = atmosphereBuffer[2];
atmosphere._scatterings = atmosphereBuffer[3];
atmosphere._control = atmosphereBuffer[4];
return atmosphere;
}
<@endif@>
<!
/*
// uniform vec3 v3CameraPos; // The camera's current position
const int nSamples = 2;
const float fSamples = 2.0;
uniform vec3 v3LightPos;
uniform float g;
uniform float g2;
varying vec3 position;
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
void main (void)
{
// Get the ray from the camera to the vertex, and its length (which is the far point of the ray passing through the atmosphere)
vec3 v3Pos = position;
vec3 v3Ray = v3Pos - v3CameraPos;
float fFar = length(v3Ray);
v3Ray /= fFar;
// Calculate the ray's starting position, then calculate its scattering offset
vec3 v3Start = v3CameraPos;
float fHeight = length(v3Start);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fStartAngle = dot(v3Ray, v3Start) / fHeight;
float fStartOffset = fDepth * scale(fStartAngle);
// Initialize the scattering loop variables
//gl_FrontColor = vec4(0.0, 0.0, 0.0, 0.0);
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
vec3 v3SampleRay = v3Ray * fSampleLength;
vec3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays
vec3 v3FrontColor = vec3(0.0, 0.0, 0.0);
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fLightAngle = dot(v3LightPos, v3SamplePoint) / fHeight;
float fCameraAngle = dot((v3Ray), v3SamplePoint) / fHeight * 0.99;
float fScatter = (fStartOffset + fDepth * (scale(fLightAngle) - scale(fCameraAngle)));
vec3 v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepth * fScaledLength);
v3SamplePoint += v3SampleRay;
}
// Finally, scale the Mie and Rayleigh colors and set up the varying variables for the pixel shader
vec3 secondaryFrontColor = v3FrontColor * fKmESun;
vec3 frontColor = v3FrontColor * (v3InvWavelength * fKrESun);
vec3 v3Direction = v3CameraPos - v3Pos;
float fCos = dot(v3LightPos, v3Direction) / length(v3Direction);
float fMiePhase = 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos*fCos) / pow(1.0 + g2 - 2.0*g*fCos, 1.5);
outFragColor.rgb = frontColor.rgb + fMiePhase * secondaryFrontColor.rgb;
outFragColor.a = outFragColor.b;
outFragColor.rgb = pow(outFragColor.rgb, vec3(1.0/2.2));
}
*/
!>
<@endif@>

View file

@ -133,56 +133,6 @@ void EarthSunModel::setSunLongitude(float lon) {
invalidate();
}
Atmosphere::Atmosphere() {
// only if created from nothing shall we create the Buffer to store the properties
Data data;
_dataBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Data), (const gpu::Byte*) &data));
setScatteringWavelength(_scatteringWavelength);
setRayleighScattering(_rayleighScattering);
setInnerOuterRadiuses(getInnerRadius(), getOuterRadius());
}
void Atmosphere::setScatteringWavelength(Vec3 wavelength) {
_scatteringWavelength = wavelength;
Data& data = editData();
data._invWaveLength = Vec4(1.0f / powf(wavelength.x, 4.0f), 1.0f / powf(wavelength.y, 4.0f), 1.0f / powf(wavelength.z, 4.0f), 0.0f);
}
void Atmosphere::setRayleighScattering(float scattering) {
_rayleighScattering = scattering;
updateScattering();
}
void Atmosphere::setMieScattering(float scattering) {
_mieScattering = scattering;
updateScattering();
}
void Atmosphere::setSunBrightness(float brightness) {
_sunBrightness = brightness;
updateScattering();
}
void Atmosphere::updateScattering() {
Data& data = editData();
data._scatterings.x = getRayleighScattering() * getSunBrightness();
data._scatterings.y = getMieScattering() * getSunBrightness();
data._scatterings.z = getRayleighScattering() * 4.0f * glm::pi<float>();
data._scatterings.w = getMieScattering() * 4.0f * glm::pi<float>();
}
void Atmosphere::setInnerOuterRadiuses(float inner, float outer) {
Data& data = editData();
data._radiuses.x = inner;
data._radiuses.y = outer;
data._scales.x = 1.0f / (outer - inner);
data._scales.z = data._scales.x / data._scales.y;
}
const int NUM_DAYS_PER_YEAR = 365;
const float NUM_HOURS_PER_DAY = 24.0f;
const float NUM_HOURS_PER_HALF_DAY = NUM_HOURS_PER_DAY * 0.5f;

View file

@ -107,60 +107,6 @@ protected:
static Mat4d evalWorldToGeoLocationMat(double longitude, double latitude, double altitude, double scale);
};
class Atmosphere {
public:
Atmosphere();
Atmosphere(const Atmosphere& atmosphere);
Atmosphere& operator= (const Atmosphere& atmosphere);
virtual ~Atmosphere() {};
void setScatteringWavelength(Vec3 wavelength);
const Vec3& getScatteringWavelength() const { return _scatteringWavelength; }
void setRayleighScattering(float scattering);
float getRayleighScattering() const { return _rayleighScattering; }
void setMieScattering(float scattering);
float getMieScattering() const { return _mieScattering; }
void setSunBrightness(float brightness);
float getSunBrightness() const { return _sunBrightness; }
void setInnerOuterRadiuses(float inner, float outer);
float getInnerRadius() const { return getData()._radiuses.x; }
float getOuterRadius() const { return getData()._radiuses.y; }
// Data to access the attribute values of the atmosphere
class Data {
public:
Vec4 _invWaveLength = Vec4(0.0f);
Vec4 _radiuses = Vec4(6000.0f, 6025.0f, 0.0f, 0.0f);
Vec4 _scales = Vec4(0.0f, 0.25f, 0.0f, 0.0f);
Vec4 _scatterings = Vec4(0.0f);
Vec4 _control = Vec4(2.0f, -0.990f, -0.990f*-0.990f, 0.f);
Data() {}
};
const UniformBufferView& getDataBuffer() const { return _dataBuffer; }
protected:
UniformBufferView _dataBuffer;
Vec3 _scatteringWavelength = Vec3(0.650f, 0.570f, 0.475f);
float _rayleighScattering = 0.0025f;
float _mieScattering = 0.0010f;
float _sunBrightness = 20.0f;
const Data& getData() const { return _dataBuffer.get<Data>(); }
Data& editData() { return _dataBuffer.edit<Data>(); }
void updateScattering();
};
typedef std::shared_ptr< Atmosphere > AtmospherePointer;
// Sun sky stage generates the rendering primitives to display a scene realistically
// at the specified location and time around earth
class SunSkyStage {
@ -209,7 +155,6 @@ public:
const Vec3& getSunDirection() const { return getSunLight()->getDirection(); }
LightPointer getSunLight() const { valid(); return _sunLight; }
AtmospherePointer getAtmosphere() const { valid(); return _atmosphere; }
enum BackgroundMode {
NO_BACKGROUND = 0,
@ -229,7 +174,6 @@ protected:
BackgroundMode _backgroundMode = SKY_BOX;
LightPointer _sunLight;
AtmospherePointer _atmosphere;
mutable SkyboxPointer _skybox;
float _dayTime = 12.0f;

View file

@ -149,8 +149,15 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
// check if it is a network address first
if (handleNetworkAddress(lookupUrl.host()
+ (lookupUrl.port() == -1 ? "" : ":" + QString::number(lookupUrl.port())), trigger)) {
// if we were not passed a path, use the index path
auto path = lookupUrl.path();
if (path.isEmpty()) {
path = INDEX_PATH;
}
// we may have a path that defines a relative viewpoint - if so we should jump to that now
handlePath(lookupUrl.path(), trigger);
handlePath(path, trigger);
} else if (handleDomainID(lookupUrl.host())){
// no place name - this is probably a domain ID
// try to look up the domain ID on the metaverse API

View file

@ -60,7 +60,7 @@ NodeList::NodeList(char newOwnerType, unsigned short socketListenPort, unsigned
// in case we don't know how to talk to DS when a path change is requested
// fire off any pending DS path query when we get socket information
connect(&_domainHandler, &DomainHandler::completedSocketDiscovery, this, &NodeList::sendPendingDSPathQuery);
connect(&_domainHandler, &DomainHandler::connectedToDomain, this, &NodeList::sendPendingDSPathQuery);
// send a domain server check in immediately once the DS socket is known
connect(&_domainHandler, &DomainHandler::completedSocketDiscovery, this, &NodeList::sendDomainServerCheckIn);

View file

@ -41,7 +41,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::EntityAdd:
case PacketType::EntityEdit:
case PacketType::EntityData:
return VERSION_ENTITITES_HAVE_COLLISION_MASK;
return VERSION_ATMOSPHERE_REMOVED;
case PacketType::AvatarData:
case PacketType::BulkAvatarData:
return static_cast<PacketVersion>(AvatarMixerPacketVersion::SoftAttachmentSupport);

View file

@ -165,6 +165,7 @@ const PacketVersion VERSION_ENTITIES_REMOVED_START_AUTOMATICALLY_FROM_ANIMATION_
const PacketVersion VERSION_MODEL_ENTITIES_JOINTS_ON_WIRE = 53;
const PacketVersion VERSION_ENTITITES_HAVE_QUERY_BOX = 54;
const PacketVersion VERSION_ENTITITES_HAVE_COLLISION_MASK = 55;
const PacketVersion VERSION_ATMOSPHERE_REMOVED = 56;
enum class AvatarMixerPacketVersion : PacketVersion {
TranslationSupport = 17,

View file

@ -82,7 +82,7 @@ EntityMotionState::~EntityMotionState() {
void EntityMotionState::updateServerPhysicsVariables() {
assert(entityTreeIsLocked());
if (_entity->getSimulatorID() == PhysicsEngine::getSessionID()) {
if (_entity->getSimulatorID() == Physics::getSessionUUID()) {
// don't slam these values if we are the simulation owner
return;
}
@ -113,7 +113,7 @@ bool EntityMotionState::handleEasyChanges(uint32_t& flags) {
_outgoingPriority = NO_PRORITY;
} else {
_nextOwnershipBid = usecTimestampNow() + USECS_BETWEEN_OWNERSHIP_BIDS;
if (PhysicsEngine::getSessionID() == _entity->getSimulatorID() || _entity->getSimulationPriority() >= _outgoingPriority) {
if (Physics::getSessionUUID() == _entity->getSimulatorID() || _entity->getSimulationPriority() >= _outgoingPriority) {
// we own the simulation or our priority loses to (or ties with) remote
_outgoingPriority = NO_PRORITY;
}
@ -527,7 +527,7 @@ uint32_t EntityMotionState::getIncomingDirtyFlags() {
if (dirtyFlags | Simulation::DIRTY_SIMULATOR_ID) {
// when SIMULATOR_ID changes we must check for reinterpretation of asymmetric collision mask
// bits for the avatar groups (e.g. MY_AVATAR vs OTHER_AVATAR)
uint8_t entityCollisionMask = _entity->getCollisionMask();
uint8_t entityCollisionMask = _entity->getCollisionless() ? 0 : _entity->getCollisionMask();
if ((bool)(entityCollisionMask & USER_COLLISION_GROUP_MY_AVATAR) !=
(bool)(entityCollisionMask & USER_COLLISION_GROUP_OTHER_AVATAR)) {
// bits are asymmetric --> flag for reinsertion in physics simulation
@ -622,39 +622,8 @@ QString EntityMotionState::getName() const {
// virtual
void EntityMotionState::computeCollisionGroupAndMask(int16_t& group, int16_t& mask) const {
group = BULLET_COLLISION_GROUP_STATIC;
if (_entity) {
if (_entity->getCollisionless()) {
group = BULLET_COLLISION_GROUP_COLLISIONLESS;
}
switch (computePhysicsMotionType()){
case MOTION_TYPE_STATIC:
group = BULLET_COLLISION_GROUP_STATIC;
break;
case MOTION_TYPE_DYNAMIC:
group = BULLET_COLLISION_GROUP_DYNAMIC;
break;
case MOTION_TYPE_KINEMATIC:
group = BULLET_COLLISION_GROUP_KINEMATIC;
break;
default:
break;
}
}
mask = PhysicsEngine::getCollisionMask(group);
if (_entity) {
uint8_t entityCollisionMask = _entity->getFinalCollisionMask();
if ((bool)(entityCollisionMask & USER_COLLISION_GROUP_MY_AVATAR) !=
(bool)(entityCollisionMask & USER_COLLISION_GROUP_OTHER_AVATAR)) {
// asymmetric avatar collision mask bits
if (!_entity->getSimulatorID().isNull() && _entity->getSimulatorID() != PhysicsEngine::getSessionID()) {
// someone else owns the simulation, so we swap the interpretation of the bits
entityCollisionMask ^= USER_COLLISION_MASK_AVATARS | ~entityCollisionMask;
}
}
mask &= (int16_t)(entityCollisionMask);
}
assert(_entity);
_entity->computeCollisionGroupAndFinalMask(group, mask);
}
void EntityMotionState::setOutgoingPriority(quint8 priority) {

View file

@ -142,6 +142,7 @@ void PhysicalEntitySimulation::clearEntitiesInternal() {
// finally clear all lists maintained by this class
_physicalObjects.clear();
_entitiesToRemoveFromPhysics.clear();
_entitiesToRelease.clear();
_entitiesToAddToPhysics.clear();
_pendingChanges.clear();
_outgoingChanges.clear();
@ -157,6 +158,7 @@ void PhysicalEntitySimulation::prepareEntityForDelete(EntityItemPointer entity)
// end EntitySimulation overrides
void PhysicalEntitySimulation::getObjectsToRemoveFromPhysics(VectorOfMotionStates& result) {
_entitiesToRelease.clear();
result.clear();
QMutexLocker lock(&_mutex);
for (auto entity: _entitiesToRemoveFromPhysics) {
@ -171,7 +173,7 @@ void PhysicalEntitySimulation::getObjectsToRemoveFromPhysics(VectorOfMotionState
_entitiesToDelete.insert(entity);
}
}
_entitiesToRemoveFromPhysics.clear();
_entitiesToRemoveFromPhysics.swap(_entitiesToRelease);
}
void PhysicalEntitySimulation::getObjectsToAddToPhysics(VectorOfMotionStates& result) {

View file

@ -60,6 +60,7 @@ public:
private:
SetOfEntities _entitiesToRemoveFromPhysics;
SetOfEntities _entitiesToRelease;
SetOfEntities _entitiesToAddToPhysics;
SetOfEntityMotionStates _pendingChanges; // EntityMotionStates already in PhysicsEngine that need their physics changed
@ -70,7 +71,7 @@ private:
PhysicsEnginePointer _physicsEngine = nullptr;
EntityEditPacketSender* _entityPacketSender = nullptr;
uint32_t _lastStepSendPackets = 0;
uint32_t _lastStepSendPackets { 0 };
};

View file

@ -18,47 +18,10 @@
#include "ThreadSafeDynamicsWorld.h"
#include "PhysicsLogging.h"
uint32_t PhysicsEngine::getNumSubsteps() {
return _numSubsteps;
}
btHashMap<btHashInt, int16_t> _collisionMasks;
void initCollisionMaskTable() {
if (_collisionMasks.size() == 0) {
// build table of masks with their group as the key
_collisionMasks.insert(btHashInt((int)BULLET_COLLISION_GROUP_DYNAMIC), BULLET_COLLISION_MASK_DYNAMIC);
_collisionMasks.insert(btHashInt((int)BULLET_COLLISION_GROUP_STATIC), BULLET_COLLISION_MASK_STATIC);
_collisionMasks.insert(btHashInt((int)BULLET_COLLISION_GROUP_KINEMATIC), BULLET_COLLISION_MASK_KINEMATIC);
_collisionMasks.insert(btHashInt((int)BULLET_COLLISION_GROUP_MY_AVATAR), BULLET_COLLISION_MASK_MY_AVATAR);
_collisionMasks.insert(btHashInt((int)BULLET_COLLISION_GROUP_OTHER_AVATAR), BULLET_COLLISION_MASK_OTHER_AVATAR);
_collisionMasks.insert(btHashInt((int)BULLET_COLLISION_GROUP_COLLISIONLESS), BULLET_COLLISION_MASK_COLLISIONLESS);
}
}
// static
int16_t PhysicsEngine::getCollisionMask(int16_t group) {
const int16_t* mask = _collisionMasks.find(btHashInt((int)group));
return mask ? *mask : BULLET_COLLISION_MASK_DEFAULT;
}
QUuid _sessionID;
// static
void PhysicsEngine::setSessionUUID(const QUuid& sessionID) {
_sessionID = sessionID;
}
// static
const QUuid& PhysicsEngine::getSessionID() {
return _sessionID;
}
PhysicsEngine::PhysicsEngine(const glm::vec3& offset) :
_originOffset(offset),
_sessionID(),
_myAvatarController(nullptr) {
initCollisionMaskTable();
}
PhysicsEngine::~PhysicsEngine() {
@ -90,6 +53,10 @@ void PhysicsEngine::init() {
}
}
uint32_t PhysicsEngine::getNumSubsteps() {
return _numSubsteps;
}
// private
void PhysicsEngine::addObjectToDynamicsWorld(ObjectMotionState* motionState) {
assert(motionState);

View file

@ -45,16 +45,11 @@ typedef QVector<Collision> CollisionEvents;
class PhysicsEngine {
public:
static int16_t getCollisionMask(int16_t group);
uint32_t getNumSubsteps();
PhysicsEngine(const glm::vec3& offset);
~PhysicsEngine();
void init();
static void setSessionUUID(const QUuid& sessionID);
static const QUuid& getSessionID();
uint32_t getNumSubsteps();
void removeObjects(const VectorOfMotionStates& objects);
void removeObjects(const SetOfMotionStates& objects); // only called during teardown
@ -95,6 +90,8 @@ public:
void removeAction(const QUuid actionID);
void forEachAction(std::function<void(EntityActionPointer)> actor);
void setSessionUUID(const QUuid& sessionID) { _sessionID = sessionID; }
private:
void addObjectToDynamicsWorld(ObjectMotionState* motionState);
void removeObjectFromDynamicsWorld(ObjectMotionState* motionState);
@ -111,23 +108,21 @@ private:
ThreadSafeDynamicsWorld* _dynamicsWorld = NULL;
btGhostPairCallback* _ghostPairCallback = NULL;
glm::vec3 _originOffset;
ContactMap _contactMap;
uint32_t _numContactFrames = 0;
CollisionEvents _collisionEvents;
QHash<QUuid, EntityActionPointer> _objectActions;
glm::vec3 _originOffset;
QUuid _sessionID;
/// character collisions
CharacterController* _myAvatarController;
uint32_t _numContactFrames = 0;
uint32_t _numSubsteps;
bool _dumpNextStats = false;
bool _hasOutgoingChanges = false;
CollisionEvents _collisionEvents;
QHash<QUuid, EntityActionPointer> _objectActions;
uint32_t _numSubsteps;
};
typedef std::shared_ptr<PhysicsEngine> PhysicsEnginePointer;

View file

@ -3,7 +3,7 @@ AUTOSCRIBE_SHADER_LIB(gpu model render)
# pull in the resources.qrc file
qt5_add_resources(QT_RESOURCES_FILE "${CMAKE_CURRENT_SOURCE_DIR}/res/fonts/fonts.qrc")
setup_hifi_library(Widgets OpenGL Network Qml Quick Script)
link_hifi_libraries(shared gpu procedural model model-networking render environment animation fbx)
link_hifi_libraries(shared gpu procedural model model-networking render animation fbx)
target_nsight()
target_oglplus()

View file

@ -24,7 +24,6 @@ class Transform;
class QThread;
class ViewFrustum;
class PickRay;
class EnvironmentData;
/// Interface provided by Application to other objects that need access to the current view state details
class AbstractViewStateInterface {
@ -32,10 +31,6 @@ public:
/// gets the current view frustum for rendering the view state
virtual ViewFrustum* getCurrentViewFrustum() = 0;
/// overrides environment data
virtual void overrideEnvironmentData(const EnvironmentData& newData) = 0;
virtual void endOverrideEnvironmentData() = 0;
/// gets the shadow view frustum for rendering the view state
virtual ViewFrustum* getShadowViewFrustum() = 0;

View file

@ -12,69 +12,135 @@
#include <glm/gtc/random.hpp>
#include <algorithm> //min max and more
#include <PathUtils.h>
#include <SharedUtil.h>
#include <gpu/Context.h>
#include <gpu/StandardShaderLib.h>
#include "RenderUtilsLogging.h"
#include "AmbientOcclusionEffect.h"
#include "TextureCache.h"
#include "FramebufferCache.h"
#include "DependencyManager.h"
#include "ViewFrustum.h"
#include "GeometryCache.h"
#include "ambient_occlusion_vert.h"
#include "ambient_occlusion_frag.h"
#include "gaussian_blur_vertical_vert.h"
#include "gaussian_blur_horizontal_vert.h"
#include "gaussian_blur_frag.h"
#include "occlusion_blend_frag.h"
#include "ssao_makePyramid_frag.h"
#include "ssao_makeOcclusion_frag.h"
#include "ssao_makeHorizontalBlur_frag.h"
#include "ssao_makeVerticalBlur_frag.h"
class GaussianDistribution {
public:
static double integral(float x, float deviation) {
return 0.5 * erf((double)x / ((double)deviation * sqrt(2.0)));
}
static double rangeIntegral(float x0, float x1, float deviation) {
return integral(x1, deviation) - integral(x0, deviation);
}
static std::vector<float> evalSampling(int samplingRadius, float deviation) {
std::vector<float> coefs(samplingRadius + 1, 0.0f);
// corner case when radius is 0 or under
if (samplingRadius <= 0) {
coefs[0] = 1.0f;
return coefs;
}
// Evaluate the range integral of width 1 for each sample, from the center up to the penultimate one
float halfWidth = 0.5f;
double sum = 0.0;
for (int i = 0; i < samplingRadius; i++) {
float x = (float) i;
double sample = rangeIntegral(x - halfWidth, x + halfWidth, deviation);
coefs[i] = sample;
sum += sample;
}
// last sample goes to infinity
float lastSampleX0 = (float) samplingRadius - halfWidth;
float largeEnough = lastSampleX0 + 1000.0f * deviation;
double sample = rangeIntegral(lastSampleX0, largeEnough, deviation);
coefs[samplingRadius] = sample;
sum += sample;
return coefs;
}
static void evalSampling(float* coefs, unsigned int coefsLength, int samplingRadius, float deviation) {
auto coefsVector = evalSampling(samplingRadius, deviation);
if (coefsLength > coefsVector.size() + 1) {
unsigned int coefsNum = 0;
for (auto s : coefsVector) {
coefs[coefsNum] = s;
coefsNum++;
}
for (;coefsNum < coefsLength; coefsNum++) {
coefs[coefsNum] = 0.0f;
}
}
}
};
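For reference, each tap weight produced by evalSampling() is the Gaussian probability mass of a unit-wide bin centered on the tap, w_i = 0.5 * (erf((i + 0.5) / (deviation * sqrt(2))) - erf((i - 0.5) / (deviation * sqrt(2)))), with the outermost tap absorbing the remaining tail; applying coefs[0] once at the center and coefs[i] at offsets +i and -i therefore yields a symmetric kernel that sums to ~1. A small usage sketch of the local helper above (radius and deviation values are arbitrary for illustration):
// One-sided weights for a blur with 4 taps on each side of the center, sigma = 1.5.
std::vector<float> coefs = GaussianDistribution::evalSampling(4, 1.5f);
// coefs[0] is the center weight; coefs[1..4] are applied symmetrically at +/- i.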
AmbientOcclusion::AmbientOcclusion() {
const int AmbientOcclusionEffect_FrameTransformSlot = 0;
const int AmbientOcclusionEffect_ParamsSlot = 1;
const int AmbientOcclusionEffect_DepthMapSlot = 0;
const int AmbientOcclusionEffect_PyramidMapSlot = 0;
const int AmbientOcclusionEffect_OcclusionMapSlot = 0;
AmbientOcclusionEffect::AmbientOcclusionEffect() {
FrameTransform frameTransform;
_frameTransformBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(FrameTransform), (const gpu::Byte*) &frameTransform));
Parameters parameters;
_parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Parameters), (const gpu::Byte*) &parameters));
}
const gpu::PipelinePointer& AmbientOcclusion::getOcclusionPipeline() {
if (!_occlusionPipeline) {
auto vs = gpu::Shader::createVertex(std::string(ambient_occlusion_vert));
auto ps = gpu::Shader::createPixel(std::string(ambient_occlusion_frag));
const gpu::PipelinePointer& AmbientOcclusionEffect::getPyramidPipeline() {
if (!_pyramidPipeline) {
auto vs = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
auto ps = gpu::Shader::createPixel(std::string(ssao_makePyramid_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("depthTexture"), 0));
slotBindings.insert(gpu::Shader::Binding(std::string("normalTexture"), 1));
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionFrameTransformBuffer"), AmbientOcclusionEffect_FrameTransformSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionParamsBuffer"), AmbientOcclusionEffect_ParamsSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), AmbientOcclusionEffect_DepthMapSlot));
gpu::Shader::makeProgram(*program, slotBindings);
_gScaleLoc = program->getUniforms().findLocation("g_scale");
_gBiasLoc = program->getUniforms().findLocation("g_bias");
_gSampleRadiusLoc = program->getUniforms().findLocation("g_sample_rad");
_gIntensityLoc = program->getUniforms().findLocation("g_intensity");
_nearLoc = program->getUniforms().findLocation("near");
_depthScaleLoc = program->getUniforms().findLocation("depthScale");
_depthTexCoordOffsetLoc = program->getUniforms().findLocation("depthTexCoordOffset");
_depthTexCoordScaleLoc = program->getUniforms().findLocation("depthTexCoordScale");
_renderTargetResLoc = program->getUniforms().findLocation("renderTargetRes");
_renderTargetResInvLoc = program->getUniforms().findLocation("renderTargetResInv");
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(false, false, gpu::LESS_EQUAL);
// Stencil test the pyramid pass for object pixels only, not the background
state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::NOT_EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));
// Blend on transparent
state->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);
state->setColorWriteMask(true, false, false, false);
// Link the occlusion FBO to texture
_occlusionBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
DependencyManager::get<FramebufferCache>()->getFrameBufferSize().width(), DependencyManager::get<FramebufferCache>()->getFrameBufferSize().height()));
auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
auto width = _occlusionBuffer->getWidth();
auto height = _occlusionBuffer->getHeight();
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
_occlusionTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));
// Good to go add the brand new pipeline
_pyramidPipeline = gpu::Pipeline::create(program, state);
}
return _pyramidPipeline;
}
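// Lazily builds the occlusion pipeline: it reads the depth pyramid (plus the frame transform and parameters buffers) and writes the raw obscurance estimate.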
const gpu::PipelinePointer& AmbientOcclusionEffect::getOcclusionPipeline() {
if (!_occlusionPipeline) {
auto vs = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
auto ps = gpu::Shader::createPixel(std::string(ssao_makeOcclusion_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionFrameTransformBuffer"), AmbientOcclusionEffect_FrameTransformSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionParamsBuffer"), AmbientOcclusionEffect_ParamsSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("pyramidMap"), AmbientOcclusionEffect_PyramidMapSlot));
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setColorWriteMask(true, true, true, false);
// Good to go add the brand new pipeline
_occlusionPipeline = gpu::Pipeline::create(program, state);
@@ -82,32 +148,46 @@ const gpu::PipelinePointer& AmbientOcclusion::getOcclusionPipeline() {
return _occlusionPipeline;
}
const gpu::PipelinePointer& AmbientOcclusion::getVBlurPipeline() {
if (!_vBlurPipeline) {
auto vs = gpu::Shader::createVertex(std::string(gaussian_blur_vertical_vert));
auto ps = gpu::Shader::createPixel(std::string(gaussian_blur_frag));
const gpu::PipelinePointer& AmbientOcclusionEffect::getHBlurPipeline() {
if (!_hBlurPipeline) {
auto vs = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
auto ps = gpu::Shader::createPixel(std::string(ssao_makeHorizontalBlur_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionFrameTransformBuffer"), AmbientOcclusionEffect_FrameTransformSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionParamsBuffer"), AmbientOcclusionEffect_ParamsSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("occlusionMap"), AmbientOcclusionEffect_OcclusionMapSlot));
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(false, false, gpu::LESS_EQUAL);
state->setColorWriteMask(true, true, true, false);
// Good to go add the brand new pipeline
_hBlurPipeline = gpu::Pipeline::create(program, state);
}
return _hBlurPipeline;
}
// Blend on transparent
state->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);
// Link the vertical blur FBO to texture
_vBlurBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
DependencyManager::get<FramebufferCache>()->getFrameBufferSize().width(), DependencyManager::get<FramebufferCache>()->getFrameBufferSize().height()));
auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
auto width = _vBlurBuffer->getWidth();
auto height = _vBlurBuffer->getHeight();
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
_vBlurTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));
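// Second blur pass (vertical); together with the horizontal pass above it smooths the raw occlusion using the Gaussian coefficients stored in the parameters buffer.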
const gpu::PipelinePointer& AmbientOcclusionEffect::getVBlurPipeline() {
if (!_vBlurPipeline) {
auto vs = gpu::StandardShaderLib::getDrawViewportQuadTransformTexcoordVS();
auto ps = gpu::Shader::createPixel(std::string(ssao_makeVerticalBlur_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionFrameTransformBuffer"), AmbientOcclusionEffect_FrameTransformSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("ambientOcclusionParamsBuffer"), AmbientOcclusionEffect_ParamsSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("occlusionMap"), AmbientOcclusionEffect_OcclusionMapSlot));
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
// The vertical blur writes just the final occlusion result
state->setColorWriteMask(true, true, true, false);
// Good to go add the brand new pipeline
_vBlurPipeline = gpu::Pipeline::create(program, state);
@@ -115,171 +195,228 @@ const gpu::PipelinePointer& AmbientOcclusion::getVBlurPipeline() {
return _vBlurPipeline;
}
const gpu::PipelinePointer& AmbientOcclusion::getHBlurPipeline() {
if (!_hBlurPipeline) {
auto vs = gpu::Shader::createVertex(std::string(gaussian_blur_horizontal_vert));
auto ps = gpu::Shader::createPixel(std::string(gaussian_blur_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(false, false, gpu::LESS_EQUAL);
// Blend on transparent
state->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);
// Link the horizontal blur FBO to texture
_hBlurBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
DependencyManager::get<FramebufferCache>()->getFrameBufferSize().width(), DependencyManager::get<FramebufferCache>()->getFrameBufferSize().height()));
auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
auto width = _hBlurBuffer->getWidth();
auto height = _hBlurBuffer->getHeight();
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
_hBlurTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));
// Good to go add the brand new pipeline
_hBlurPipeline = gpu::Pipeline::create(program, state);
}
return _hBlurPipeline;
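// Packs { near*far, far - near, -far } into FrameTransform::depthInfo (see the matching comment in the header).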
void AmbientOcclusionEffect::setDepthInfo(float nearZ, float farZ) {
_frameTransformBuffer.edit<FrameTransform>().depthInfo = glm::vec4(nearZ*farZ, farZ -nearZ, -farZ, 0.0f);
}
const gpu::PipelinePointer& AmbientOcclusion::getBlendPipeline() {
if (!_blendPipeline) {
auto vs = gpu::Shader::createVertex(std::string(ambient_occlusion_vert));
auto ps = gpu::Shader::createPixel(std::string(occlusion_blend_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
void AmbientOcclusionEffect::setResolutionLevel(int level) {
const int MAX_RESOLUTION_LEVEL = 4;
level = std::max(0, std::min(level, MAX_RESOLUTION_LEVEL));
if (level != getResolutionLevel()) {
auto& current = _parametersBuffer.edit<Parameters>().resolutionInfo;
current.x = (float)level;
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("blurredOcclusionTexture"), 0));
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(false, false, gpu::LESS_EQUAL);
// Blend on transparent
state->setBlendFunction(true,
gpu::State::INV_SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::SRC_ALPHA);
// Good to go add the brand new pipeline
_blendPipeline = gpu::Pipeline::create(program, state);
// Communicate the change to the Framebuffer cache
DependencyManager::get<FramebufferCache>()->setAmbientOcclusionResolutionLevel(level);
}
return _blendPipeline;
}
void AmbientOcclusion::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
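// The setters below clamp their input and only touch the GPU-side parameters buffer when the value actually changes.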
void AmbientOcclusionEffect::setRadius(float radius) {
const double RADIUS_POWER = 6.0;
radius = std::max(0.01f, radius);
if (radius != getRadius()) {
auto& current = _parametersBuffer.edit<Parameters>().radiusInfo;
current.x = radius;
current.y = radius * radius;
current.z = (float)(1.0 / pow((double)radius, RADIUS_POWER));
}
}
void AmbientOcclusionEffect::setLevel(float level) {
level = std::max(0.01f, level);
if (level != getLevel()) {
auto& current = _parametersBuffer.edit<Parameters>().radiusInfo;
current.w = level;
}
}
void AmbientOcclusionEffect::setDithering(bool enabled) {
if (enabled != isDitheringEnabled()) {
auto& current = _parametersBuffer.edit<Parameters>().ditheringInfo;
current.x = (float)enabled;
}
}
void AmbientOcclusionEffect::setBordering(bool enabled) {
if (enabled != isBorderingEnabled()) {
auto& current = _parametersBuffer.edit<Parameters>().ditheringInfo;
current.w = (float)enabled;
}
}
void AmbientOcclusionEffect::setFalloffBias(float bias) {
bias = std::max(0.0f, std::min(bias, 0.2f));
if (bias != getFalloffBias()) {
auto& current = _parametersBuffer.edit<Parameters>().ditheringInfo;
current.z = (float)bias;
}
}
void AmbientOcclusionEffect::setNumSamples(int numSamples) {
numSamples = std::max(1.0f, (float) numSamples);
if (numSamples != getNumSamples()) {
auto& current = _parametersBuffer.edit<Parameters>().sampleInfo;
current.x = numSamples;
current.y = 1.0f / numSamples;
}
}
void AmbientOcclusionEffect::setNumSpiralTurns(float numTurns) {
numTurns = std::max(0.0f, (float)numTurns);
if (numTurns != getNumSpiralTurns()) {
auto& current = _parametersBuffer.edit<Parameters>().sampleInfo;
current.z = numTurns;
}
}
void AmbientOcclusionEffect::setEdgeSharpness(float sharpness) {
sharpness = std::max(0.0f, (float)sharpness);
if (sharpness != getEdgeSharpness()) {
auto& current = _parametersBuffer.edit<Parameters>().blurInfo;
current.x = sharpness;
}
}
void AmbientOcclusionEffect::setBlurRadius(int radius) {
const int MAX_BLUR_RADIUS = 6;
radius = std::max(0, std::min(MAX_BLUR_RADIUS, radius));
if (radius != getBlurRadius()) {
auto& current = _parametersBuffer.edit<Parameters>().blurInfo;
current.y = (float)radius;
updateGaussianDistribution();
}
}
void AmbientOcclusionEffect::setBlurDeviation(float deviation) {
deviation = std::max(0.0f, deviation);
if (deviation != getBlurDeviation()) {
auto& current = _parametersBuffer.edit<Parameters>().blurInfo;
current.z = deviation;
updateGaussianDistribution();
}
}
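// Recomputes the Gaussian blur weights whenever the blur radius or deviation changes.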
void AmbientOcclusionEffect::updateGaussianDistribution() {
auto coefs = _parametersBuffer.edit<Parameters>()._gaussianCoefs;
GaussianDistribution::evalSampling(coefs, Parameters::GAUSSIAN_COEFS_LENGTH, getBlurRadius(), getBlurDeviation());
}
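// Per-frame job entry point: updates the frame transform (mono or stereo), then records the pyramid, occlusion and optional blur passes into a single batch.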
void AmbientOcclusionEffect::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
assert(renderContext->getArgs());
assert(renderContext->getArgs()->_viewFrustum);
RenderArgs* args = renderContext->getArgs();
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
auto framebufferCache = DependencyManager::get<FramebufferCache>();
QSize framebufferSize = framebufferCache->getFrameBufferSize();
float fbWidth = framebufferSize.width();
float fbHeight = framebufferSize.height();
float sMin = args->_viewport.x / fbWidth;
float sWidth = args->_viewport.z / fbWidth;
float tMin = args->_viewport.y / fbHeight;
float tHeight = args->_viewport.w / fbHeight;
auto framebufferCache = DependencyManager::get<FramebufferCache>();
auto depthBuffer = framebufferCache->getPrimaryDepthTexture();
auto normalBuffer = framebufferCache->getDeferredNormalTexture();
auto pyramidFBO = framebufferCache->getDepthPyramidFramebuffer();
auto occlusionFBO = framebufferCache->getOcclusionFramebuffer();
auto occlusionBlurredFBO = framebufferCache->getOcclusionBlurredFramebuffer();
QSize framebufferSize = framebufferCache->getFrameBufferSize();
float sMin = args->_viewport.x / (float)framebufferSize.width();
float sWidth = args->_viewport.z / (float)framebufferSize.width();
float tMin = args->_viewport.y / (float)framebufferSize.height();
float tHeight = args->_viewport.w / (float)framebufferSize.height();
auto resolutionLevel = getResolutionLevel();
// Update the depth info with near and far (same for stereo)
setDepthInfo(args->_viewFrustum->getNearClip(), args->_viewFrustum->getFarClip());
_frameTransformBuffer.edit<FrameTransform>().pixelInfo = args->_viewport;
//_parametersBuffer.edit<Parameters>()._ditheringInfo.y += 0.25f;
// Running in stereo?
bool isStereo = args->_context->isStereo();
if (!isStereo) {
// Eval the mono projection
mat4 monoProjMat;
args->_viewFrustum->evalProjectionMatrix(monoProjMat);
_frameTransformBuffer.edit<FrameTransform>().projection[0] = monoProjMat;
_frameTransformBuffer.edit<FrameTransform>().stereoInfo = glm::vec4(0.0f, (float)args->_viewport.z, 0.0f, 0.0f);
} else {
mat4 projMats[2];
mat4 eyeViews[2];
args->_context->getStereoProjections(projMats);
args->_context->getStereoViews(eyeViews);
for (int i = 0; i < 2; i++) {
// Compose the mono Eye space to Stereo clip space Projection Matrix
auto sideViewMat = projMats[i] * eyeViews[i];
_frameTransformBuffer.edit<FrameTransform>().projection[i] = sideViewMat;
}
_frameTransformBuffer.edit<FrameTransform>().stereoInfo = glm::vec4(1.0f, (float)(args->_viewport.z >> 1), 0.0f, 1.0f);
}
auto pyramidPipeline = getPyramidPipeline();
auto occlusionPipeline = getOcclusionPipeline();
auto firstHBlurPipeline = getHBlurPipeline();
auto lastVBlurPipeline = getVBlurPipeline();
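// Stereo is disabled on the batch; the stereo case is handled explicitly through the per-eye projections packed into the frame transform above.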
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
batch.enableStereo(false);
_gpuTimer.begin(batch);
batch.setViewportTransform(args->_viewport);
batch.setProjectionTransform(glm::mat4());
batch.setViewTransform(Transform());
Transform model;
model.setTranslation(glm::vec3(sMin, tMin, 0.0f));
model.setScale(glm::vec3(sWidth, tHeight, 1.0f));
batch.setModelTransform(model);
batch.setUniformBuffer(AmbientOcclusionEffect_FrameTransformSlot, _frameTransformBuffer);
batch.setUniformBuffer(AmbientOcclusionEffect_ParamsSlot, _parametersBuffer);
glm::mat4 projMat;
Transform viewMat;
args->_viewFrustum->evalProjectionMatrix(projMat);
args->_viewFrustum->evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
batch.setModelTransform(Transform());
// Pyramid pass
batch.setFramebuffer(pyramidFBO);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(args->_viewFrustum->getFarClip(), 0.0f, 0.0f, 0.0f));
batch.setPipeline(pyramidPipeline);
batch.setResourceTexture(AmbientOcclusionEffect_DepthMapSlot, depthBuffer);
batch.draw(gpu::TRIANGLE_STRIP, 4);
// Occlusion step
getOcclusionPipeline();
batch.setResourceTexture(0, framebufferCache->getPrimaryDepthTexture());
batch.setResourceTexture(1, framebufferCache->getDeferredNormalTexture());
_occlusionBuffer->setRenderBuffer(0, _occlusionTexture);
batch.setFramebuffer(_occlusionBuffer);
// Make pyramid mips
batch.generateTextureMips(pyramidFBO->getRenderBuffer(0));
// Occlusion uniforms
g_scale = 1.0f;
g_bias = 1.0f;
g_sample_rad = 1.0f;
g_intensity = 1.0f;
// Adjust Viewport for rendering resolution
if (resolutionLevel > 0) {
glm::ivec4 viewport(args->_viewport.x, args->_viewport.y, args->_viewport.z >> resolutionLevel, args->_viewport.w >> resolutionLevel);
batch.setViewportTransform(viewport);
}
// Bind the first gpu::Pipeline we need - for calculating occlusion buffer
batch.setPipeline(getOcclusionPipeline());
batch._glUniform1f(_gScaleLoc, g_scale);
batch._glUniform1f(_gBiasLoc, g_bias);
batch._glUniform1f(_gSampleRadiusLoc, g_sample_rad);
batch._glUniform1f(_gIntensityLoc, g_intensity);
// Occlusion pass
batch.setFramebuffer(occlusionFBO);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(1.0f));
batch.setPipeline(occlusionPipeline);
batch.setResourceTexture(AmbientOcclusionEffect_PyramidMapSlot, pyramidFBO->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);
// setup uniforms for unpacking a view-space position from the depth buffer
// This is code taken from DeferredLightEffect.render() method in DeferredLightingEffect.cpp.
// DeferredBuffer.slh shows how the unpacking is done and what variables are needed.
if (getBlurRadius() > 0) {
// Blur 1st pass
batch.setFramebuffer(occlusionBlurredFBO);
batch.setPipeline(firstHBlurPipeline);
batch.setResourceTexture(AmbientOcclusionEffect_OcclusionMapSlot, occlusionFBO->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);
// initialize the view-space unpacking uniforms using frustum data
float left, right, bottom, top, nearVal, farVal;
glm::vec4 nearClipPlane, farClipPlane;
// Blur 2nd pass
batch.setFramebuffer(occlusionFBO);
batch.setPipeline(lastVBlurPipeline);
batch.setResourceTexture(AmbientOcclusionEffect_OcclusionMapSlot, occlusionBlurredFBO->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
_gpuTimer.end(batch);
args->_viewFrustum->computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
float depthScale = (farVal - nearVal) / farVal;
float nearScale = -1.0f / nearVal;
float depthTexCoordScaleS = (right - left) * nearScale / sWidth;
float depthTexCoordScaleT = (top - bottom) * nearScale / tHeight;
float depthTexCoordOffsetS = left * nearScale - sMin * depthTexCoordScaleS;
float depthTexCoordOffsetT = bottom * nearScale - tMin * depthTexCoordScaleT;
// now set the position-unpacking uniforms
batch._glUniform1f(_nearLoc, nearVal);
batch._glUniform1f(_depthScaleLoc, depthScale);
batch._glUniform2f(_depthTexCoordOffsetLoc, depthTexCoordOffsetS, depthTexCoordOffsetT);
batch._glUniform2f(_depthTexCoordScaleLoc, depthTexCoordScaleS, depthTexCoordScaleT);
batch._glUniform2f(_renderTargetResLoc, fbWidth, fbHeight);
batch._glUniform2f(_renderTargetResInvLoc, 1.0f / fbWidth, 1.0f / fbHeight);
glm::vec4 color(0.0f, 0.0f, 0.0f, 1.0f);
glm::vec2 bottomLeft(-1.0f, -1.0f);
glm::vec2 topRight(1.0f, 1.0f);
glm::vec2 texCoordTopLeft(0.0f, 0.0f);
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);
// Vertical blur step
getVBlurPipeline();
batch.setResourceTexture(0, _occlusionTexture);
_vBlurBuffer->setRenderBuffer(0, _vBlurTexture);
batch.setFramebuffer(_vBlurBuffer);
// Bind the second gpu::Pipeline we need - for calculating blur buffer
batch.setPipeline(getVBlurPipeline());
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);
// Horizontal blur step
getHBlurPipeline();
batch.setResourceTexture(0, _vBlurTexture);
_hBlurBuffer->setRenderBuffer(0, _hBlurTexture);
batch.setFramebuffer(_hBlurBuffer);
// Bind the third gpu::Pipeline we need - for calculating blur buffer
batch.setPipeline(getHBlurPipeline());
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);
// Blend step
getBlendPipeline();
batch.setResourceTexture(0, _hBlurTexture);
batch.setFramebuffer(framebufferCache->getDeferredFramebuffer());
// Bind the fourth gpu::Pipeline we need - for blending the primary color buffer with blurred occlusion texture
batch.setPipeline(getBlendPipeline());
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);
});
}

View file

@@ -16,53 +16,118 @@
#include "render/DrawTask.h"
class AmbientOcclusion {
class AmbientOcclusionEffect {
public:
AmbientOcclusion();
AmbientOcclusionEffect();
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
using JobModel = render::Task::Job::Model<AmbientOcclusion>;
void setResolutionLevel(int level);
int getResolutionLevel() const { return _parametersBuffer.get<Parameters>().resolutionInfo.x; }
const gpu::PipelinePointer& getOcclusionPipeline();
const gpu::PipelinePointer& getHBlurPipeline();
const gpu::PipelinePointer& getVBlurPipeline();
const gpu::PipelinePointer& getBlendPipeline();
void setRadius(float radius);
float getRadius() const { return _parametersBuffer.get<Parameters>().radiusInfo.x; }
// Obscurance level, which intensifies or dims down the obscurance effect
void setLevel(float level);
float getLevel() const { return _parametersBuffer.get<Parameters>().radiusInfo.w; }
// When on, randomizes the distribution of rays per pixel; should always be true
void setDithering(bool enabled);
bool isDitheringEnabled() const { return _parametersBuffer.get<Parameters>().ditheringInfo.x; }
// When on, avoids evaluating information from non-existent pixels outside the frame; should always be true
void setBordering(bool enabled);
bool isBorderingEnabled() const { return _parametersBuffer.get<Parameters>().ditheringInfo.w; }
// Falloff bias
void setFalloffBias(float bias);
int getFalloffBias() const { return (int)_parametersBuffer.get<Parameters>().ditheringInfo.z; }
// Number of samples per pixel to evaluate the Obscurance
void setNumSamples(int numSamples);
int getNumSamples() const { return (int)_parametersBuffer.get<Parameters>().sampleInfo.x; }
// Number of spiral turns defining the angle span used to distribute the sample ray directions
void setNumSpiralTurns(float numTurns);
float getNumSpiralTurns() const { return _parametersBuffer.get<Parameters>().sampleInfo.z; }
// Edge blurring setting
void setEdgeSharpness(float sharpness);
int getEdgeSharpness() const { return (int)_parametersBuffer.get<Parameters>().blurInfo.x; }
// Blurring Radius
// 0 means no blurring
const int MAX_BLUR_RADIUS = 6;
void setBlurRadius(int radius);
int getBlurRadius() const { return (int)_parametersBuffer.get<Parameters>().blurInfo.y; }
void setBlurDeviation(float deviation);
float getBlurDeviation() const { return _parametersBuffer.get<Parameters>().blurInfo.z; }
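// Average GPU time spent in the AO passes, as measured by the gpu::RangeTimer begun/ended in run().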
double getGPUTime() const { return _gpuTimer.getAverage(); }
using JobModel = render::Task::Job::Model<AmbientOcclusionEffect>;
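// A minimal usage sketch (illustrative only; the surrounding render-task wiring and the
// sceneContext/renderContext variables are assumed, not shown here):
//   AmbientOcclusionEffect ao;
//   ao.setResolutionLevel(1);   // half resolution
//   ao.setRadius(0.5f);         // matches the Parameters defaults below
//   ao.setBlurRadius(3);        // 0 disables the blur passes
//   ao.run(sceneContext, renderContext);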
private:
// Uniforms for AO
gpu::int32 _gScaleLoc;
gpu::int32 _gBiasLoc;
gpu::int32 _gSampleRadiusLoc;
gpu::int32 _gIntensityLoc;
void updateGaussianDistribution();
void setDepthInfo(float nearZ, float farZ);
typedef gpu::BufferView UniformBufferView;
gpu::int32 _nearLoc;
gpu::int32 _depthScaleLoc;
gpu::int32 _depthTexCoordOffsetLoc;
gpu::int32 _depthTexCoordScaleLoc;
gpu::int32 _renderTargetResLoc;
gpu::int32 _renderTargetResInvLoc;
// Class describing the uniform buffer with the transform info common to the AO shaders
// It is updated every frame
class FrameTransform {
public:
// Pixel info is { viewport width, height, and stereo on/off }
glm::vec4 pixelInfo;
// Depth info is { n.f, f - n, -f}
glm::vec4 depthInfo;
// Stereo info
glm::vec4 stereoInfo { 0.0 };
// Mono proj matrix or Left and Right proj matrix going from Mono Eye space to side clip space
glm::mat4 projection[2];
FrameTransform() {}
};
gpu::BufferView _frameTransformBuffer;
// Class describing the uniform buffer with all the parameters common to the AO shaders
class Parameters {
public:
// Resolution info
glm::vec4 resolutionInfo { -1.0f, 0.0f, 0.0f, 0.0f };
// radius info is { R, R^2, 1 / R^6, ObscuranceScale}
glm::vec4 radiusInfo{ 0.5f, 0.5f * 0.5f, 1.0f / (0.25f * 0.25f * 0.25f), 1.0f };
// Dithering info
glm::vec4 ditheringInfo { 0.0f, 0.0f, 0.01f, 1.0f };
// Sampling info
glm::vec4 sampleInfo { 11.0f, 1.0f/11.0f, 7.0f, 1.0f };
// Blurring info
glm::vec4 blurInfo { 1.0f, 3.0f, 2.0f, 0.0f };
// Gaussian distribution coefficients; first is the sampling radius (max is 6)
const static int GAUSSIAN_COEFS_LENGTH = 8;
float _gaussianCoefs[GAUSSIAN_COEFS_LENGTH];
Parameters() {}
};
gpu::BufferView _parametersBuffer;
const gpu::PipelinePointer& getPyramidPipeline();
const gpu::PipelinePointer& getOcclusionPipeline();
const gpu::PipelinePointer& getHBlurPipeline(); // first
const gpu::PipelinePointer& getVBlurPipeline(); // second
float g_scale;
float g_bias;
float g_sample_rad;
float g_intensity;
gpu::PipelinePointer _pyramidPipeline;
gpu::PipelinePointer _occlusionPipeline;
gpu::PipelinePointer _hBlurPipeline;
gpu::PipelinePointer _vBlurPipeline;
gpu::PipelinePointer _blendPipeline;
gpu::FramebufferPointer _occlusionBuffer;
gpu::FramebufferPointer _hBlurBuffer;
gpu::FramebufferPointer _vBlurBuffer;
gpu::TexturePointer _occlusionTexture;
gpu::TexturePointer _hBlurTexture;
gpu::TexturePointer _vBlurTexture;
gpu::RangeTimer _gpuTimer;
};
#endif // hifi_AmbientOcclusionEffect_h

View file

@@ -33,7 +33,10 @@ enum Slots {
Specular,
Depth,
Lighting,
Shadow
Shadow,
Pyramid,
AmbientOcclusion,
AmbientOcclusionBlurred
};
static const std::string DEFAULT_DIFFUSE_SHADER {
@@ -41,11 +44,7 @@ static const std::string DEFAULT_DIFFUSE_SHADER {
" return vec4(pow(texture(diffuseMap, uv).xyz, vec3(1.0 / 2.2)), 1.0);"
" }"
};
static const std::string DEFAULT_ALPHA_SHADER {
"vec4 getFragmentColor() {"
" return vec4(vec3(texture(diffuseMap, uv).a), 1.0);"
" }"
};
static const std::string DEFAULT_SPECULAR_SHADER {
"vec4 getFragmentColor() {"
" return vec4(texture(specularMap, uv).xyz, 1.0);"
@@ -58,7 +57,7 @@ static const std::string DEFAULT_ROUGHNESS_SHADER {
};
static const std::string DEFAULT_NORMAL_SHADER {
"vec4 getFragmentColor() {"
" return vec4(normalize(texture(normalMap, uv).xyz), 1.0);"
" return vec4(normalize(texture(normalMap, uv).xyz * 2.0 - vec3(1.0)), 1.0);"
" }"
};
static const std::string DEFAULT_DEPTH_SHADER {
@@ -71,13 +70,35 @@ static const std::string DEFAULT_LIGHTING_SHADER {
" return vec4(pow(texture(lightingMap, uv).xyz, vec3(1.0 / 2.2)), 1.0);"
" }"
};
static const std::string DEFAULT_SHADOW_SHADER {
static const std::string DEFAULT_SHADOW_SHADER{
"uniform sampler2D shadowMapColor;"
// The actual shadowMap is a sampler2DShadow, so we cannot normally sample it
"vec4 getFragmentColor() {"
" return vec4(texture(shadowMapColor, uv).xyz, 1.0);"
" }"
};
static const std::string DEFAULT_PYRAMID_DEPTH_SHADER {
"vec4 getFragmentColor() {"
" return vec4(vec3(1.0 - texture(pyramidMap, uv).x * 0.01), 1.0);"
//" return vec4(vec3(1.0 - textureLod(pyramidMap, uv, 3).x * 0.01), 1.0);"
" }"
};
static const std::string DEFAULT_AMBIENT_OCCLUSION_SHADER{
"vec4 getFragmentColor() {"
" return vec4(vec3(texture(occlusionMap, uv).x), 1.0);"
// When drawing color " return vec4(vec3(texture(occlusionMap, uv).xyz), 1.0);"
// when drawing normal " return vec4(normalize(texture(occlusionMap, uv).xyz * 2.0 - vec3(1.0)), 1.0);"
" }"
};
static const std::string DEFAULT_AMBIENT_OCCLUSION_BLURRED_SHADER{
"vec4 getFragmentColor() {"
" return vec4(vec3(texture(occlusionBlurredMap, uv).x), 1.0);"
" }"
};
static const std::string DEFAULT_CUSTOM_SHADER {
"vec4 getFragmentColor() {"
" return vec4(1.0, 0.0, 0.0, 1.0);"
@@ -108,8 +129,6 @@ std::string DebugDeferredBuffer::getShaderSourceCode(Modes mode, std::string cus
switch (mode) {
case DiffuseMode:
return DEFAULT_DIFFUSE_SHADER;
case AlphaMode:
return DEFAULT_ALPHA_SHADER;
case SpecularMode:
return DEFAULT_SPECULAR_SHADER;
case RoughnessMode:
@@ -122,6 +141,12 @@ std::string DebugDeferredBuffer::getShaderSourceCode(Modes mode, std::string cus
return DEFAULT_LIGHTING_SHADER;
case ShadowMode:
return DEFAULT_SHADOW_SHADER;
case PyramidDepthMode:
return DEFAULT_PYRAMID_DEPTH_SHADER;
case AmbientOcclusionMode:
return DEFAULT_AMBIENT_OCCLUSION_SHADER;
case AmbientOcclusionBlurredMode:
return DEFAULT_AMBIENT_OCCLUSION_BLURRED_SHADER;
case CustomMode:
return getFileContent(customFile, DEFAULT_CUSTOM_SHADER);
}
@@ -170,6 +195,9 @@ const gpu::PipelinePointer& DebugDeferredBuffer::getPipeline(Modes mode, std::st
slotBindings.insert(gpu::Shader::Binding("depthMap", Depth));
slotBindings.insert(gpu::Shader::Binding("lightingMap", Lighting));
slotBindings.insert(gpu::Shader::Binding("shadowMapColor", Shadow));
slotBindings.insert(gpu::Shader::Binding("pyramidMap", Pyramid));
slotBindings.insert(gpu::Shader::Binding("occlusionMap", AmbientOcclusion));
slotBindings.insert(gpu::Shader::Binding("occlusionBlurredMap", AmbientOcclusionBlurred));
gpu::Shader::makeProgram(*program, slotBindings);
auto pipeline = gpu::Pipeline::create(program, std::make_shared<gpu::State>());
@@ -226,7 +254,10 @@ void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const Ren
batch.setResourceTexture(Depth, framebufferCache->getPrimaryDepthTexture());
batch.setResourceTexture(Lighting, framebufferCache->getLightingTexture());
batch.setResourceTexture(Shadow, lightStage.lights[0]->shadow.framebuffer->getRenderBuffer(0));
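// Debug views added for the new AO inputs: the depth pyramid plus the raw and blurred occlusion buffers.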
batch.setResourceTexture(Pyramid, framebufferCache->getDepthPyramidTexture());
batch.setResourceTexture(AmbientOcclusion, framebufferCache->getOcclusionTexture());
batch.setResourceTexture(AmbientOcclusionBlurred, framebufferCache->getOcclusionBlurredTexture());
const glm::vec4 color(1.0f, 1.0f, 1.0f, 1.0f);
const glm::vec2 bottomLeft(renderContext->_deferredDebugSize.x, renderContext->_deferredDebugSize.y);
const glm::vec2 topRight(renderContext->_deferredDebugSize.z, renderContext->_deferredDebugSize.w);

View file

@@ -27,14 +27,15 @@ public:
private:
enum Modes : uint8_t {
DiffuseMode = 0,
AlphaMode,
SpecularMode,
RoughnessMode,
NormalMode,
DepthMode,
LightingMode,
ShadowMode,
PyramidDepthMode,
AmbientOcclusionMode,
AmbientOcclusionBlurredMode,
CustomMode // Needs to stay last
};
struct CustomPipeline {

View file

@@ -24,6 +24,9 @@ uniform sampler2D specularMap;
// the depth texture
uniform sampler2D depthMap;
// the obscurance texture
uniform sampler2D obscuranceMap;
// the lighting texture
uniform sampler2D lightingMap;
@@ -68,17 +71,21 @@ struct DeferredFragment {
vec4 position;
vec3 normal;
vec3 diffuse;
float opacity;
float obscurance;
vec3 specular;
float gloss;
int mode;
};
const int LIGHT_MAPPED = 1;
DeferredFragment unpackDeferredFragment(DeferredTransform deferredTransform, vec2 texcoord) {
DeferredFragment frag;
frag.depthVal = texture(depthMap, texcoord).r;
frag.normalVal = texture(normalMap, texcoord);
frag.diffuseVal = texture(diffuseMap, texcoord);
frag.specularVal = texture(specularMap, texcoord);
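// Sample the obscurance (ambient occlusion) once; the global-light evaluation functions below apply it to the ambient and direct terms.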
frag.obscurance = texture(obscuranceMap, texcoord).x;
if (getStereoMode(deferredTransform)) {
if (texcoord.x > 0.5) {
@@ -88,14 +95,19 @@ DeferredFragment unpackDeferredFragment(DeferredTransform deferredTransform, vec
}
frag.position = evalEyePositionFromZ(deferredTransform, frag.depthVal, texcoord);
// Unpack the normal from the map
frag.normal = normalize(frag.normalVal.xyz * 2.0 - vec3(1.0));
frag.mode = 0;
if ((frag.normalVal.a >= 0.45) && (frag.normalVal.a <= 0.55)) {
frag.mode = LIGHT_MAPPED;
}
frag.diffuse = frag.diffuseVal.xyz;
frag.opacity = frag.diffuseVal.w;
frag.specular = frag.specularVal.xyz;
frag.gloss = frag.specularVal.w;
return frag;
}

View file

@@ -70,7 +70,7 @@ uniform SphericalHarmonics ambientSphere;
<@include model/Light.slh@>
<@func declareEvalAmbientGlobalColor()@>
vec3 evalAmbientGlobalColor(mat4 invViewMat, float shadowAttenuation, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss) {
vec3 evalAmbientGlobalColor(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss) {
// Need the light now
Light light = getLight();
@@ -79,11 +79,11 @@ vec3 evalAmbientGlobalColor(mat4 invViewMat, float shadowAttenuation, vec3 posit
vec4 fragEyeVector = invViewMat * vec4(-position, 0.0);
vec3 fragEyeDir = normalize(fragEyeVector.xyz);
vec3 color = diffuse.rgb * getLightColor(light) * getLightAmbientIntensity(light);
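// Obscurance now scales the ambient term and, via min() with the shadow attenuation, also limits the direct term.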
vec3 color = diffuse.rgb * getLightColor(light) * obscurance * getLightAmbientIntensity(light);
vec4 shading = evalFragShading(fragNormal, -getLightDirection(light), fragEyeDir, specular, gloss);
color += vec3(diffuse * shading.w + shading.rgb) * shadowAttenuation * getLightColor(light) * getLightIntensity(light);
color += vec3(diffuse * shading.w + shading.rgb) * min(shadowAttenuation, obscurance) * getLightColor(light) * getLightIntensity(light);
return color;
}
@@ -93,7 +93,7 @@ vec3 evalAmbientGlobalColor(mat4 invViewMat, float shadowAttenuation, vec3 posit
<$declareSphericalHarmonics()$>
vec3 evalAmbientSphereGlobalColor(mat4 invViewMat, float shadowAttenuation, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss) {
vec3 evalAmbientSphereGlobalColor(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss) {
// Need the light now
Light light = getLight();
@@ -102,11 +102,11 @@ vec3 evalAmbientSphereGlobalColor(mat4 invViewMat, float shadowAttenuation, vec3
vec3 fragEyeDir = normalize(fragEyeVector.xyz);
vec3 ambientNormal = fragNormal.xyz;
vec3 color = diffuse.rgb * evalSphericalLight(ambientSphere, ambientNormal).xyz * getLightAmbientIntensity(light);
vec3 color = diffuse.rgb * evalSphericalLight(ambientSphere, ambientNormal).xyz * obscurance * getLightAmbientIntensity(light);
vec4 shading = evalFragShading(fragNormal, -getLightDirection(light), fragEyeDir, specular, gloss);
color += vec3(diffuse * shading.w + shading.rgb) * shadowAttenuation * getLightColor(light) * getLightIntensity(light);
color += vec3(diffuse * shading.w + shading.rgb) * min(shadowAttenuation, obscurance) * getLightColor(light) * getLightIntensity(light);
return color;
}
@@ -117,7 +117,7 @@ vec3 evalAmbientSphereGlobalColor(mat4 invViewMat, float shadowAttenuation, vec3
<$declareSkyboxMap()$>
<$declareSphericalHarmonics()$>
vec3 evalSkyboxGlobalColor(mat4 invViewMat, float shadowAttenuation, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss) {
vec3 evalSkyboxGlobalColor(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 position, vec3 normal, vec3 diffuse, vec3 specular, float gloss) {
// Need the light now
Light light = getLight();
@@ -125,18 +125,18 @@ vec3 evalSkyboxGlobalColor(mat4 invViewMat, float shadowAttenuation, vec3 positi
vec4 fragEyeVector = invViewMat * vec4(-position, 0.0);
vec3 fragEyeDir = normalize(fragEyeVector.xyz);
vec3 color = diffuse.rgb * evalSphericalLight(ambientSphere, fragNormal).xyz * getLightAmbientIntensity(light);
vec3 color = diffuse.rgb * evalSphericalLight(ambientSphere, fragNormal).xyz * obscurance * getLightAmbientIntensity(light);
vec4 shading = evalFragShading(fragNormal, -getLightDirection(light), fragEyeDir, specular, gloss);
color += vec3(diffuse * shading.w + shading.rgb) * shadowAttenuation * getLightColor(light) * getLightIntensity(light);
color += vec3(diffuse * shading.w + shading.rgb) * min(shadowAttenuation, obscurance) * getLightColor(light) * getLightIntensity(light);
return color;
}
<@endfunc@>
<@func declareEvalLightmappedColor()@>
vec3 evalLightmappedColor(mat4 invViewMat, float shadowAttenuation, vec3 normal, vec3 diffuse, vec3 lightmap) {
vec3 evalLightmappedColor(mat4 invViewMat, float shadowAttenuation, float obscurance, vec3 normal, vec3 diffuse, vec3 lightmap) {
Light light = getLight();
@@ -156,7 +156,7 @@ vec3 evalLightmappedColor(mat4 invViewMat, float shadowAttenuation, vec3 normal,
// ambient is a tiny percentage of the lightmap and is only applied when in shadow
vec3 ambientLight = (1 - lightAttenuation) * lightmap * getLightAmbientIntensity(light);
return diffuse * (ambientLight + diffuseLight);
return obscurance * diffuse * (ambientLight + diffuseLight);
}
<@endfunc@>

View file

@@ -42,13 +42,21 @@ struct LightLocations {
int radius;
int ambientSphere;
int lightBufferUnit;
int atmosphereBufferUnit;
int texcoordMat;
int coneParam;
int deferredTransformBuffer;
int shadowTransformBuffer;
};
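// Fixed texture units shared by the deferred lighting programs; the obscurance (AO) map is the newly added input.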
enum {
DEFERRED_BUFFER_COLOR_UNIT = 0,
DEFERRED_BUFFER_NORMAL_UNIT = 1,
DEFERRED_BUFFER_EMISSIVE_UNIT = 2,
DEFERRED_BUFFER_DEPTH_UNIT = 3,
DEFERRED_BUFFER_OBSCURANCE_UNIT = 4,
SHADOW_MAP_UNIT = 5,
SKYBOX_MAP_UNIT = 6,
};
static void loadLightProgram(const char* vertSource, const char* fragSource, bool lightVolume, gpu::PipelinePointer& program, LightLocationsPtr& locations);
void DeferredLightingEffect::init() {
@@ -146,7 +154,8 @@ void DeferredLightingEffect::prepare(RenderArgs* args) {
});
}
void DeferredLightingEffect::render(RenderArgs* args) {
void DeferredLightingEffect::render(const render::RenderContextPointer& renderContext) {
auto args = renderContext->getArgs();
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
// Allocate the parameters buffer used by all the deferred shaders
@@ -161,6 +170,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
// perform deferred lighting, rendering to free fbo
auto framebufferCache = DependencyManager::get<FramebufferCache>();
auto textureCache = DependencyManager::get<TextureCache>();
QSize framebufferSize = framebufferCache->getFrameBufferSize();
@@ -172,16 +182,23 @@
batch.setStateScissorRect(args->_viewport);
// Bind the G-Buffer surfaces
batch.setResourceTexture(0, framebufferCache->getDeferredColorTexture());
batch.setResourceTexture(1, framebufferCache->getDeferredNormalTexture());
batch.setResourceTexture(2, framebufferCache->getDeferredSpecularTexture());
batch.setResourceTexture(3, framebufferCache->getPrimaryDepthTexture());
batch.setResourceTexture(DEFERRED_BUFFER_COLOR_UNIT, framebufferCache->getDeferredColorTexture());
batch.setResourceTexture(DEFERRED_BUFFER_NORMAL_UNIT, framebufferCache->getDeferredNormalTexture());
batch.setResourceTexture(DEFERRED_BUFFER_EMISSIVE_UNIT, framebufferCache->getDeferredSpecularTexture());
batch.setResourceTexture(DEFERRED_BUFFER_DEPTH_UNIT, framebufferCache->getPrimaryDepthTexture());
// Need to assign the white texture if AO is off
if (renderContext->getOcclusionStatus()) {
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, framebufferCache->getOcclusionTexture());
} else {
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, textureCache->getWhiteTexture());
}
assert(_lightStage.lights.size() > 0);
const auto& globalShadow = _lightStage.lights[0]->shadow;
// Bind the shadow buffer
batch.setResourceTexture(4, globalShadow.map);
batch.setResourceTexture(SHADOW_MAP_UNIT, globalShadow.map);
// The main viewport is assumed to be the mono viewport (or the 2 stereo faces side by side within that viewport)
auto monoViewport = args->_viewport;
@@ -323,16 +340,13 @@ void DeferredLightingEffect::render(RenderArgs* args) {
}
if (useSkyboxCubemap) {
batch.setResourceTexture(5, _skybox->getCubemap());
batch.setResourceTexture(SKYBOX_MAP_UNIT, _skybox->getCubemap());
}
if (locations->lightBufferUnit >= 0) {
batch.setUniformBuffer(locations->lightBufferUnit, globalLight->getSchemaBuffer());
}
if (_atmosphere && (locations->atmosphereBufferUnit >= 0)) {
batch.setUniformBuffer(locations->atmosphereBufferUnit, _atmosphere->getDataBuffer());
}
}
{
@@ -345,7 +359,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
}
if (useSkyboxCubemap) {
batch.setResourceTexture(5, nullptr);
batch.setResourceTexture(SKYBOX_MAP_UNIT, nullptr);
}
}
@@ -461,10 +475,14 @@ void DeferredLightingEffect::render(RenderArgs* args) {
}
// Probably not necessary in the long run because the gpu layer would unbind this texture if used as a render target
batch.setResourceTexture(0, nullptr);
batch.setResourceTexture(1, nullptr);
batch.setResourceTexture(2, nullptr);
batch.setResourceTexture(3, nullptr);
batch.setResourceTexture(DEFERRED_BUFFER_COLOR_UNIT, nullptr);
batch.setResourceTexture(DEFERRED_BUFFER_NORMAL_UNIT, nullptr);
batch.setResourceTexture(DEFERRED_BUFFER_EMISSIVE_UNIT, nullptr);
batch.setResourceTexture(DEFERRED_BUFFER_DEPTH_UNIT, nullptr);
batch.setResourceTexture(DEFERRED_BUFFER_OBSCURANCE_UNIT, nullptr);
batch.setResourceTexture(SHADOW_MAP_UNIT, nullptr);
batch.setResourceTexture(SKYBOX_MAP_UNIT, nullptr);
batch.setUniformBuffer(_directionalLightLocations->deferredTransformBuffer, nullptr);
});
@@ -489,18 +507,17 @@ static void loadLightProgram(const char* vertSource, const char* fragSource, boo
gpu::ShaderPointer program = gpu::Shader::createProgram(VS, PS);
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("diffuseMap"), 0));
slotBindings.insert(gpu::Shader::Binding(std::string("normalMap"), 1));
slotBindings.insert(gpu::Shader::Binding(std::string("specularMap"), 2));
slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), 3));
slotBindings.insert(gpu::Shader::Binding(std::string("shadowMap"), 4));
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), 5));
slotBindings.insert(gpu::Shader::Binding(std::string("diffuseMap"), DEFERRED_BUFFER_COLOR_UNIT));
slotBindings.insert(gpu::Shader::Binding(std::string("normalMap"), DEFERRED_BUFFER_NORMAL_UNIT));
slotBindings.insert(gpu::Shader::Binding(std::string("specularMap"), DEFERRED_BUFFER_EMISSIVE_UNIT));
slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), DEFERRED_BUFFER_DEPTH_UNIT));
slotBindings.insert(gpu::Shader::Binding(std::string("obscuranceMap"), DEFERRED_BUFFER_OBSCURANCE_UNIT));
slotBindings.insert(gpu::Shader::Binding(std::string("shadowMap"), SHADOW_MAP_UNIT));
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), SKYBOX_MAP_UNIT));
static const int LIGHT_GPU_SLOT = 3;
static const int ATMOSPHERE_GPU_SLOT = 4;
static const int DEFERRED_TRANSFORM_BUFFER_SLOT = 2;
slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), LIGHT_GPU_SLOT));
slotBindings.insert(gpu::Shader::Binding(std::string("atmosphereBufferUnit"), ATMOSPHERE_GPU_SLOT));
slotBindings.insert(gpu::Shader::Binding(std::string("deferredTransformBuffer"), DEFERRED_TRANSFORM_BUFFER_SLOT));
gpu::Shader::makeProgram(*program, slotBindings);
@@ -512,7 +529,6 @@ static void loadLightProgram(const char* vertSource, const char* fragSource, boo
locations->coneParam = program->getUniforms().findLocation("coneParam");
locations->lightBufferUnit = program->getBuffers().findLocation("lightBuffer");
locations->atmosphereBufferUnit = program->getBuffers().findLocation("atmosphereBufferUnit");
locations->deferredTransformBuffer = program->getBuffers().findLocation("deferredTransformBuffer");
locations->shadowTransformBuffer = program->getBuffers().findLocation("shadowTransformBuffer");

View file

@@ -21,6 +21,8 @@
#include "model/Stage.h"
#include "model/Geometry.h"
#include "render/Context.h"
#include "LightStage.h"
class RenderArgs;
@@ -42,14 +44,13 @@ public:
float intensity = 0.5f, const glm::quat& orientation = glm::quat(), float exponent = 0.0f, float cutoff = PI);
void prepare(RenderArgs* args);
void render(RenderArgs* args);
void render(const render::RenderContextPointer& renderContext);
void setupTransparent(RenderArgs* args, int lightBufferUnit);
// update global lighting
void setAmbientLightMode(int preset);
void setGlobalLight(const glm::vec3& direction, const glm::vec3& diffuse, float intensity, float ambientIntensity);
void setGlobalAtmosphere(const model::AtmospherePointer& atmosphere) { _atmosphere = atmosphere; }
void setGlobalSkybox(const model::SkyboxPointer& skybox);
const LightStage& getLightStage() { return _lightStage; }
@@ -84,7 +85,6 @@ private:
std::vector<int> _spotLights;
int _ambientLightMode = 0;
model::AtmospherePointer _atmosphere;
model::SkyboxPointer _skybox;
// Class describing the uniform buffer with all the parameters common to the deferred shaders

View file

@@ -1,256 +0,0 @@
//
// Environment.cpp
// interface/src
//
// Created by Andrzej Kapolka on 5/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QByteArray>
#include <QMutexLocker>
#include <QtDebug>
#include "GeometryCache.h"
#include <GeometryUtil.h>
#include <NumericalConstants.h>
#include <OctreePacketData.h>
#include <udt/PacketHeaders.h>
#include <PathUtils.h>
#include <SharedUtil.h>
#include "Environment.h"
#include "SkyFromSpace_vert.h"
#include "SkyFromSpace_frag.h"
#include "SkyFromAtmosphere_vert.h"
#include "SkyFromAtmosphere_frag.h"
Environment::Environment()
: _initialized(false) {
}
Environment::~Environment() {
}
void Environment::init() {
if (_initialized) {
return;
}
setupAtmosphereProgram(SkyFromSpace_vert, SkyFromSpace_frag, _skyFromSpaceProgram, _skyFromSpaceUniformLocations);
setupAtmosphereProgram(SkyFromAtmosphere_vert, SkyFromAtmosphere_frag, _skyFromAtmosphereProgram, _skyFromAtmosphereUniformLocations);
// start off with a default-constructed environment data
_data[QUuid()][0];
_initialized = true;
}
void Environment::setupAtmosphereProgram(const char* vertSource, const char* fragSource, gpu::PipelinePointer& pipeline, int* locations) {
auto VS = gpu::Shader::createVertex(std::string(vertSource));
auto PS = gpu::Shader::createPixel(std::string(fragSource));
gpu::ShaderPointer program = gpu::Shader::createProgram(VS, PS);
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_NONE);
state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
pipeline = gpu::Pipeline::create(program, state);
locations[CAMERA_POS_LOCATION] = program->getUniforms().findLocation("v3CameraPos");
locations[LIGHT_POS_LOCATION] = program->getUniforms().findLocation("v3LightPos");
locations[INV_WAVELENGTH_LOCATION] = program->getUniforms().findLocation("v3InvWavelength");
locations[CAMERA_HEIGHT2_LOCATION] = program->getUniforms().findLocation("fCameraHeight2");
locations[OUTER_RADIUS_LOCATION] = program->getUniforms().findLocation("fOuterRadius");
locations[OUTER_RADIUS2_LOCATION] = program->getUniforms().findLocation("fOuterRadius2");
locations[INNER_RADIUS_LOCATION] = program->getUniforms().findLocation("fInnerRadius");
locations[KR_ESUN_LOCATION] = program->getUniforms().findLocation("fKrESun");
locations[KM_ESUN_LOCATION] = program->getUniforms().findLocation("fKmESun");
locations[KR_4PI_LOCATION] = program->getUniforms().findLocation("fKr4PI");
locations[KM_4PI_LOCATION] = program->getUniforms().findLocation("fKm4PI");
locations[SCALE_LOCATION] = program->getUniforms().findLocation("fScale");
locations[SCALE_DEPTH_LOCATION] = program->getUniforms().findLocation("fScaleDepth");
locations[SCALE_OVER_SCALE_DEPTH_LOCATION] = program->getUniforms().findLocation("fScaleOverScaleDepth");
locations[G_LOCATION] = program->getUniforms().findLocation("g");
locations[G2_LOCATION] = program->getUniforms().findLocation("g2");
}
void Environment::resetToDefault() {
_data.clear();
_data[QUuid()][0];
}
void Environment::renderAtmospheres(gpu::Batch& batch, ViewFrustum& viewFrustum) {
// get the lock for the duration of the call
QMutexLocker locker(&_mutex);
if (_environmentIsOverridden) {
renderAtmosphere(batch, viewFrustum, _overrideData);
} else {
foreach (const ServerData& serverData, _data) {
// TODO: do something about EnvironmentData
foreach (const EnvironmentData& environmentData, serverData) {
renderAtmosphere(batch, viewFrustum, environmentData);
}
}
}
}
EnvironmentData Environment::getClosestData(const glm::vec3& position) {
if (_environmentIsOverridden) {
return _overrideData;
}
// get the lock for the duration of the call
QMutexLocker locker(&_mutex);
EnvironmentData closest;
float closestDistance = FLT_MAX;
foreach (const ServerData& serverData, _data) {
foreach (const EnvironmentData& environmentData, serverData) {
float distance = glm::distance(position, environmentData.getAtmosphereCenter(position)) -
environmentData.getAtmosphereOuterRadius();
if (distance < closestDistance) {
closest = environmentData;
closestDistance = distance;
}
}
}
return closest;
}
// NOTE: Deprecated - I'm leaving this in for now, but it's not actually used. I made it private
// so that if anyone wants to start using this in the future they will consider how to make it
// work with new physics systems.
glm::vec3 Environment::getGravity (const glm::vec3& position) {
//
// 'Default' gravity pulls you downward in Y when you are near the X/Z plane
const glm::vec3 DEFAULT_GRAVITY(0.0f, -1.0f, 0.0f);
glm::vec3 gravity(DEFAULT_GRAVITY);
float DEFAULT_SURFACE_RADIUS = 30.0f;
float gravityStrength;
// Weaken gravity with height
if (position.y > 0.0f) {
gravityStrength = 1.0f / powf((DEFAULT_SURFACE_RADIUS + position.y) / DEFAULT_SURFACE_RADIUS, 2.0f);
gravity *= gravityStrength;
}
// get the lock for the duration of the call
QMutexLocker locker(&_mutex);
foreach (const ServerData& serverData, _data) {
foreach (const EnvironmentData& environmentData, serverData) {
glm::vec3 vector = environmentData.getAtmosphereCenter(position) - position;
float surfaceRadius = environmentData.getAtmosphereInnerRadius();
if (glm::length(vector) <= surfaceRadius) {
// At or inside a planet, gravity is as set for the planet
gravity += glm::normalize(vector) * environmentData.getGravity();
} else {
// Outside a planet, the gravity falls off with distance
gravityStrength = 1.0f / powf(glm::length(vector) / surfaceRadius, 2.0f);
gravity += glm::normalize(vector) * environmentData.getGravity() * gravityStrength;
}
}
}
return gravity;
}
bool Environment::findCapsulePenetration(const glm::vec3& start, const glm::vec3& end,
float radius, glm::vec3& penetration) {
// collide with the "floor"
bool found = findCapsulePlanePenetration(start, end, radius, glm::vec4(0.0f, 1.0f, 0.0f, 0.0f), penetration);
glm::vec3 middle = (start + end) * 0.5f;
// get the lock for the duration of the call
QMutexLocker locker(&_mutex);
foreach (const ServerData& serverData, _data) {
foreach (const EnvironmentData& environmentData, serverData) {
if (environmentData.getGravity() == 0.0f) {
continue; // don't bother colliding with gravity-less environments
}
glm::vec3 environmentPenetration;
if (findCapsuleSpherePenetration(start, end, radius, environmentData.getAtmosphereCenter(middle),
environmentData.getAtmosphereInnerRadius(), environmentPenetration)) {
penetration = addPenetrations(penetration, environmentPenetration);
found = true;
}
}
}
return found;
}
void Environment::renderAtmosphere(gpu::Batch& batch, ViewFrustum& viewFrustum, const EnvironmentData& data) {
glm::vec3 center = data.getAtmosphereCenter();
// transform the model transform to the center of our atmosphere
Transform transform;
transform.setTranslation(center);
batch.setModelTransform(transform);
// Make sure our view and projection transforms are correct for our viewFrustum
Transform viewTransform;
viewFrustum.evalViewTransform(viewTransform);
batch.setViewTransform(viewTransform);
glm::mat4 projMat;
viewFrustum.evalProjectionMatrix(projMat);
batch.setProjectionTransform(projMat);
glm::vec3 relativeCameraPos = viewFrustum.getPosition() - center;
float height = glm::length(relativeCameraPos);
// use the appropriate shader depending on whether we're inside or outside
int* locations;
if (height < data.getAtmosphereOuterRadius()) {
batch.setPipeline(_skyFromAtmosphereProgram);
locations = _skyFromAtmosphereUniformLocations;
} else {
batch.setPipeline(_skyFromSpaceProgram);
locations = _skyFromSpaceUniformLocations;
}
// the constants here are from Sean O'Neil's GPU Gems entry
// (http://http.developer.nvidia.com/GPUGems2/gpugems2_chapter16.html), GameEngine.cpp
batch._glUniform3f(locations[CAMERA_POS_LOCATION], relativeCameraPos.x, relativeCameraPos.y, relativeCameraPos.z);
glm::vec3 lightDirection = glm::normalize(data.getSunLocation());
batch._glUniform3f(locations[LIGHT_POS_LOCATION], lightDirection.x, lightDirection.y, lightDirection.z);
batch._glUniform3f(locations[INV_WAVELENGTH_LOCATION],
1 / powf(data.getScatteringWavelengths().r, 4.0f),
1 / powf(data.getScatteringWavelengths().g, 4.0f),
1 / powf(data.getScatteringWavelengths().b, 4.0f));
batch._glUniform1f(locations[CAMERA_HEIGHT2_LOCATION], height * height);
batch._glUniform1f(locations[OUTER_RADIUS_LOCATION], data.getAtmosphereOuterRadius());
batch._glUniform1f(locations[OUTER_RADIUS2_LOCATION], data.getAtmosphereOuterRadius() * data.getAtmosphereOuterRadius());
batch._glUniform1f(locations[INNER_RADIUS_LOCATION], data.getAtmosphereInnerRadius());
batch._glUniform1f(locations[KR_ESUN_LOCATION], data.getRayleighScattering() * data.getSunBrightness());
batch._glUniform1f(locations[KM_ESUN_LOCATION], data.getMieScattering() * data.getSunBrightness());
batch._glUniform1f(locations[KR_4PI_LOCATION], data.getRayleighScattering() * 4.0f * PI);
batch._glUniform1f(locations[KM_4PI_LOCATION], data.getMieScattering() * 4.0f * PI);
batch._glUniform1f(locations[SCALE_LOCATION], 1.0f / (data.getAtmosphereOuterRadius() - data.getAtmosphereInnerRadius()));
batch._glUniform1f(locations[SCALE_DEPTH_LOCATION], 0.25f);
batch._glUniform1f(locations[SCALE_OVER_SCALE_DEPTH_LOCATION],
(1.0f / (data.getAtmosphereOuterRadius() - data.getAtmosphereInnerRadius())) / 0.25f);
batch._glUniform1f(locations[G_LOCATION], -0.990f);
batch._glUniform1f(locations[G2_LOCATION], -0.990f * -0.990f);
batch._glColor4f(1.0f, 0.0f, 0.0f, 0.5f);
DependencyManager::get<GeometryCache>()->renderSphere(batch); //Draw a unit sphere
}

View file

@@ -1,85 +0,0 @@
//
// Environment.h
// interface/src
//
// Created by Andrzej Kapolka on 5/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Environment_h
#define hifi_Environment_h
#include <QHash>
#include <QMutex>
#include <HifiSockAddr.h>
#include <gpu/Batch.h>
#include <EnvironmentData.h>
class ViewFrustum;
class Environment {
public:
Environment();
~Environment();
void init();
void resetToDefault();
void renderAtmospheres(gpu::Batch& batch, ViewFrustum& viewFrustum);
void override(const EnvironmentData& overrideData) { _overrideData = overrideData; _environmentIsOverridden = true; }
void endOverride() { _environmentIsOverridden = false; }
EnvironmentData getClosestData(const glm::vec3& position);
private:
glm::vec3 getGravity (const glm::vec3& position); // NOTE: Deprecated
bool findCapsulePenetration(const glm::vec3& start,
const glm::vec3& end, float radius, glm::vec3& penetration); // NOTE: Deprecated
void renderAtmosphere(gpu::Batch& batch, ViewFrustum& viewFrustum, const EnvironmentData& data);
bool _initialized;
enum {
CAMERA_POS_LOCATION,
LIGHT_POS_LOCATION,
INV_WAVELENGTH_LOCATION,
CAMERA_HEIGHT2_LOCATION,
OUTER_RADIUS_LOCATION,
OUTER_RADIUS2_LOCATION,
INNER_RADIUS_LOCATION,
KR_ESUN_LOCATION,
KM_ESUN_LOCATION,
KR_4PI_LOCATION,
KM_4PI_LOCATION,
SCALE_LOCATION,
SCALE_DEPTH_LOCATION,
SCALE_OVER_SCALE_DEPTH_LOCATION,
G_LOCATION,
G2_LOCATION,
LOCATION_COUNT
};
void setupAtmosphereProgram(const char* vertSource, const char* fragSource, gpu::PipelinePointer& pipelineProgram, int* locations);
gpu::PipelinePointer _skyFromAtmosphereProgram;
gpu::PipelinePointer _skyFromSpaceProgram;
int _skyFromAtmosphereUniformLocations[LOCATION_COUNT];
int _skyFromSpaceUniformLocations[LOCATION_COUNT];
typedef QHash<int, EnvironmentData> ServerData;
QHash<QUuid, ServerData> _data;
EnvironmentData _overrideData;
bool _environmentIsOverridden = false;
QMutex _mutex;
};
#endif // hifi_Environment_h

View file

@@ -45,6 +45,12 @@ void FramebufferCache::setFrameBufferSize(QSize frameBufferSize) {
_cachedFramebuffers.clear();
_lightingTexture.reset();
_lightingFramebuffer.reset();
_depthPyramidFramebuffer.reset();
_depthPyramidTexture.reset();
_occlusionFramebuffer.reset();
_occlusionTexture.reset();
_occlusionBlurredFramebuffer.reset();
_occlusionBlurredTexture.reset();
}
}
@@ -96,6 +102,42 @@ void FramebufferCache::createPrimaryFramebuffer() {
_lightingFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
_lightingFramebuffer->setRenderBuffer(0, _lightingTexture);
_lightingFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
// For AO:
auto pointMipSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_POINT);
_depthPyramidTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::RGB), width, height, pointMipSampler));
_depthPyramidFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
_depthPyramidFramebuffer->setRenderBuffer(0, _depthPyramidTexture);
_depthPyramidFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
resizeAmbientOcclusionBuffers();
}
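// (Re)allocates the raw and blurred occlusion targets at the resolution implied by _AOResolutionLevel (half size by default).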
void FramebufferCache::resizeAmbientOcclusionBuffers() {
_occlusionFramebuffer.reset();
_occlusionTexture.reset();
_occlusionBlurredFramebuffer.reset();
_occlusionBlurredTexture.reset();
auto width = _frameBufferSize.width() >> _AOResolutionLevel;
auto height = _frameBufferSize.height() >> _AOResolutionLevel;
auto colorFormat = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGB);
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR);
auto depthFormat = gpu::Element(gpu::SCALAR, gpu::UINT32, gpu::DEPTH_STENCIL); // Depth24_Stencil8 texel format
_occlusionTexture = gpu::TexturePointer(gpu::Texture::create2D(colorFormat, width, height, defaultSampler));
_occlusionFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
_occlusionFramebuffer->setRenderBuffer(0, _occlusionTexture);
_occlusionFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
_occlusionBlurredTexture = gpu::TexturePointer(gpu::Texture::create2D(colorFormat, width, height, defaultSampler));
_occlusionBlurredFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
_occlusionBlurredFramebuffer->setRenderBuffer(0, _occlusionBlurredTexture);
_occlusionBlurredFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
}
gpu::FramebufferPointer FramebufferCache::getPrimaryFramebuffer() {
@ -189,3 +231,54 @@ gpu::FramebufferPointer FramebufferCache::getSelfieFramebuffer() {
}
return _selfieFramebuffer;
}
gpu::FramebufferPointer FramebufferCache::getDepthPyramidFramebuffer() {
if (!_depthPyramidFramebuffer) {
createPrimaryFramebuffer();
}
return _depthPyramidFramebuffer;
}
gpu::TexturePointer FramebufferCache::getDepthPyramidTexture() {
if (!_depthPyramidTexture) {
createPrimaryFramebuffer();
}
return _depthPyramidTexture;
}
void FramebufferCache::setAmbientOcclusionResolutionLevel(int level) {
const int MAX_AO_RESOLUTION_LEVEL = 4;
level = std::max(0, std::min(level, MAX_AO_RESOLUTION_LEVEL));
if (level != _AOResolutionLevel) {
_AOResolutionLevel = level;
resizeAmbientOcclusionBuffers();
}
}
gpu::FramebufferPointer FramebufferCache::getOcclusionFramebuffer() {
if (!_occlusionFramebuffer) {
resizeAmbientOcclusionBuffers();
}
return _occlusionFramebuffer;
}
gpu::TexturePointer FramebufferCache::getOcclusionTexture() {
if (!_occlusionTexture) {
resizeAmbientOcclusionBuffers();
}
return _occlusionTexture;
}
gpu::FramebufferPointer FramebufferCache::getOcclusionBlurredFramebuffer() {
if (!_occlusionBlurredFramebuffer) {
resizeAmbientOcclusionBuffers();
}
return _occlusionBlurredFramebuffer;
}
gpu::TexturePointer FramebufferCache::getOcclusionBlurredTexture() {
if (!_occlusionBlurredTexture) {
resizeAmbientOcclusionBuffers();
}
return _occlusionBlurredTexture;
}
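
For context on the sizing above: each AO resolution level halves both dimensions of the occlusion targets via a right shift, and setAmbientOcclusionResolutionLevel clamps the requested level to [0, 4]. Below is a small standalone sketch of that sizing math; the frame dimensions are illustrative values, not taken from this diff.

// Sketch only: reproduces the AO render-target sizing used above.
#include <algorithm>
#include <cstdio>

int main() {
    const int frameWidth = 1920, frameHeight = 1080;   // example frame buffer size (assumption)
    const int MAX_AO_RESOLUTION_LEVEL = 4;             // same constant as setAmbientOcclusionResolutionLevel
    for (int requested = 0; requested <= 5; ++requested) {
        int level = std::max(0, std::min(requested, MAX_AO_RESOLUTION_LEVEL)); // clamp as in the setter
        printf("requested %d -> level %d -> %dx%d\n",
               requested, level, frameWidth >> level, frameHeight >> level);   // level 1 = half res
    }
    return 0;
}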

View file

@ -44,7 +44,16 @@ public:
gpu::TexturePointer getDeferredNormalTexture();
gpu::TexturePointer getDeferredSpecularTexture();
gpu::FramebufferPointer getDepthPyramidFramebuffer();
gpu::TexturePointer getDepthPyramidTexture();
void setAmbientOcclusionResolutionLevel(int level);
gpu::FramebufferPointer getOcclusionFramebuffer();
gpu::TexturePointer getOcclusionTexture();
gpu::FramebufferPointer getOcclusionBlurredFramebuffer();
gpu::TexturePointer getOcclusionBlurredTexture();
gpu::TexturePointer getLightingTexture();
gpu::FramebufferPointer getLightingFramebuffer();
@ -83,7 +92,22 @@ private:
gpu::FramebufferPointer _selfieFramebuffer;
gpu::FramebufferPointer _depthPyramidFramebuffer;
gpu::TexturePointer _depthPyramidTexture;
gpu::FramebufferPointer _occlusionFramebuffer;
gpu::TexturePointer _occlusionTexture;
gpu::FramebufferPointer _occlusionBlurredFramebuffer;
gpu::TexturePointer _occlusionBlurredTexture;
QSize _frameBufferSize{ 100, 100 };
int _AOResolutionLevel = 1; // AO performed at half res
// Resize/reallocate the buffers used for AO
// the size of the AO buffers is scaled down by _AOResolutionLevel
void resizeAmbientOcclusionBuffers();
};
#endif // hifi_FramebufferCache_h

View file

@ -64,11 +64,10 @@ void PrepareDeferred::run(const SceneContextPointer& sceneContext, const RenderC
}
void RenderDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
DependencyManager::get<DeferredLightingEffect>()->render(renderContext->getArgs());
DependencyManager::get<DeferredLightingEffect>()->render(renderContext);
}
void ToneMappingDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
PerformanceTimer perfTimer("ToneMappingDeferred");
_toneMappingEffect.render(renderContext->getArgs());
}
@ -108,17 +107,17 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) : Task() {
// Use Stencil and start drawing background in Lighting buffer
addJob<DrawBackgroundDeferred>("DrawBackgroundDeferred");
// AO job
addJob<AmbientOcclusionEffect>("AmbientOcclusion");
_jobs.back().setEnabled(false);
_occlusionJobIndex = (int)_jobs.size() - 1;
// Draw Lights just adds the lights to the current list of lights to deal with. Not really a gpu job for now.
addJob<DrawLight>("DrawLight", cullFunctor);
// DeferredBuffer is complete, now let's shade it into the LightingBuffer
addJob<RenderDeferred>("RenderDeferred");
// AO job, to be revisited
addJob<AmbientOcclusion>("AmbientOcclusion");
_occlusionJobIndex = (int)_jobs.size() - 1;
enableJob(_occlusionJobIndex, false);
// AA job to be revisited
addJob<Antialiasing>("Antialiasing");
_antialiasingJobIndex = (int)_jobs.size() - 1;
@ -173,15 +172,36 @@ void RenderDeferredTask::run(const SceneContextPointer& sceneContext, const Rend
setDrawHitEffect(renderContext->getDrawHitEffect());
// TODO: turn on/off AO through menu item
setOcclusionStatus(renderContext->getOcclusionStatus());
if (_occlusionJobIndex >= 0) {
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setResolutionLevel(renderContext->getAmbientOcclusion().resolutionLevel);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setRadius(renderContext->getAmbientOcclusion().radius);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setLevel(renderContext->getAmbientOcclusion().level);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setNumSamples(renderContext->getAmbientOcclusion().numSamples);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setNumSpiralTurns(renderContext->getAmbientOcclusion().numSpiralTurns);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setDithering(renderContext->getAmbientOcclusion().ditheringEnabled);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setFalloffBias(renderContext->getAmbientOcclusion().falloffBias);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setEdgeSharpness(renderContext->getAmbientOcclusion().edgeSharpness);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setBlurRadius(renderContext->getAmbientOcclusion().blurRadius);
_jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().setBlurDeviation(renderContext->getAmbientOcclusion().blurDeviation);
}
setAntialiasingStatus(renderContext->getFxaaStatus());
setToneMappingExposure(renderContext->getTone().exposure);
setToneMappingToneCurve(renderContext->getTone().toneCurve);
// TODO: Allow runtime manipulation of culling ShouldRenderFunctor
renderContext->getArgs()->_context->syncCache();
for (auto job : _jobs) {
job.run(sceneContext, renderContext);
}
if (_occlusionJobIndex >= 0 && renderContext->getOcclusionStatus()) {
renderContext->getAmbientOcclusion().gpuTime = _jobs[_occlusionJobIndex].edit<AmbientOcclusionEffect>().getGPUTime();
} else {
renderContext->getAmbientOcclusion().gpuTime = 0.0;
}
};
void DrawOpaqueDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const ItemIDsBounds& inItems) {
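
The forwarding block above follows a simple pattern: the task records the index at which the AO job was appended and, every frame, pushes the render-context settings into that job before running the job list. A tiny self-contained sketch of that pattern follows; the types and names here are purely illustrative, not the engine's actual classes.

#include <cstdio>
#include <vector>

// Illustrative stand-in for a job that accepts per-frame parameters.
struct AmbientOcclusionJob {
    bool enabled = false;
    float radius = 0.5f;
    void setRadius(float r) { radius = r; }
};

int main() {
    std::vector<AmbientOcclusionJob> jobs(3);
    const int occlusionJobIndex = 1;           // remembered when the job was added
    jobs[occlusionJobIndex].enabled = true;    // toggled from the render context
    jobs[occlusionJobIndex].setRadius(0.7f);   // parameters forwarded each frame, as in run() above
    printf("AO enabled=%d radius=%.2f\n",
           (int)jobs[occlusionJobIndex].enabled, jobs[occlusionJobIndex].radius);
    return 0;
}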

View file

@ -42,11 +42,15 @@ QString RenderScripting::Tone::getCurve() const {
render::RenderContext RenderScriptingInterface::getRenderContext() {
render::RenderContext::ItemsConfig items{ *_opaque, *_transparent, *_overlay3D };
return render::RenderContext{ items, *_tone, _drawStatus, _drawHitEffect, _deferredDebugSize, _deferredDebugMode };
return render::RenderContext{ items, *_tone, *_ambientOcclusion, _drawStatus, _drawHitEffect, _deferredDebugSize, _deferredDebugMode };
}
void RenderScriptingInterface::setItemCounts(const render::RenderContext::ItemsConfig& items) {
_opaque->setCounts(items.opaque);
_transparent->setCounts(items.transparent);
_overlay3D->setCounts(items.overlay3D);
}
void RenderScriptingInterface::setJobGPUTimes(double aoTime) {
_ambientOcclusion->gpuTime = aoTime;
}

View file

@ -65,6 +65,24 @@ namespace RenderScripting {
void setCurve(const QString& curve);
};
using TonePointer = std::unique_ptr<Tone>;
class AmbientOcclusion : public QObject, public render::RenderContext::AmbientOcclusion {
Q_OBJECT
public:
Q_PROPERTY(int resolutionLevel MEMBER resolutionLevel)
Q_PROPERTY(float radius MEMBER radius)
Q_PROPERTY(float level MEMBER level)
Q_PROPERTY(int numSamples MEMBER numSamples)
Q_PROPERTY(float numSpiralTurns MEMBER numSpiralTurns)
Q_PROPERTY(bool ditheringEnabled MEMBER ditheringEnabled)
Q_PROPERTY(float falloffBias MEMBER falloffBias)
Q_PROPERTY(float edgeSharpness MEMBER edgeSharpness)
Q_PROPERTY(int blurRadius MEMBER blurRadius)
Q_PROPERTY(float blurDeviation MEMBER blurDeviation)
Q_PROPERTY(double gpuTime MEMBER gpuTime)
};
using AmbientOcclusionPointer = std::unique_ptr<AmbientOcclusion>;
};
class RenderScriptingInterface : public QObject, public Dependency {
@ -77,7 +95,8 @@ class RenderScriptingInterface : public QObject, public Dependency {
Q_PROPERTY(RenderScripting::ItemCounter* overlay3D READ getOverlay3D)
Q_PROPERTY(RenderScripting::Tone* tone READ getTone)
Q_PROPERTY(RenderScripting::AmbientOcclusion* ambientOcclusion READ getAmbientOcclusion)
Q_PROPERTY(int displayItemStatus MEMBER _drawStatus)
Q_PROPERTY(bool displayHitEffect MEMBER _drawHitEffect)
@ -87,6 +106,9 @@ class RenderScriptingInterface : public QObject, public Dependency {
render::RenderContext getRenderContext();
void setItemCounts(const render::RenderContext::ItemsConfig& items);
// FIXME: This is ugly; we need a cleaner solution
void setJobGPUTimes(double aoTime);
protected:
RenderScriptingInterface();
~RenderScriptingInterface() {};
@ -96,12 +118,15 @@ protected:
RenderScripting::ItemCounter* getOverlay3D() const { return _overlay3D.get(); }
RenderScripting::Tone* getTone() const { return _tone.get(); }
RenderScripting::AmbientOcclusion* getAmbientOcclusion() const { return _ambientOcclusion.get(); }
RenderScripting::ItemStatePointer _opaque = RenderScripting::ItemStatePointer{new RenderScripting::ItemState{}};
RenderScripting::ItemStatePointer _transparent = RenderScripting::ItemStatePointer{new RenderScripting::ItemState{}};
RenderScripting::ItemCounterPointer _overlay3D = RenderScripting::ItemCounterPointer{new RenderScripting::ItemCounter{}};
RenderScripting::TonePointer _tone = RenderScripting::TonePointer{ new RenderScripting::Tone{} };
RenderScripting::AmbientOcclusionPointer _ambientOcclusion = RenderScripting::AmbientOcclusionPointer{ new RenderScripting::AmbientOcclusion{} };
// Options
int _drawStatus = 0;
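
The MEMBER-based Q_PROPERTY declarations above are what expose the AO settings through Qt's generic property system (and, by extension, to scripts). A minimal single-file sketch, assuming a build with AUTOMOC; the class below is an illustration rather than the engine's actual type.

#include <QCoreApplication>
#include <QDebug>
#include <QObject>

// Illustrative stand-in for the AmbientOcclusion settings object above.
class AmbientOcclusionParams : public QObject {
    Q_OBJECT
    Q_PROPERTY(int resolutionLevel MEMBER resolutionLevel)
    Q_PROPERTY(float radius MEMBER radius)
public:
    int resolutionLevel = 1;
    float radius = 0.5f;
};

int main(int argc, char* argv[]) {
    QCoreApplication app(argc, argv);
    AmbientOcclusionParams ao;
    ao.setProperty("radius", 0.7f);                 // generic write, no dedicated setter required
    qDebug() << ao.property("resolutionLevel")      // generic read through the property system
             << ao.property("radius");
    return 0;
}

#include "main.moc" // required because a Q_OBJECT class is defined in this .cpp (assumes AUTOMOC)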

View file

@ -13,6 +13,7 @@
#include <ViewFrustum.h>
#include "render/Context.h"
#include "DeferredLightingEffect.h"
#include "FramebufferCache.h"
@ -118,7 +119,7 @@ RenderShadowTask::RenderShadowTask(CullFunctor cullFunctor) : Task() {
addJob<RenderShadowMap>("RenderShadowMap", shadowShapes, shapePlumber);
}
void RenderShadowTask::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
void RenderShadowTask::run(const SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
assert(sceneContext);
RenderArgs* args = renderContext->getArgs();

View file

@ -1,113 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
//
// For licensing information, see http://http.developer.nvidia.com/GPUGems/gpugems_app01.html:
//
// NVIDIA Statement on the Software
//
// The source code provided is freely distributable, so long as the NVIDIA header remains unaltered and user modifications are
// detailed.
//
// No Warranty
//
// THE SOFTWARE AND ANY OTHER MATERIALS PROVIDED BY NVIDIA ON THE ENCLOSED CD-ROM ARE PROVIDED "AS IS." NVIDIA DISCLAIMS ALL
// WARRANTIES, EXPRESS, IMPLIED OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
//
// Limitation of Liability
//
// NVIDIA SHALL NOT BE LIABLE TO ANY USER, DEVELOPER, DEVELOPER'S CUSTOMERS, OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH OR
// UNDER DEVELOPER FOR ANY LOSS OF PROFITS, INCOME, SAVINGS, OR ANY OTHER CONSEQUENTIAL, INCIDENTAL, SPECIAL, PUNITIVE, DIRECT
// OR INDIRECT DAMAGES (WHETHER IN AN ACTION IN CONTRACT, TORT OR BASED ON A WARRANTY), EVEN IF NVIDIA HAS BEEN ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGES. THESE LIMITATIONS SHALL APPLY NOTWITHSTANDING ANY FAILURE OF THE ESSENTIAL PURPOSE OF ANY
// LIMITED REMEDY. IN NO EVENT SHALL NVIDIA'S AGGREGATE LIABILITY TO DEVELOPER OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH
// OR UNDER DEVELOPER EXCEED THE AMOUNT OF MONEY ACTUALLY PAID BY DEVELOPER TO NVIDIA FOR THE SOFTWARE OR ANY OTHER MATERIALS.
//
//
// Atmospheric scattering fragment shader
//
// Author: Sean O'Neil
//
// Copyright (c) 2004 Sean O'Neil
//
uniform vec3 v3CameraPos; // The camera's current position
uniform vec3 v3InvWavelength; // 1 / pow(wavelength, 4) for the red, green, and blue channels
uniform float fInnerRadius; // The inner (planetary) radius
uniform float fKrESun; // Kr * ESun
uniform float fKmESun; // Km * ESun
uniform float fKr4PI; // Kr * 4 * PI
uniform float fKm4PI; // Km * 4 * PI
uniform float fScale; // 1 / (fOuterRadius - fInnerRadius)
uniform float fScaleDepth; // The scale depth (i.e. the altitude at which the atmosphere's average density is found)
uniform float fScaleOverScaleDepth; // fScale / fScaleDepth
const int nSamples = 2;
const float fSamples = 2.0;
uniform vec3 v3LightPos;
uniform float g;
uniform float g2;
in vec3 position;
out vec4 outFragColor;
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
void main (void)
{
// Get the ray from the camera to the vertex, and its length (which is the far point of the ray passing through the atmosphere)
vec3 v3Pos = position;
vec3 v3Ray = v3Pos - v3CameraPos;
float fFar = length(v3Ray);
v3Ray /= fFar;
// Calculate the ray's starting position, then calculate its scattering offset
vec3 v3Start = v3CameraPos;
float fHeight = length(v3Start);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fStartAngle = dot(v3Ray, v3Start) / fHeight;
float fStartOffset = fDepth * scale(fStartAngle);
// Initialize the scattering loop variables
//gl_FrontColor = vec4(0.0, 0.0, 0.0, 0.0);
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
vec3 v3SampleRay = v3Ray * fSampleLength;
vec3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays
vec3 v3FrontColor = vec3(0.0, 0.0, 0.0);
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fLightAngle = dot(v3LightPos, v3SamplePoint) / fHeight;
float fCameraAngle = dot((v3Ray), v3SamplePoint) / fHeight * 0.99;
float fScatter = (fStartOffset + fDepth * (scale(fLightAngle) - scale(fCameraAngle)));
vec3 v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepth * fScaledLength);
v3SamplePoint += v3SampleRay;
}
// Finally, scale the Mie and Rayleigh colors and set up the in variables for the pixel shader
vec3 secondaryFrontColor = v3FrontColor * fKmESun;
vec3 frontColor = v3FrontColor * (v3InvWavelength * fKrESun);
vec3 v3Direction = v3CameraPos - v3Pos;
float fCos = dot(v3LightPos, v3Direction) / length(v3Direction);
float fMiePhase = 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos*fCos) / pow(1.0 + g2 - 2.0*g*fCos, 1.5);
vec3 finalColor = frontColor.rgb + fMiePhase * secondaryFrontColor.rgb;
outFragColor.a = finalColor.b;
// outFragColor.rgb = pow(finalColor.rgb, vec3(1.0/2.2));
outFragColor.rgb = finalColor.rgb;
}
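
For readability, the closing lines of the removed shader combine the Rayleigh and Mie terms using O'Neil's phase-function approximation; with theta the angle between the light direction and the view direction and g the Mie asymmetry factor, the code computes:

F_{\mathrm{Mie}}(\theta, g) = \frac{3}{2}\,\frac{1 - g^{2}}{2 + g^{2}}\,\frac{1 + \cos^{2}\theta}{\left(1 + g^{2} - 2g\cos\theta\right)^{3/2}}

\mathrm{finalColor} = C_{\mathrm{Rayleigh}} + F_{\mathrm{Mie}}(\theta, g)\,C_{\mathrm{Mie}}

where C_Rayleigh corresponds to frontColor (the accumulated in-scattering tinted by v3InvWavelength * fKrESun) and C_Mie to secondaryFrontColor (the same accumulation scaled by fKmESun).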

View file

@ -1,69 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
//
// For licensing information, see http://http.developer.nvidia.com/GPUGems/gpugems_app01.html:
//
// NVIDIA Statement on the Software
//
// The source code provided is freely distributable, so long as the NVIDIA header remains unaltered and user modifications are
// detailed.
//
// No Warranty
//
// THE SOFTWARE AND ANY OTHER MATERIALS PROVIDED BY NVIDIA ON THE ENCLOSED CD-ROM ARE PROVIDED "AS IS." NVIDIA DISCLAIMS ALL
// WARRANTIES, EXPRESS, IMPLIED OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
//
// Limitation of Liability
//
// NVIDIA SHALL NOT BE LIABLE TO ANY USER, DEVELOPER, DEVELOPER'S CUSTOMERS, OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH OR
// UNDER DEVELOPER FOR ANY LOSS OF PROFITS, INCOME, SAVINGS, OR ANY OTHER CONSEQUENTIAL, INCIDENTAL, SPECIAL, PUNITIVE, DIRECT
// OR INDIRECT DAMAGES (WHETHER IN AN ACTION IN CONTRACT, TORT OR BASED ON A WARRANTY), EVEN IF NVIDIA HAS BEEN ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGES. THESE LIMITATIONS SHALL APPLY NOTWITHSTANDING ANY FAILURE OF THE ESSENTIAL PURPOSE OF ANY
// LIMITED REMEDY. IN NO EVENT SHALL NVIDIA'S AGGREGATE LIABILITY TO DEVELOPER OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH
// OR UNDER DEVELOPER EXCEED THE AMOUNT OF MONEY ACTUALLY PAID BY DEVELOPER TO NVIDIA FOR THE SOFTWARE OR ANY OTHER MATERIALS.
//
//
// Atmospheric scattering vertex shader
//
// Author: Sean O'Neil
//
// Copyright (c) 2004 Sean O'Neil
//
<@include gpu/Inputs.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
uniform vec3 v3CameraPos; // The camera's current position
uniform vec3 v3LightPos; // The direction vector to the light source
uniform vec3 v3InvWavelength; // 1 / pow(wavelength, 4) for the red, green, and blue channels
uniform float fOuterRadius; // The outer (atmosphere) radius
uniform float fInnerRadius; // The inner (planetary) radius
uniform float fKrESun; // Kr * ESun
uniform float fKmESun; // Km * ESun
uniform float fKr4PI; // Kr * 4 * PI
uniform float fKm4PI; // Km * 4 * PI
uniform float fScale; // 1 / (fOuterRadius - fInnerRadius)
uniform float fScaleDepth; // The scale depth (i.e. the altitude at which the atmosphere's average density is found)
uniform float fScaleOverScaleDepth; // fScale / fScaleDepth
const int nSamples = 2;
const float fSamples = 2.0;
out vec3 position;
void main(void)
{
// Get the ray from the camera to the vertex, and its length (which is the far point of the ray passing through the atmosphere)
position = inPosition.xyz * fOuterRadius;
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
vec4 v4pos = vec4(position, 1.0);
<$transformModelToClipPos(cam, obj, v4pos, gl_Position)$>
}

View file

@ -1,119 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
//
// For licensing information, see http://http.developer.nvidia.com/GPUGems/gpugems_app01.html:
//
// NVIDIA Statement on the Software
//
// The source code provided is freely distributable, so long as the NVIDIA header remains unaltered and user modifications are
// detailed.
//
// No Warranty
//
// THE SOFTWARE AND ANY OTHER MATERIALS PROVIDED BY NVIDIA ON THE ENCLOSED CD-ROM ARE PROVIDED "AS IS." NVIDIA DISCLAIMS ALL
// WARRANTIES, EXPRESS, IMPLIED OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
//
// Limitation of Liability
//
// NVIDIA SHALL NOT BE LIABLE TO ANY USER, DEVELOPER, DEVELOPER'S CUSTOMERS, OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH OR
// UNDER DEVELOPER FOR ANY LOSS OF PROFITS, INCOME, SAVINGS, OR ANY OTHER CONSEQUENTIAL, INCIDENTAL, SPECIAL, PUNITIVE, DIRECT
// OR INDIRECT DAMAGES (WHETHER IN AN ACTION IN CONTRACT, TORT OR BASED ON A WARRANTY), EVEN IF NVIDIA HAS BEEN ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGES. THESE LIMITATIONS SHALL APPLY NOTWITHSTANDING ANY FAILURE OF THE ESSENTIAL PURPOSE OF ANY
// LIMITED REMEDY. IN NO EVENT SHALL NVIDIA'S AGGREGATE LIABILITY TO DEVELOPER OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH
// OR UNDER DEVELOPER EXCEED THE AMOUNT OF MONEY ACTUALLY PAID BY DEVELOPER TO NVIDIA FOR THE SOFTWARE OR ANY OTHER MATERIALS.
//
//
// Atmospheric scattering fragment shader
//
// Author: Sean O'Neil
//
// Copyright (c) 2004 Sean O'Neil
//
uniform vec3 v3CameraPos; // The camera's current position
uniform vec3 v3LightPos; // The direction vector to the light source
uniform vec3 v3InvWavelength; // 1 / pow(wavelength, 4) for the red, green, and blue channels
uniform float fCameraHeight2; // fCameraHeight^2
uniform float fOuterRadius; // The outer (atmosphere) radius
uniform float fOuterRadius2; // fOuterRadius^2
uniform float fInnerRadius; // The inner (planetary) radius
uniform float fKrESun; // Kr * ESun
uniform float fKmESun; // Km * ESun
uniform float fKr4PI; // Kr * 4 * PI
uniform float fKm4PI; // Km * 4 * PI
uniform float fScale; // 1 / (fOuterRadius - fInnerRadius)
uniform float fScaleDepth; // The scale depth (i.e. the altitude at which the atmosphere's average density is found)
uniform float fScaleOverScaleDepth; // fScale / fScaleDepth
uniform float g;
uniform float g2;
const int nSamples = 2;
const float fSamples = 2.0;
in vec3 position;
out vec4 outFragColor;
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
void main (void)
{
// Get the ray from the camera to the vertex and its length (which is the far point of the ray passing through the atmosphere)
vec3 v3Pos = position;
vec3 v3Ray = v3Pos - v3CameraPos;
float fFar = length(v3Ray);
v3Ray /= fFar;
// Calculate the closest intersection of the ray with the outer atmosphere (which is the near point of the ray passing through the atmosphere)
float B = 2.0 * dot(v3CameraPos, v3Ray);
float C = fCameraHeight2 - fOuterRadius2;
float fDet = max(0.0, B*B - 4.0 * C);
float fNear = 0.5 * (-B - sqrt(fDet));
// Calculate the ray's starting position, then calculate its scattering offset
vec3 v3Start = v3CameraPos + v3Ray * fNear;
fFar -= fNear;
float fStartAngle = dot(v3Ray, v3Start) / fOuterRadius;
float fStartDepth = exp(-1.0 / fScaleDepth);
float fStartOffset = fStartDepth * scale(fStartAngle);
// Initialize the scattering loop variables
//gl_FrontColor = vec4(0.0, 0.0, 0.0, 0.0);
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
vec3 v3SampleRay = v3Ray * fSampleLength;
vec3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays
vec3 v3FrontColor = vec3(0.0, 0.0, 0.0);
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
float fLightAngle = dot(v3LightPos, v3SamplePoint) / fHeight;
float fCameraAngle = dot((v3Ray), v3SamplePoint) / fHeight * 0.99;
float fScatter = (fStartOffset + fDepth * (scale(fLightAngle) - scale(fCameraAngle)));
vec3 v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepth * fScaledLength);
v3SamplePoint += v3SampleRay;
}
vec3 v3Direction = v3CameraPos - v3Pos;
float fCos = dot(v3LightPos, v3Direction) / length(v3Direction);
float fMiePhase = 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos*fCos) / pow(1.0 + g2 - 2.0*g*fCos, 1.5);
vec3 color = v3FrontColor * (v3InvWavelength * fKrESun);
vec3 secondaryColor = v3FrontColor * fKmESun;
vec3 finalColor = color + fMiePhase * secondaryColor;
outFragColor.a = finalColor.b;
// outFragColor.rgb = pow(finalColor.rgb, vec3(1.0/2.2));
outFragColor.rgb = finalColor.rgb;
}
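
The "sky from space" variant above differs from the in-atmosphere version mainly in how it finds the ray's entry point: it first intersects the camera ray with the outer atmosphere sphere and starts marching there. With camera position c (so \lVert\mathbf{c}\rVert^{2} = fCameraHeight2), normalized ray direction d, and outer radius R_o, the shader solves:

t_{\mathrm{near}} = \tfrac{1}{2}\left(-B - \sqrt{\max\!\left(0,\;B^{2} - 4C\right)}\right), \qquad B = 2\,(\mathbf{c}\cdot\mathbf{d}), \quad C = \lVert\mathbf{c}\rVert^{2} - R_o^{2}

which is the usual quadratic for \lVert \mathbf{c} + t\,\mathbf{d} \rVert = R_o with a unit direction; the max(0, .) clamp corresponds to fDet in the code.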

View file

@ -1,53 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// For licensing information, see http://http.developer.nvidia.com/GPUGems/gpugems_app01.html:
//
// NVIDIA Statement on the Software
//
// The source code provided is freely distributable, so long as the NVIDIA header remains unaltered and user modifications are
// detailed.
//
// No Warranty
//
// THE SOFTWARE AND ANY OTHER MATERIALS PROVIDED BY NVIDIA ON THE ENCLOSED CD-ROM ARE PROVIDED "AS IS." NVIDIA DISCLAIMS ALL
// WARRANTIES, EXPRESS, IMPLIED OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
//
// Limitation of Liability
//
// NVIDIA SHALL NOT BE LIABLE TO ANY USER, DEVELOPER, DEVELOPER'S CUSTOMERS, OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH OR
// UNDER DEVELOPER FOR ANY LOSS OF PROFITS, INCOME, SAVINGS, OR ANY OTHER CONSEQUENTIAL, INCIDENTAL, SPECIAL, PUNITIVE, DIRECT
// OR INDIRECT DAMAGES (WHETHER IN AN ACTION IN CONTRACT, TORT OR BASED ON A WARRANTY), EVEN IF NVIDIA HAS BEEN ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGES. THESE LIMITATIONS SHALL APPLY NOTWITHSTANDING ANY FAILURE OF THE ESSENTIAL PURPOSE OF ANY
// LIMITED REMEDY. IN NO EVENT SHALL NVIDIA'S AGGREGATE LIABILITY TO DEVELOPER OR ANY OTHER PERSON OR ENTITY CLAIMING THROUGH
// OR UNDER DEVELOPER EXCEED THE AMOUNT OF MONEY ACTUALLY PAID BY DEVELOPER TO NVIDIA FOR THE SOFTWARE OR ANY OTHER MATERIALS.
//
//
// Atmospheric scattering vertex shader
//
// Author: Sean O'Neil
//
// Copyright (c) 2004 Sean O'Neil
//
<@include gpu/Inputs.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
uniform float fOuterRadius; // The outer (atmosphere) radius
out vec3 position;
void main(void) {
position = inPosition.xyz * fOuterRadius;
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
vec4 v4pos = vec4(position, 1.0);
<$transformModelToClipPos(cam, obj, v4pos, gl_Position)$>
}

View file

@ -18,6 +18,8 @@
#include "FramebufferCache.h"
const int ToneMappingEffect_ParamsSlot = 0;
const int ToneMappingEffect_LightingMapSlot = 0;
ToneMappingEffect::ToneMappingEffect() {
Parameters parameters;
@ -91,7 +93,8 @@ void ToneMappingEffect::init() {
auto blitProgram = gpu::ShaderPointer(gpu::Shader::createProgram(blitVS, blitPS));
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("toneMappingParamsBuffer"), 3));
slotBindings.insert(gpu::Shader::Binding(std::string("toneMappingParamsBuffer"), ToneMappingEffect_ParamsSlot));
slotBindings.insert(gpu::Shader::Binding(std::string("colorMap"), ToneMappingEffect_LightingMapSlot));
gpu::Shader::makeProgram(*blitProgram, slotBindings);
auto blitState = std::make_shared<gpu::State>();
blitState->setColorWriteMask(true, true, true, true);
@ -138,8 +141,8 @@ void ToneMappingEffect::render(RenderArgs* args) {
batch.setModelTransform(model);
}
batch.setUniformBuffer(3, _parametersBuffer);
batch.setResourceTexture(0, lightingBuffer);
batch.setUniformBuffer(ToneMappingEffect_ParamsSlot, _parametersBuffer);
batch.setResourceTexture(ToneMappingEffect_LightingMapSlot, lightingBuffer);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
}

View file

@ -1,279 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// ambient_occlusion.frag
// fragment shader
//
// Created by Niraj Venkat on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
// Based on the NVIDIA HBAO implementation in D3D11
// http://www.nvidia.co.uk/object/siggraph-2008-HBAO.html
in vec2 varTexcoord;
uniform sampler2D depthTexture;
uniform sampler2D normalTexture;
uniform float g_scale;
uniform float g_bias;
uniform float g_sample_rad;
uniform float g_intensity;
// the distance to the near clip plane
uniform float near;
// scale factor for depth: (far - near) / far
uniform float depthScale;
// offset for depth texture coordinates
uniform vec2 depthTexCoordOffset;
// scale for depth texture coordinates
uniform vec2 depthTexCoordScale;
// the resolution of the occlusion buffer
// and its inverse
uniform vec2 renderTargetRes;
uniform vec2 renderTargetResInv;
const float PI = 3.14159265;
const float AOStrength = 1.9;
// TODO: R (radius) should be exposed as a uniform parameter
const float R = 0.01;
const float R2 = 0.01*0.01;
const float NegInvR2 = - 1.0 / (0.01*0.01);
// can't use tan to initialize a const value
const float TanBias = 0.57735027; // tan(30.0 * PI / 180.0);
const float MaxRadiusPixels = 50.0;
const int NumDirections = 6;
const int NumSamples = 4;
out vec4 outFragColor;
/**
* Gets the normal in view space from a normal texture.
* uv: the uv texture coordinates to look up in the texture at.
*/
vec3 GetViewNormalFromTexture(vec2 uv) {
// convert [0,1] -> [-1,1], note: since we're normalizing
// we don't need to do v*2 - 1.0, we can just do a v-0.5
return normalize(texture(normalTexture, uv).xyz - 0.5);
}
/**
* Gets the linearized depth in view space.
 * d: the depth value [0-1] to convert, usually read from a depth texture.
*/
float ViewSpaceZFromDepth(float d){
return near / (d * depthScale - 1.0);
}
/**
* Converts a uv coordinate and depth value into a 3D view space coordinate.
* uv: the uv coordinates to convert
* z: the view space depth of the uv coordinate.
*/
vec3 UVToViewSpace(vec2 uv, float z){
return vec3((depthTexCoordOffset + varTexcoord * depthTexCoordScale) * z, z);
}
/**
* Converts a uv coordinate into a 3D view space coordinate.
* The depth of the uv coord is determined from the depth texture.
* uv: the uv coordinates to convert
*/
vec3 GetViewPos(vec2 uv) {
float z = ViewSpaceZFromDepth(texture(depthTexture, uv).r);
return UVToViewSpace(uv, z);
}
float TanToSin(float x) {
return x * inversesqrt(x*x + 1.0);
}
float InvLength(vec2 V) {
return inversesqrt(dot(V, V));
}
float Tangent(vec3 V) {
return V.z * InvLength(V.xy);
}
float BiasedTangent(vec3 V) {
return V.z * InvLength(V.xy) + TanBias;
}
float Tangent(vec3 P, vec3 S) {
return -(P.z - S.z) * InvLength(S.xy - P.xy);
}
float Length2(vec3 V) {
return dot(V, V);
}
vec3 MinDiff(vec3 P, vec3 Pr, vec3 Pl) {
vec3 V1 = Pr - P;
vec3 V2 = P - Pl;
return (Length2(V1) < Length2(V2)) ? V1 : V2;
}
vec2 SnapUVOffset(vec2 uv) {
return round(uv * renderTargetRes) * renderTargetResInv;
}
float Falloff(float d2) {
return d2 * NegInvR2 + 1.0f;
}
float HorizonOcclusion(vec2 deltaUV, vec3 P, vec3 dPdu, vec3 dPdv, float randstep, float numSamples) {
float ao = 0;
// Offset the first coord with some noise
vec2 uv = varTexcoord + SnapUVOffset(randstep*deltaUV);
deltaUV = SnapUVOffset(deltaUV);
// Calculate the tangent vector
vec3 T = deltaUV.x * dPdu + deltaUV.y * dPdv;
// Get the angle of the tangent vector from the viewspace axis
float tanH = BiasedTangent(T);
float sinH = TanToSin(tanH);
float tanS;
float d2;
vec3 S;
// Sample to find the maximum angle
for (float s = 1; s <= numSamples; ++s) {
uv += deltaUV;
S = GetViewPos(uv);
tanS = Tangent(P, S);
d2 = Length2(S - P);
// Is the sample within the radius and the angle greater?
if (d2 < R2 && tanS > tanH) {
float sinS = TanToSin(tanS);
// Apply falloff based on the distance
ao += Falloff(d2) * (sinS - sinH);
tanH = tanS;
sinH = sinS;
}
}
return ao;
}
vec2 RotateDirections(vec2 Dir, vec2 CosSin) {
return vec2(Dir.x*CosSin.x - Dir.y*CosSin.y,
Dir.x*CosSin.y + Dir.y*CosSin.x);
}
void ComputeSteps(inout vec2 stepSizeUv, inout float numSteps, float rayRadiusPix, float rand) {
// Avoid oversampling if numSteps is greater than the kernel radius in pixels
numSteps = min(NumSamples, rayRadiusPix);
// Divide by Ns+1 so that the farthest samples are not fully attenuated
float stepSizePix = rayRadiusPix / (numSteps + 1);
// Clamp numSteps if it is greater than the max kernel footprint
float maxNumSteps = MaxRadiusPixels / stepSizePix;
if (maxNumSteps < numSteps) {
// Use dithering to avoid AO discontinuities
numSteps = floor(maxNumSteps + rand);
numSteps = max(numSteps, 1);
stepSizePix = MaxRadiusPixels / numSteps;
}
// Step size in uv space
stepSizeUv = stepSizePix * renderTargetResInv;
}
float getRandom(vec2 uv) {
return fract(sin(dot(uv.xy ,vec2(12.9898,78.233))) * 43758.5453);
}
void main(void) {
mat4 projMatrix = getTransformCamera()._projection;
float numDirections = NumDirections;
vec3 P, Pr, Pl, Pt, Pb;
P = GetViewPos(varTexcoord);
// Sample neighboring pixels
Pr = GetViewPos(varTexcoord + vec2( renderTargetResInv.x, 0));
Pl = GetViewPos(varTexcoord + vec2(-renderTargetResInv.x, 0));
Pt = GetViewPos(varTexcoord + vec2( 0, renderTargetResInv.y));
Pb = GetViewPos(varTexcoord + vec2( 0,-renderTargetResInv.y));
// Calculate tangent basis vectors using the minimum difference
vec3 dPdu = MinDiff(P, Pr, Pl);
vec3 dPdv = MinDiff(P, Pt, Pb) * (renderTargetRes.y * renderTargetResInv.x);
// Get the random samples from the noise function
vec3 random = vec3(getRandom(varTexcoord.xy), getRandom(varTexcoord.yx), getRandom(varTexcoord.xx));
// Calculate the projected size of the hemisphere
float w = P.z * projMatrix[2][3] + projMatrix[3][3];
vec2 rayRadiusUV = (0.5 * R * vec2(projMatrix[0][0], projMatrix[1][1]) / w); // [-1,1] -> [0,1] uv
float rayRadiusPix = rayRadiusUV.x * renderTargetRes.x;
float ao = 1.0;
// Make sure the radius of the evaluated hemisphere is more than a pixel
if(rayRadiusPix > 1.0) {
ao = 0.0;
float numSteps;
vec2 stepSizeUV;
// Compute the number of steps
ComputeSteps(stepSizeUV, numSteps, rayRadiusPix, random.z);
float alpha = 2.0 * PI / numDirections;
// Calculate the horizon occlusion of each direction
for(float d = 0; d < numDirections; ++d) {
float theta = alpha * d;
// Apply noise to the direction
vec2 dir = RotateDirections(vec2(cos(theta), sin(theta)), random.xy);
vec2 deltaUV = dir * stepSizeUV;
// Sample the pixels along the direction
ao += HorizonOcclusion( deltaUV,
P,
dPdu,
dPdv,
random.z,
numSteps);
}
// Average the results and produce the final AO
ao = 1.0 - ao / numDirections * AOStrength;
}
outFragColor = vec4(vec3(ao), 1.0);
}
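
Summarizing the removed HBAO pass: for each of NumDirections directions around the fragment position P, the shader marches along the screen-space ray, tracks the running horizon angle h, and accumulates occlusion only when a sample S raises the horizon while staying within the radius R. In the code's terms, each accepted sample contributes

\Delta\mathrm{AO} = \underbrace{\left(1 - \frac{d^{2}}{R^{2}}\right)}_{\texttt{Falloff}(d^2)}\left(\sin h_{\mathrm{new}} - \sin h_{\mathrm{old}}\right), \qquad d^{2} = \lVert S - P \rVert^{2}

and the final value written out is \mathrm{ao} = 1 - \frac{\mathrm{AOStrength}}{N_{\mathrm{dirs}}}\sum \Delta\mathrm{AO}.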

View file

@ -1,26 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// ambient_occlusion.vert
// vertex shader
//
// Created by Niraj Venkat on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
out vec2 varTexcoord;
void main(void) {
varTexcoord = inTexCoord0.xy;
gl_Position = inPosition;
}

View file

@ -14,6 +14,10 @@
<@include DeferredBuffer.slh@>
uniform sampler2D pyramidMap;
uniform sampler2D occlusionMap;
uniform sampler2D occlusionBlurredMap;
in vec2 uv;
out vec4 outFragColor;

View file

@ -25,12 +25,13 @@ void main(void) {
DeferredTransform deferredTransform = getDeferredTransform();
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
float shadowAttenuation = 1.0;
float shadowAttenuation = 1.0;
if ((frag.normalVal.a >= 0.45) && (frag.normalVal.a <= 0.55)) {
if (frag.mode == LIGHT_MAPPED) {
vec3 color = evalLightmappedColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.normal,
frag.diffuse,
frag.specularVal.xyz);
@ -39,6 +40,7 @@ void main(void) {
vec3 color = evalAmbientSphereGlobalColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.position.xyz,
frag.normal,
frag.diffuse,

View file

@ -27,12 +27,13 @@ void main(void) {
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
vec4 worldPos = deferredTransform.viewInverse * vec4(frag.position.xyz, 1.0);
float shadowAttenuation = evalShadowAttenuation(worldPos);
float shadowAttenuation = evalShadowAttenuation(worldPos);
if ((frag.normalVal.a >= 0.45) && (frag.normalVal.a <= 0.55)) {
if (frag.mode == LIGHT_MAPPED) {
vec3 color = evalLightmappedColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.normal,
frag.diffuse,
frag.specularVal.xyz);
@ -41,6 +42,7 @@ void main(void) {
vec3 color = evalAmbientSphereGlobalColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.position.xyz,
frag.normal,
frag.diffuse,

View file

@ -25,13 +25,14 @@ void main(void) {
DeferredTransform deferredTransform = getDeferredTransform();
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
float shadowAttenuation = 1.0;
float shadowAttenuation = 1.0;
// Light mapped or not ?
if ((frag.normalVal.a >= 0.45) && (frag.normalVal.a <= 0.55)) {
if (frag.mode == LIGHT_MAPPED) {
vec3 color = evalLightmappedColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.normal,
frag.diffuse,
frag.specularVal.xyz);
@ -39,7 +40,8 @@ void main(void) {
} else {
vec3 color = evalAmbientGlobalColor(
deferredTransform.viewInverse,
shadowAttenuation,
shadowAttenuation,
frag.obscurance,
frag.position.xyz,
frag.normal,
frag.diffuse,

View file

@ -27,13 +27,14 @@ void main(void) {
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
vec4 worldPos = deferredTransform.viewInverse * vec4(frag.position.xyz, 1.0);
float shadowAttenuation = evalShadowAttenuation(worldPos);
float shadowAttenuation = evalShadowAttenuation(worldPos);
// Light mapped or not ?
if ((frag.normalVal.a >= 0.45) && (frag.normalVal.a <= 0.55)) {
if (frag.mode == LIGHT_MAPPED) {
vec3 color = evalLightmappedColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.normal,
frag.diffuse,
frag.specularVal.xyz);
@ -41,7 +42,8 @@ void main(void) {
} else {
vec3 color = evalAmbientGlobalColor(
deferredTransform.viewInverse,
shadowAttenuation,
shadowAttenuation,
frag.obscurance,
frag.position.xyz,
frag.normal,
frag.diffuse,

View file

@ -25,13 +25,14 @@ void main(void) {
DeferredTransform deferredTransform = getDeferredTransform();
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
float shadowAttenuation = 1.0;
float shadowAttenuation = 1.0;
// Light mapped or not ?
if ((frag.normalVal.a >= 0.45) && (frag.normalVal.a <= 0.55)) {
if (frag.mode == LIGHT_MAPPED) {
vec3 color = evalLightmappedColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.normal,
frag.diffuse,
frag.specularVal.xyz);
@ -40,6 +41,7 @@ void main(void) {
vec3 color = evalSkyboxGlobalColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.position.xyz,
frag.normal,
frag.diffuse,

View file

@ -27,13 +27,14 @@ void main(void) {
DeferredFragment frag = unpackDeferredFragment(deferredTransform, _texCoord0);
vec4 worldPos = deferredTransform.viewInverse * vec4(frag.position.xyz, 1.0);
float shadowAttenuation = evalShadowAttenuation(worldPos);
float shadowAttenuation = evalShadowAttenuation(worldPos);
// Light mapped or not ?
if ((frag.normalVal.a >= 0.45) && (frag.normalVal.a <= 0.55)) {
if (frag.mode == LIGHT_MAPPED) {
vec3 color = evalLightmappedColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.normal,
frag.diffuse,
frag.specularVal.xyz);
@ -42,6 +43,7 @@ void main(void) {
vec3 color = evalSkyboxGlobalColor(
deferredTransform.viewInverse,
shadowAttenuation,
frag.obscurance,
frag.position.xyz,
frag.normal,
frag.diffuse,

View file

@ -1,43 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// gaussian_blur.frag
// fragment shader
//
// Created by Niraj Venkat on 7/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
// the interpolated normal
//in vec4 interpolatedNormal;
in vec2 varTexcoord;
in vec2 varBlurTexcoords[14];
uniform sampler2D occlusionTexture;
out vec4 outFragColor;
void main(void) {
outFragColor = vec4(0.0);
outFragColor += texture(occlusionTexture, varBlurTexcoords[0])*0.0044299121055113265;
outFragColor += texture(occlusionTexture, varBlurTexcoords[1])*0.00895781211794;
outFragColor += texture(occlusionTexture, varBlurTexcoords[2])*0.0215963866053;
outFragColor += texture(occlusionTexture, varBlurTexcoords[3])*0.0443683338718;
outFragColor += texture(occlusionTexture, varBlurTexcoords[4])*0.0776744219933;
outFragColor += texture(occlusionTexture, varBlurTexcoords[5])*0.115876621105;
outFragColor += texture(occlusionTexture, varBlurTexcoords[6])*0.147308056121;
outFragColor += texture(occlusionTexture, varTexcoord)*0.159576912161;
outFragColor += texture(occlusionTexture, varBlurTexcoords[7])*0.147308056121;
outFragColor += texture(occlusionTexture, varBlurTexcoords[8])*0.115876621105;
outFragColor += texture(occlusionTexture, varBlurTexcoords[9])*0.0776744219933;
outFragColor += texture(occlusionTexture, varBlurTexcoords[10])*0.0443683338718;
outFragColor += texture(occlusionTexture, varBlurTexcoords[11])*0.0215963866053;
outFragColor += texture(occlusionTexture, varBlurTexcoords[12])*0.00895781211794;
outFragColor += texture(occlusionTexture, varBlurTexcoords[13])*0.0044299121055113265;
}
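
The hard-coded coefficients above appear to be samples of a Gaussian with sigma = 2.5 evaluated at the 15 tap offsets (the center weight, 0.159576912161, equals 1/(2.5*sqrt(2*pi))). Below is a small standalone sketch that reproduces them; the sigma value and tap layout are inferred from the weights rather than stated anywhere in the original source.

#include <cmath>
#include <cstdio>

int main() {
    const int halfTaps = 7;                          // center tap plus 7 taps on each side
    const double sigma = 2.5;                        // inferred from the shader's center weight
    const double pi = 3.14159265358979323846;
    const double norm = 1.0 / (sigma * std::sqrt(2.0 * pi));
    for (int i = -halfTaps; i <= halfTaps; ++i) {
        double w = norm * std::exp(-0.5 * (i * i) / (sigma * sigma)); // Gaussian pdf at offset i
        printf("offset %+d -> %.12f\n", i, w);
    }
    return 0;
}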

View file

@ -1,43 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// guassian_blur_horizontal.vert
// vertex shader
//
// Created by Niraj Venkat on 7/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
out vec2 varTexcoord;
out vec2 varBlurTexcoords[14];
void main(void) {
varTexcoord = inTexCoord0.xy;
gl_Position = inPosition;
varBlurTexcoords[0] = varTexcoord + vec2(-0.028, 0.0);
varBlurTexcoords[1] = varTexcoord + vec2(-0.024, 0.0);
varBlurTexcoords[2] = varTexcoord + vec2(-0.020, 0.0);
varBlurTexcoords[3] = varTexcoord + vec2(-0.016, 0.0);
varBlurTexcoords[4] = varTexcoord + vec2(-0.012, 0.0);
varBlurTexcoords[5] = varTexcoord + vec2(-0.008, 0.0);
varBlurTexcoords[6] = varTexcoord + vec2(-0.004, 0.0);
varBlurTexcoords[7] = varTexcoord + vec2(0.004, 0.0);
varBlurTexcoords[8] = varTexcoord + vec2(0.008, 0.0);
varBlurTexcoords[9] = varTexcoord + vec2(0.012, 0.0);
varBlurTexcoords[10] = varTexcoord + vec2(0.016, 0.0);
varBlurTexcoords[11] = varTexcoord + vec2(0.020, 0.0);
varBlurTexcoords[12] = varTexcoord + vec2(0.024, 0.0);
varBlurTexcoords[13] = varTexcoord + vec2(0.028, 0.0);
}

View file

@ -1,43 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// guassian_blur_vertical.vert
// vertex shader
//
// Created by Niraj Venkat on 7/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
out vec2 varTexcoord;
out vec2 varBlurTexcoords[14];
void main(void) {
varTexcoord = inTexCoord0.xy;
gl_Position = inPosition;
varBlurTexcoords[0] = varTexcoord + vec2(0.0, -0.028);
varBlurTexcoords[1] = varTexcoord + vec2(0.0, -0.024);
varBlurTexcoords[2] = varTexcoord + vec2(0.0, -0.020);
varBlurTexcoords[3] = varTexcoord + vec2(0.0, -0.016);
varBlurTexcoords[4] = varTexcoord + vec2(0.0, -0.012);
varBlurTexcoords[5] = varTexcoord + vec2(0.0, -0.008);
varBlurTexcoords[6] = varTexcoord + vec2(0.0, -0.004);
varBlurTexcoords[7] = varTexcoord + vec2(0.0, 0.004);
varBlurTexcoords[8] = varTexcoord + vec2(0.0, 0.008);
varBlurTexcoords[9] = varTexcoord + vec2(0.0, 0.012);
varBlurTexcoords[10] = varTexcoord + vec2(0.0, 0.016);
varBlurTexcoords[11] = varTexcoord + vec2(0.0, 0.020);
varBlurTexcoords[12] = varTexcoord + vec2(0.0, 0.024);
varBlurTexcoords[13] = varTexcoord + vec2(0.0, 0.028);
}

View file

@ -1,27 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// occlusion_blend.frag
// fragment shader
//
// Created by Niraj Venkat on 7/20/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
in vec2 varTexcoord;
out vec4 outFragColor;
uniform sampler2D blurredOcclusionTexture;
void main(void) {
vec4 occlusionColor = texture(blurredOcclusionTexture, varTexcoord);
outFragColor = vec4(vec3(0.0), occlusionColor.r);
}

View file

@ -67,7 +67,7 @@ void main(void) {
// Final Lighting color
vec3 fragColor = (shading.w * frag.diffuse + shading.xyz);
_fragColor = vec4(fragColor * radialAttenuation * getLightColor(light) * getLightIntensity(light), 0.0);
_fragColor = vec4(fragColor * radialAttenuation * getLightColor(light) * getLightIntensity(light) * frag.obscurance, 0.0);
if (getLightShowContour(light) > 0.0) {
// Show edge
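
The net effect of the one-line change in this hunk is that each local light's contribution is now attenuated by the ambient-occlusion obscurance fetched from the deferred buffer; schematically,

L_{\mathrm{frag}} = \left(s_{w}\,\mathrm{diffuse} + s_{rgb}\right)\cdot a_{\mathrm{radial}}\cdot \mathrm{lightColor}\cdot \mathrm{lightIntensity}\cdot \mathrm{obscurance}

where s is the BRDF shading term and a_radial the light's radial attenuation, matching the variables on the modified line.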

Some files were not shown because too many files have changed in this diff.