Mirror of https://github.com/JulianGro/overte.git (synced 2025-04-10 20:47:02 +02:00)
Merge branch 'master' of github.com:highfidelity/hifi into one
This commit is contained in: commit e8bd424697
32 changed files with 218 additions and 707 deletions
cmake/externals/etc2comp/CMakeLists.txt (vendored): 14 changes
@@ -40,16 +40,22 @@ if (WIN32)
set(_LIB_FOLDER "$<$<CONFIG:RelWithDebInfo>:build/EtcLib/RelWithDebInfo>")
set(_LIB_FOLDER "${_LIB_FOLDER}$<$<CONFIG:MinSizeRel>:build/EtcLib/MinSizeRel>")
set(_LIB_FOLDER "${_LIB_FOLDER}$<$<OR:$<CONFIG:Release>,$<CONFIG:Debug>>:build/EtcLib/Release>")

set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/${_LIB_FOLDER}/EtcLib.lib CACHE FILEPATH "Path to Etc2Comp release library")

elseif (APPLE)
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/build/EtcLib/Debug/libEtcLib.a CACHE FILEPATH "Path to EtcLib debug library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/build/EtcLib/Release/libEtcLib.a CACHE FILEPATH "Path to EtcLib release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/build/EtcLib/Debug/libEtcLib.a CACHE FILEPATH "Path to EtcLib debug library")

set(_LIB_FOLDER "$<$<CONFIG:RelWithDebInfo>:build/EtcLib/RelWithDebInfo>")
set(_LIB_FOLDER "${_LIB_FOLDER}$<$<CONFIG:MinSizeRel>:build/EtcLib/MinSizeRel>")
set(_LIB_FOLDER "${_LIB_FOLDER}$<$<OR:$<CONFIG:Release>,$<CONFIG:Debug>>:build/EtcLib/Release>")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/${_LIB_FOLDER}/libEtcLib.a CACHE FILEPATH "Path to Etc2Comp release library")

else ()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "" CACHE FILEPATH "Path to EtcLib debug library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/build/EtcLib/libEtcLib.a CACHE FILEPATH "Path to EtcLib release library")

endif ()

set(ETC_INCLUDE_DIR ${SOURCE_DIR}/EtcLib/Etc CACHE FILEPATH "Path to Etc2Comp/Etc include directory")
set(ETCCODEC_INCLUDE_DIR ${SOURCE_DIR}/EtcLib/EtcCodec CACHE FILEPATH "Path to Etc2Comp/EtcCodec include directory")
# ETC2COMP_INCLUDE_DIRS will be set later by FindEtc2Comp
# ETC2COMP_INCLUDE_DIRS will be set later by FindEtc2Comp
@@ -62,8 +62,6 @@ Original.Button {
hifi.buttons.pressedColor[control.color]
} else if (control.hovered) {
hifi.buttons.hoveredColor[control.color]
} else if (!control.hovered && control.focus) {
hifi.buttons.focusedColor[control.color]
} else {
hifi.buttons.colorStart[control.color]
}

@@ -78,8 +76,6 @@ Original.Button {
hifi.buttons.pressedColor[control.color]
} else if (control.hovered) {
hifi.buttons.hoveredColor[control.color]
} else if (!control.hovered && control.focus) {
hifi.buttons.focusedColor[control.color]
} else {
hifi.buttons.colorFinish[control.color]
}
@ -11,7 +11,6 @@ Item {
|
|||
height: parent !== null ? parent.height : undefined
|
||||
property var parentStackItem: null
|
||||
property int headerHeight: 70
|
||||
property string url
|
||||
property string scriptURL
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
|
@ -23,6 +22,7 @@ Item {
|
|||
property bool punctuationMode: false
|
||||
property bool passwordField: false
|
||||
property bool isDesktop: false
|
||||
property alias url: web.url
|
||||
property alias webView: web.webViewCore
|
||||
property alias profile: web.webViewCoreProfile
|
||||
property bool remove: false
|
||||
|
@ -81,7 +81,7 @@ Item {
|
|||
color: hifi.colors.baseGray
|
||||
font.pixelSize: 12
|
||||
verticalAlignment: Text.AlignLeft
|
||||
text: root.url
|
||||
text: web.url
|
||||
anchors {
|
||||
top: nav.bottom
|
||||
horizontalCenter: parent.horizontalCenter;
|
||||
|
@ -131,11 +131,11 @@ Item {
|
|||
|
||||
function loadUrl(url) {
|
||||
web.webViewCore.url = url
|
||||
root.url = web.webViewCore.url;
|
||||
}
|
||||
|
||||
onUrlChanged: {
|
||||
loadUrl(url);
|
||||
Rectangle {
|
||||
anchors.fill: web;
|
||||
color: hifi.colors.white;
|
||||
}
|
||||
|
||||
FlickableWebViewCore {
|
||||
|
|
|
@@ -253,8 +253,12 @@ Item {
anchors.left: parent.left;
anchors.right: parent.right;

Item { // On empty history. We don't want to flash and then replace, so don't show until we know we're zero.
visible: transactionHistoryModel.count === 0 && transactionHistoryModel.currentPageToRetrieve < 0;
Item {
// On empty history. We don't want to flash and then replace, so don't show until we know we should.
// The history is empty when it contains 1 item (the pending item count) AND there are no pending items.
visible: transactionHistoryModel.count === 1 &&
transactionHistoryModel.retrievedAtLeastOnePage &&
transactionHistoryModel.get(0).count === 0;
anchors.centerIn: parent;
width: parent.width - 12;
height: parent.height;
@ -282,7 +282,9 @@ public:
|
|||
|
||||
private:
|
||||
void initialize() {
|
||||
setObjectName("Render");
|
||||
PROFILE_SET_THREAD_NAME("Render");
|
||||
setCrashAnnotation("render_thread_id", std::to_string((size_t)QThread::currentThreadId()));
|
||||
if (!_renderContext->makeCurrent()) {
|
||||
qFatal("Unable to make rendering context current on render thread");
|
||||
}
|
||||
|
@ -1096,6 +1098,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
_logger->setSessionID(accountManager->getSessionID());
|
||||
|
||||
setCrashAnnotation("metaverse_session_id", accountManager->getSessionID().toString().toStdString());
|
||||
setCrashAnnotation("main_thread_id", std::to_string((size_t)QThread::currentThreadId()));
|
||||
|
||||
if (steamClient) {
|
||||
qCDebug(interfaceapp) << "[VERSION] SteamVR buildID:" << steamClient->getSteamVRBuildID();
|
||||
|
@ -1160,24 +1163,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
});
|
||||
audioIO->startThread();
|
||||
|
||||
auto audioScriptingInterface = DependencyManager::set<AudioScriptingInterface, scripting::Audio>();
|
||||
connect(audioIO.data(), &AudioClient::mutedByMixer, audioScriptingInterface.data(), &AudioScriptingInterface::mutedByMixer);
|
||||
connect(audioIO.data(), &AudioClient::receivedFirstPacket, audioScriptingInterface.data(), &AudioScriptingInterface::receivedFirstPacket);
|
||||
connect(audioIO.data(), &AudioClient::disconnected, audioScriptingInterface.data(), &AudioScriptingInterface::disconnected);
|
||||
connect(audioIO.data(), &AudioClient::muteEnvironmentRequested, [](glm::vec3 position, float radius) {
|
||||
auto audioClient = DependencyManager::get<AudioClient>();
|
||||
auto audioScriptingInterface = DependencyManager::get<AudioScriptingInterface>();
|
||||
auto myAvatarPosition = DependencyManager::get<AvatarManager>()->getMyAvatar()->getWorldPosition();
|
||||
float distance = glm::distance(myAvatarPosition, position);
|
||||
|
||||
if (distance < radius) {
|
||||
audioClient->setMuted(true);
|
||||
audioScriptingInterface->environmentMuted();
|
||||
}
|
||||
});
|
||||
connect(this, &Application::activeDisplayPluginChanged,
|
||||
reinterpret_cast<scripting::Audio*>(audioScriptingInterface.data()), &scripting::Audio::onContextChanged);
|
||||
|
||||
// Make sure we don't time out during slow operations at startup
|
||||
updateHeartbeat();
|
||||
|
||||
|
@ -1241,9 +1226,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
connect(accountManager.data(), &AccountManager::authRequired, dialogsManager.data(), &DialogsManager::showLoginDialog);
|
||||
#endif
|
||||
connect(accountManager.data(), &AccountManager::usernameChanged, this, &Application::updateWindowTitle);
|
||||
connect(accountManager.data(), &AccountManager::usernameChanged, [](QString username){
|
||||
setCrashAnnotation("username", username.toStdString());
|
||||
});
|
||||
|
||||
// set the account manager's root URL and trigger a login request if we don't have the access token
|
||||
accountManager->setIsAgent(true);
|
||||
|
@ -1366,6 +1348,28 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
initializeDisplayPlugins();
|
||||
qCDebug(interfaceapp, "Initialized Display");
|
||||
|
||||
// An audio device changed signal received before the display plugins are set up will cause a crash,
|
||||
// so we defer the setup of the `scripting::Audio` class until this point
|
||||
{
|
||||
auto audioScriptingInterface = DependencyManager::set<AudioScriptingInterface, scripting::Audio>();
|
||||
connect(audioIO.data(), &AudioClient::mutedByMixer, audioScriptingInterface.data(), &AudioScriptingInterface::mutedByMixer);
|
||||
connect(audioIO.data(), &AudioClient::receivedFirstPacket, audioScriptingInterface.data(), &AudioScriptingInterface::receivedFirstPacket);
|
||||
connect(audioIO.data(), &AudioClient::disconnected, audioScriptingInterface.data(), &AudioScriptingInterface::disconnected);
|
||||
connect(audioIO.data(), &AudioClient::muteEnvironmentRequested, [](glm::vec3 position, float radius) {
|
||||
auto audioClient = DependencyManager::get<AudioClient>();
|
||||
auto audioScriptingInterface = DependencyManager::get<AudioScriptingInterface>();
|
||||
auto myAvatarPosition = DependencyManager::get<AvatarManager>()->getMyAvatar()->getWorldPosition();
|
||||
float distance = glm::distance(myAvatarPosition, position);
|
||||
|
||||
if (distance < radius) {
|
||||
audioClient->setMuted(true);
|
||||
audioScriptingInterface->environmentMuted();
|
||||
}
|
||||
});
|
||||
connect(this, &Application::activeDisplayPluginChanged,
|
||||
reinterpret_cast<scripting::Audio*>(audioScriptingInterface.data()), &scripting::Audio::onContextChanged);
|
||||
}
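Illustrative only, not part of the diff (audioClient and audioScripting stand in for the DependencyManager singletons used in the lambda registered above): an environment-mute request carries a position and radius, and the local client mutes itself only when its avatar is inside that radius.

#include <glm/glm.hpp>

template <typename AudioClientT, typename AudioScriptingT>
void onMuteEnvironmentRequested(const glm::vec3& myAvatarPosition, const glm::vec3& requestPosition,
                                float radius, AudioClientT& audioClient, AudioScriptingT& audioScripting) {
    // Same distance test as the connected lambda: only avatars inside the radius mute.
    if (glm::distance(myAvatarPosition, requestPosition) < radius) {
        audioClient.setMuted(true);          // silence local input
        audioScripting.environmentMuted();   // tell scripts why
    }
}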
|
||||
|
||||
// Create the rendering engine. This can be slow on some machines due to lots of
|
||||
// GPU pipeline creation.
|
||||
initializeRenderEngine();
|
||||
|
@ -1751,6 +1755,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
|
||||
// set the local loopback interface for local sounds
|
||||
AudioInjector::setLocalAudioInterface(audioIO.data());
|
||||
auto audioScriptingInterface = DependencyManager::get<AudioScriptingInterface>();
|
||||
audioScriptingInterface->setLocalAudioInterface(audioIO.data());
|
||||
connect(audioIO.data(), &AudioClient::noiseGateOpened, audioScriptingInterface.data(), &AudioScriptingInterface::noiseGateOpened);
|
||||
connect(audioIO.data(), &AudioClient::noiseGateClosed, audioScriptingInterface.data(), &AudioScriptingInterface::noiseGateClosed);
|
||||
|
@ -6165,6 +6170,8 @@ void Application::updateWindowTitle() const {
|
|||
QString connectionStatus = nodeList->getDomainHandler().isConnected() ? "" : " (NOT CONNECTED)";
|
||||
QString username = accountManager->getAccountInfo().getUsername();
|
||||
|
||||
setCrashAnnotation("username", username.toStdString());
|
||||
|
||||
QString currentPlaceName;
|
||||
if (isServerlessMode()) {
|
||||
currentPlaceName = "serverless: " + DependencyManager::get<AddressManager>()->getDomainURL().toString();
|
||||
|
|
|
@ -21,6 +21,17 @@ AvatarMotionState::AvatarMotionState(AvatarSharedPointer avatar, const btCollisi
|
|||
_type = MOTIONSTATE_TYPE_AVATAR;
|
||||
}
|
||||
|
||||
void AvatarMotionState::handleEasyChanges(uint32_t& flags) {
|
||||
ObjectMotionState::handleEasyChanges(flags);
|
||||
if (flags & Simulation::DIRTY_PHYSICS_ACTIVATION && !_body->isActive()) {
|
||||
_body->activate();
|
||||
}
|
||||
}
|
||||
|
||||
bool AvatarMotionState::handleHardAndEasyChanges(uint32_t& flags, PhysicsEngine* engine) {
|
||||
return ObjectMotionState::handleHardAndEasyChanges(flags, engine);
|
||||
}
|
||||
|
||||
AvatarMotionState::~AvatarMotionState() {
|
||||
assert(_avatar);
|
||||
_avatar = nullptr;
|
||||
|
@ -46,6 +57,9 @@ PhysicsMotionType AvatarMotionState::computePhysicsMotionType() const {
|
|||
const btCollisionShape* AvatarMotionState::computeNewShape() {
|
||||
ShapeInfo shapeInfo;
|
||||
std::static_pointer_cast<Avatar>(_avatar)->computeShapeInfo(shapeInfo);
|
||||
glm::vec3 halfExtents = shapeInfo.getHalfExtents();
|
||||
halfExtents.y = 0.0f;
|
||||
_diameter = 2.0f * glm::length(halfExtents);
|
||||
return getShapeManager()->getShape(shapeInfo);
|
||||
}
|
||||
|
||||
|
@@ -60,25 +74,31 @@ void AvatarMotionState::getWorldTransform(btTransform& worldTrans) const {
worldTrans.setRotation(glmToBullet(getObjectRotation()));
if (_body) {
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
_body->setAngularVelocity(glmToBullet(getObjectLinearVelocity()));
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
}
}

// virtual
void AvatarMotionState::setWorldTransform(const btTransform& worldTrans) {
// HACK: The PhysicsEngine does not actually move OTHER avatars -- instead it slaves their local RigidBody to the transform
// as specified by a remote simulation. However, to give the remote simulation time to respond to our own objects we tie
// the other avatar's body to its true position with a simple spring. This is a HACK that will have to be improved later.
const float SPRING_TIMESCALE = 0.5f;
float tau = PHYSICS_ENGINE_FIXED_SUBSTEP / SPRING_TIMESCALE;
btVector3 currentPosition = worldTrans.getOrigin();
btVector3 targetPosition = glmToBullet(getObjectPosition());
btTransform newTransform;
newTransform.setOrigin((1.0f - tau) * currentPosition + tau * targetPosition);
newTransform.setRotation(glmToBullet(getObjectRotation()));
_body->setWorldTransform(newTransform);
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
_body->setAngularVelocity(glmToBullet(getObjectLinearVelocity()));
btVector3 offsetToTarget = glmToBullet(getObjectPosition()) - currentPosition;
float distance = offsetToTarget.length();
if ((1.0f - tau) * distance > _diameter) {
// the avatar body is far from its target --> slam position
btTransform newTransform;
newTransform.setOrigin(currentPosition + offsetToTarget);
newTransform.setRotation(glmToBullet(getObjectRotation()));
_body->setWorldTransform(newTransform);
_body->setLinearVelocity(glmToBullet(getObjectLinearVelocity()));
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
} else {
// the avatar body is near its target --> slam velocity
btVector3 velocity = glmToBullet(getObjectLinearVelocity()) + (1.0f / SPRING_TIMESCALE) * offsetToTarget;
_body->setLinearVelocity(velocity);
_body->setAngularVelocity(glmToBullet(getObjectAngularVelocity()));
}
}
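For reference, not part of the diff: with \Delta t = PHYSICS_ENGINE_FIXED_SUBSTEP and T = SPRING_TIMESCALE, the spring branch above applies, each fixed substep,

\tau = \Delta t / T, \qquad x_{n+1} = (1 - \tau)\, x_n + \tau\, x^{*} \quad\Longrightarrow\quad x_{n+1} - x^{*} = (1 - \tau)\,(x_n - x^{*}),

so the offset to the remotely reported position x^{*} decays geometrically with a timescale of roughly T, and the velocity (1 / SPRING_TIMESCALE) * offsetToTarget used in the near branch is the matching spring velocity. The test (1.0f - tau) * distance > _diameter switches to slamming the position outright when even a full blend step would still leave the body more than one avatar diameter from its target.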

// These pure virtual methods must be implemented for each MotionState type
@ -145,3 +165,8 @@ void AvatarMotionState::computeCollisionGroupAndMask(int32_t& group, int32_t& ma
|
|||
mask = Physics::getDefaultCollisionMask(group);
|
||||
}
|
||||
|
||||
// virtual
|
||||
float AvatarMotionState::getMass() const {
|
||||
return std::static_pointer_cast<Avatar>(_avatar)->computeMass();
|
||||
}
|
||||
|
||||
|
|
|
@ -23,6 +23,9 @@ class AvatarMotionState : public ObjectMotionState {
|
|||
public:
|
||||
AvatarMotionState(AvatarSharedPointer avatar, const btCollisionShape* shape);
|
||||
|
||||
virtual void handleEasyChanges(uint32_t& flags) override;
|
||||
virtual bool handleHardAndEasyChanges(uint32_t& flags, PhysicsEngine* engine) override;
|
||||
|
||||
virtual PhysicsMotionType getMotionType() const override { return _motionType; }
|
||||
|
||||
virtual uint32_t getIncomingDirtyFlags() override;
|
||||
|
@ -64,6 +67,8 @@ public:
|
|||
|
||||
virtual void computeCollisionGroupAndMask(int32_t& group, int32_t& mask) const override;
|
||||
|
||||
virtual float getMass() const override;
|
||||
|
||||
friend class AvatarManager;
|
||||
friend class Avatar;
|
||||
|
||||
|
@ -76,6 +81,7 @@ protected:
|
|||
virtual const btCollisionShape* computeNewShape() override;
|
||||
|
||||
AvatarSharedPointer _avatar;
|
||||
float _diameter { 0.0f };
|
||||
|
||||
uint32_t _dirtyFlags;
|
||||
};
|
||||
|
|
|
@@ -108,12 +108,15 @@ bool MyCharacterController::testRayShotgun(const glm::vec3& position, const glm:
btScalar lengthAxis = axis.length();
if (lengthAxis > FLT_EPSILON) {
// we're walking sideways
btScalar angle = acosf(lengthAxis / adjustedDirection.length());
if (rayDirection.dot(forward) < 0.0f) {
angle = PI - angle;
btScalar cosAngle = lengthAxis / adjustedDirection.length();
if (cosAngle < 1.0f) {
btScalar angle = acosf(cosAngle);
if (rayDirection.dot(forward) < 0.0f) {
angle = PI - angle;
}
axis /= lengthAxis;
rotation = btMatrix3x3(btQuaternion(axis, angle)) * rotation;
}
axis /= lengthAxis;
rotation = btMatrix3x3(btQuaternion(axis, angle)) * rotation;
} else if (rayDirection.dot(forward) < 0.0f) {
// we're walking backwards
rotation = btMatrix3x3(btQuaternion(_currentUp, PI)) * rotation;
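Context, not part of the diff: the replaced line fed lengthAxis / adjustedDirection.length() straight to acosf(), and floating-point rounding can push that ratio to 1.0 or slightly above when the two directions are nearly parallel, in which case acosf() returns NaN and corrupts the rotation. The new cosAngle < 1.0f guard skips the extra rotation in that degenerate case. A minimal standalone illustration:

#include <cmath>
#include <cstdio>

int main() {
    // Rounding can leave a computed cosine a hair above 1.0; acos of such a
    // value is NaN, which would poison the quaternion built from the angle.
    float cosAngle = 1.0000001f;
    printf("unguarded acos: %f\n", std::acos(cosAngle));    // prints nan
    if (cosAngle < 1.0f) {
        printf("guarded acos: %f\n", std::acos(cosAngle));  // only runs for a valid angle
    } else {
        printf("guarded: directions treated as parallel, no extra rotation\n");
    }
    return 0;
}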
|
@ -319,6 +319,15 @@ public slots:
|
|||
* {@link Window.processingGifStarted|processingGifStarted} and {@link Window.processingGifCompleted|processingGifCompleted}
|
||||
* are emitted. The path to store the snapshots and the length of the animated GIF to capture are specified in Settings >
|
||||
* General > Snapshots.
|
||||
*
|
||||
* If user has supplied a specific filename for the snapshot:
|
||||
* If the user's requested filename has a suffix that's contained within SUPPORTED_IMAGE_FORMATS,
|
||||
* DON'T append ".jpg" to the filename. QT will save the image in the format associated with the
|
||||
* filename's suffix.
|
||||
* If you want lossless Snapshots, supply a `.png` filename. Otherwise, use `.jpeg` or `.jpg`.
|
||||
* Otherwise, ".jpg" is appended to the user's requested filename so that the image is saved in JPG format.
|
||||
* If the user hasn't supplied a specific filename for the snapshot:
|
||||
* Save the snapshot in JPG format according to FILENAME_PATH_FORMAT
|
||||
* @function Window.takeSnapshot
|
||||
* @param {boolean} [notify=true] - This value is passed on through the {@link Window.stillSnapshotTaken|stillSnapshotTaken}
|
||||
* signal.
|
||||
|
|
|
@ -50,6 +50,7 @@ const QString DATETIME_FORMAT = "yyyy-MM-dd_hh-mm-ss";
|
|||
const QString SNAPSHOTS_DIRECTORY = "Snapshots";
|
||||
const QString URL = "highfidelity_url";
|
||||
static const int SNAPSHOT_360_TIMER_INTERVAL = 350;
|
||||
static const QList<QString> SUPPORTED_IMAGE_FORMATS = { "jpg", "jpeg", "png" };
|
||||
|
||||
Snapshot::Snapshot() {
|
||||
_snapshotTimer.setSingleShot(false);
|
||||
|
@ -67,7 +68,6 @@ Snapshot::Snapshot() {
|
|||
}
|
||||
|
||||
SnapshotMetaData* Snapshot::parseSnapshotData(QString snapshotPath) {
|
||||
|
||||
if (!QFile(snapshotPath).exists()) {
|
||||
return NULL;
|
||||
}
|
||||
|
@ -95,7 +95,6 @@ SnapshotMetaData* Snapshot::parseSnapshotData(QString snapshotPath) {
|
|||
}
|
||||
|
||||
QString Snapshot::saveSnapshot(QImage image, const QString& filename, const QString& pathname) {
|
||||
|
||||
QFile* snapshotFile = savedFileForSnapshot(image, false, filename, pathname);
|
||||
|
||||
if (snapshotFile) {
|
||||
|
@ -122,11 +121,15 @@ static const glm::quat CAMERA_ORIENTATION_LEFT(glm::quat(glm::radians(glm::vec3(
|
|||
static const glm::quat CAMERA_ORIENTATION_BACK(glm::quat(glm::radians(glm::vec3(0.0f, 180.0f, 0.0f))));
|
||||
static const glm::quat CAMERA_ORIENTATION_RIGHT(glm::quat(glm::radians(glm::vec3(0.0f, 270.0f, 0.0f))));
|
||||
static const glm::quat CAMERA_ORIENTATION_UP(glm::quat(glm::radians(glm::vec3(90.0f, 0.0f, 0.0f))));
|
||||
void Snapshot::save360Snapshot(const glm::vec3& cameraPosition, const bool& cubemapOutputFormat, const bool& notify, const QString& filename) {
|
||||
void Snapshot::save360Snapshot(const glm::vec3& cameraPosition,
|
||||
const bool& cubemapOutputFormat,
|
||||
const bool& notify,
|
||||
const QString& filename) {
|
||||
_snapshotFilename = filename;
|
||||
_notify360 = notify;
|
||||
_cubemapOutputFormat = cubemapOutputFormat;
|
||||
SecondaryCameraJobConfig* secondaryCameraRenderConfig = static_cast<SecondaryCameraJobConfig*>(qApp->getRenderEngine()->getConfiguration()->getConfig("SecondaryCamera"));
|
||||
SecondaryCameraJobConfig* secondaryCameraRenderConfig =
|
||||
static_cast<SecondaryCameraJobConfig*>(qApp->getRenderEngine()->getConfiguration()->getConfig("SecondaryCamera"));
|
||||
|
||||
// Save initial values of secondary camera render config
|
||||
_oldEnabled = secondaryCameraRenderConfig->isEnabled();
|
||||
|
@ -141,9 +144,11 @@ void Snapshot::save360Snapshot(const glm::vec3& cameraPosition, const bool& cube
|
|||
}
|
||||
|
||||
// Initialize some secondary camera render config options for 360 snapshot capture
|
||||
static_cast<ToneMappingConfig*>(qApp->getRenderEngine()->getConfiguration()->getConfig("SecondaryCameraJob.ToneMapping"))->setCurve(0);
|
||||
static_cast<ToneMappingConfig*>(qApp->getRenderEngine()->getConfiguration()->getConfig("SecondaryCameraJob.ToneMapping"))
|
||||
->setCurve(0);
|
||||
|
||||
secondaryCameraRenderConfig->resetSizeSpectatorCamera(static_cast<int>(CUBEMAP_SIDE_PIXEL_DIMENSION), static_cast<int>(CUBEMAP_SIDE_PIXEL_DIMENSION));
|
||||
secondaryCameraRenderConfig->resetSizeSpectatorCamera(static_cast<int>(CUBEMAP_SIDE_PIXEL_DIMENSION),
|
||||
static_cast<int>(CUBEMAP_SIDE_PIXEL_DIMENSION));
|
||||
secondaryCameraRenderConfig->setProperty("attachedEntityId", "");
|
||||
secondaryCameraRenderConfig->setPosition(cameraPosition);
|
||||
secondaryCameraRenderConfig->setProperty("vFoV", SNAPSHOT_360_FOV);
|
||||
|
@ -159,7 +164,8 @@ void Snapshot::save360Snapshot(const glm::vec3& cameraPosition, const bool& cube
|
|||
}
|
||||
|
||||
void Snapshot::takeNextSnapshot() {
|
||||
SecondaryCameraJobConfig* config = static_cast<SecondaryCameraJobConfig*>(qApp->getRenderEngine()->getConfiguration()->getConfig("SecondaryCamera"));
|
||||
SecondaryCameraJobConfig* config =
|
||||
static_cast<SecondaryCameraJobConfig*>(qApp->getRenderEngine()->getConfiguration()->getConfig("SecondaryCamera"));
|
||||
|
||||
// Order is:
|
||||
// 0. Down
|
||||
|
@ -191,7 +197,9 @@ void Snapshot::takeNextSnapshot() {
|
|||
_snapshotTimer.stop();
|
||||
|
||||
// Reset secondary camera render config
|
||||
static_cast<ToneMappingConfig*>(qApp->getRenderEngine()->getConfiguration()->getConfig("SecondaryCameraJob.ToneMapping"))->setCurve(1);
|
||||
static_cast<ToneMappingConfig*>(
|
||||
qApp->getRenderEngine()->getConfiguration()->getConfig("SecondaryCameraJob.ToneMapping"))
|
||||
->setCurve(1);
|
||||
config->resetSizeSpectatorCamera(qApp->getWindow()->geometry().width(), qApp->getWindow()->geometry().height());
|
||||
config->setProperty("attachedEntityId", _oldAttachedEntityId);
|
||||
config->setProperty("vFoV", _oldvFoV);
|
||||
|
@ -338,8 +346,10 @@ QTemporaryFile* Snapshot::saveTempSnapshot(QImage image) {
|
|||
return static_cast<QTemporaryFile*>(savedFileForSnapshot(image, true));
|
||||
}
|
||||
|
||||
QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary, const QString& userSelectedFilename, const QString& userSelectedPathname) {
|
||||
|
||||
QFile* Snapshot::savedFileForSnapshot(QImage& shot,
|
||||
bool isTemporary,
|
||||
const QString& userSelectedFilename,
|
||||
const QString& userSelectedPathname) {
|
||||
// adding URL to snapshot
|
||||
QUrl currentURL = DependencyManager::get<AddressManager>()->currentPublicAddress();
|
||||
shot.setText(URL, currentURL.toString());
|
||||
|
@@ -350,18 +360,35 @@ QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary, const QSt

QDateTime now = QDateTime::currentDateTime();

// If user has requested specific filename then use it, else create the filename
// 'jpg" is appended, as the image is saved in jpg format. This is the case for all snapshots
// (see definition of FILENAME_PATH_FORMAT)
// If user has supplied a specific filename for the snapshot:
//   If the user's requested filename has a suffix that's contained within SUPPORTED_IMAGE_FORMATS,
//   DON'T append ".jpg" to the filename. QT will save the image in the format associated with the
//   filename's suffix.
//   If you want lossless Snapshots, supply a `.png` filename. Otherwise, use `.jpeg` or `.jpg`.
//   For PNGs, we use a "quality" of "50". The output image quality is the same as "100"
//   is the same as "0" -- the difference lies in the amount of compression applied to the PNG,
//   which slightly affects the time it takes to save the image.
//   Otherwise, ".jpg" is appended to the user's requested filename so that the image is saved in JPG format.
// If the user hasn't supplied a specific filename for the snapshot:
//   Save the snapshot in JPG format at "100" quality according to FILENAME_PATH_FORMAT
int imageQuality = 100;
QString filename;
if (!userSelectedFilename.isNull()) {
filename = userSelectedFilename + ".jpg";
QFileInfo snapshotFileInfo(userSelectedFilename);
QString userSelectedFilenameSuffix = snapshotFileInfo.suffix();
userSelectedFilenameSuffix = userSelectedFilenameSuffix.toLower();
if (SUPPORTED_IMAGE_FORMATS.contains(userSelectedFilenameSuffix)) {
filename = userSelectedFilename;
if (userSelectedFilenameSuffix == "png") {
imageQuality = 50;
}
} else {
filename = userSelectedFilename + ".jpg";
}
} else {
filename = FILENAME_PATH_FORMAT.arg(username, now.toString(DATETIME_FORMAT));
}
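A condensed, illustrative sketch of the selection above (the helper name is hypothetical, not part of the Snapshot class); Qt infers the output format from the file suffix, and for PNG the quality value only tunes compression effort:

#include <QFileInfo>
#include <QString>
#include <QStringList>

static QString resolveSnapshotFilename(const QString& requested, int& imageQuality) {
    static const QStringList supported = { "jpg", "jpeg", "png" };
    imageQuality = 100;                                  // default, used for JPEG output
    const QString suffix = QFileInfo(requested).suffix().toLower();
    if (supported.contains(suffix)) {
        if (suffix == "png") {
            imageQuality = 50;                           // selects PNG compression effort, not visual quality
        }
        return requested;                                // keep the name; Qt saves in the suffix's format
    }
    return requested + ".jpg";                           // unsupported or missing suffix: force JPEG
}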

const int IMAGE_QUALITY = 100;

if (!isTemporary) {
// If user has requested specific path then use it, else use the application value
QString snapshotFullPath;
@ -372,11 +399,13 @@ QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary, const QSt
|
|||
}
|
||||
|
||||
if (snapshotFullPath.isEmpty()) {
|
||||
snapshotFullPath = OffscreenUi::getExistingDirectory(nullptr, "Choose Snapshots Directory", QStandardPaths::writableLocation(QStandardPaths::DesktopLocation));
|
||||
snapshotFullPath =
|
||||
OffscreenUi::getExistingDirectory(nullptr, "Choose Snapshots Directory",
|
||||
QStandardPaths::writableLocation(QStandardPaths::DesktopLocation));
|
||||
_snapshotsLocation.set(snapshotFullPath);
|
||||
}
|
||||
|
||||
if (!snapshotFullPath.isEmpty()) { // not cancelled
|
||||
if (!snapshotFullPath.isEmpty()) { // not cancelled
|
||||
|
||||
if (!snapshotFullPath.endsWith(QDir::separator())) {
|
||||
snapshotFullPath.append(QDir::separator());
|
||||
|
@ -393,7 +422,9 @@ QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary, const QSt
|
|||
qApp->getApplicationCompositor().getReticleInterface()->setVisible(true);
|
||||
qApp->getApplicationCompositor().getReticleInterface()->setAllowMouseCapture(true);
|
||||
|
||||
snapshotFullPath = OffscreenUi::getExistingDirectory(nullptr, "Write Error - Choose New Snapshots Directory", QStandardPaths::writableLocation(QStandardPaths::DesktopLocation));
|
||||
snapshotFullPath =
|
||||
OffscreenUi::getExistingDirectory(nullptr, "Write Error - Choose New Snapshots Directory",
|
||||
QStandardPaths::writableLocation(QStandardPaths::DesktopLocation));
|
||||
if (snapshotFullPath.isEmpty()) {
|
||||
return NULL;
|
||||
}
|
||||
|
@ -407,12 +438,11 @@ QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary, const QSt
|
|||
imageFile = new QFile(snapshotFullPath);
|
||||
}
|
||||
|
||||
shot.save(imageFile, 0, IMAGE_QUALITY);
|
||||
shot.save(imageFile, 0, imageQuality);
|
||||
imageFile->close();
|
||||
|
||||
return imageFile;
|
||||
}
|
||||
|
||||
}
|
||||
// Either we were asked for a temporary, or the user didn't set a directory.
|
||||
QTemporaryFile* imageTempFile = new QTemporaryFile(QDir::tempPath() + "/XXXXXX-" + filename);
|
||||
|
@ -423,14 +453,13 @@ QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary, const QSt
|
|||
}
|
||||
imageTempFile->setAutoRemove(isTemporary);
|
||||
|
||||
shot.save(imageTempFile, 0, IMAGE_QUALITY);
|
||||
shot.save(imageTempFile, 0, imageQuality);
|
||||
imageTempFile->close();
|
||||
|
||||
return imageTempFile;
|
||||
}
|
||||
|
||||
void Snapshot::uploadSnapshot(const QString& filename, const QUrl& href) {
|
||||
|
||||
const QString SNAPSHOT_UPLOAD_URL = "/api/v1/snapshots";
|
||||
QUrl url = href;
|
||||
if (url.isEmpty()) {
|
||||
|
@ -444,7 +473,7 @@ void Snapshot::uploadSnapshot(const QString& filename, const QUrl& href) {
|
|||
url = QUrl(DependencyManager::get<AddressManager>()->currentShareableAddress());
|
||||
}
|
||||
SnapshotUploader* uploader = new SnapshotUploader(url, filename);
|
||||
|
||||
|
||||
QFile* file = new QFile(filename);
|
||||
Q_ASSERT(file->exists());
|
||||
file->open(QIODevice::ReadOnly);
|
||||
|
@ -458,20 +487,16 @@ void Snapshot::uploadSnapshot(const QString& filename, const QUrl& href) {
|
|||
imagePart.setHeader(QNetworkRequest::ContentDispositionHeader,
|
||||
QVariant("form-data; name=\"image\"; filename=\"" + file->fileName() + "\""));
|
||||
imagePart.setBodyDevice(file);
|
||||
|
||||
|
||||
QHttpMultiPart* multiPart = new QHttpMultiPart(QHttpMultiPart::FormDataType);
|
||||
file->setParent(multiPart); // we cannot delete the file now, so delete it with the multiPart
|
||||
file->setParent(multiPart); // we cannot delete the file now, so delete it with the multiPart
|
||||
multiPart->append(imagePart);
|
||||
|
||||
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
JSONCallbackParameters callbackParams(uploader, "uploadSuccess", uploader, "uploadFailure");
|
||||
|
||||
accountManager->sendRequest(SNAPSHOT_UPLOAD_URL,
|
||||
AccountManagerAuth::Required,
|
||||
QNetworkAccessManager::PostOperation,
|
||||
callbackParams,
|
||||
nullptr,
|
||||
multiPart);
|
||||
accountManager->sendRequest(SNAPSHOT_UPLOAD_URL, AccountManagerAuth::Required, QNetworkAccessManager::PostOperation,
|
||||
callbackParams, nullptr, multiPart);
|
||||
}
|
||||
|
||||
QString Snapshot::getSnapshotsLocation() {
|
||||
|
|
|
@ -41,8 +41,6 @@
|
|||
|
||||
Q_LOGGING_CATEGORY(trace_render_overlays, "trace.render.overlays")
|
||||
|
||||
extern void initOverlay3DPipelines(render::ShapePlumber& plumber, bool depthTest = false);
|
||||
|
||||
Overlays::Overlays() {
|
||||
auto pointerManager = DependencyManager::get<PointerManager>();
|
||||
connect(pointerManager.data(), &PointerManager::hoverBeginOverlay, this, &Overlays::hoverEnterPointerEvent);
|
||||
|
|
|
@ -861,7 +861,6 @@ bool Avatar::shouldRenderHead(const RenderArgs* renderArgs) const {
|
|||
return true;
|
||||
}
|
||||
|
||||
// virtual
|
||||
void Avatar::simulateAttachments(float deltaTime) {
|
||||
assert(_attachmentModels.size() == _attachmentModelsTexturesLoaded.size());
|
||||
PerformanceTimer perfTimer("attachments");
|
||||
|
@ -1544,12 +1543,13 @@ void Avatar::updateDisplayNameAlpha(bool showDisplayName) {
|
|||
}
|
||||
}
|
||||
|
||||
// virtual
|
||||
void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
|
||||
float uniformScale = getModelScale();
|
||||
shapeInfo.setCapsuleY(uniformScale * _skeletonModel->getBoundingCapsuleRadius(),
|
||||
0.5f * uniformScale * _skeletonModel->getBoundingCapsuleHeight());
|
||||
shapeInfo.setOffset(uniformScale * _skeletonModel->getBoundingCapsuleOffset());
|
||||
float radius = glm::max(MIN_AVATAR_RADIUS, uniformScale * _skeletonModel->getBoundingCapsuleRadius());
|
||||
float height = glm::max(MIN_AVATAR_HEIGHT, uniformScale * _skeletonModel->getBoundingCapsuleHeight());
|
||||
shapeInfo.setCapsuleY(radius, 0.5f * height);
|
||||
glm::vec3 offset = uniformScale * _skeletonModel->getBoundingCapsuleOffset();
|
||||
shapeInfo.setOffset(offset);
|
||||
}
|
||||
|
||||
void Avatar::getCapsule(glm::vec3& start, glm::vec3& end, float& radius) {
|
||||
|
@@ -1572,9 +1572,8 @@ float Avatar::computeMass() {
return _density * TWO_PI * radius * radius * (glm::length(end - start) + 2.0f * radius / 3.0f);
}
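Spelled out, not part of the diff: with \rho = _density, r = radius, and L = \lVert end - start \rVert from getCapsule(), the value returned above is

m = \rho \cdot 2\pi r^{2}\left(L + \tfrac{2r}{3}\right) = \rho\left(2\pi r^{2} L + \tfrac{4}{3}\pi r^{3}\right),

where \tfrac{4}{3}\pi r^{3} is the combined volume of the two hemispherical end caps of the capsule. Adding Simulation::DIRTY_MASS in rebuildCollisionShape() below presumably makes the physics engine re-query this mass whenever the bounding capsule changes.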

// virtual
void Avatar::rebuildCollisionShape() {
addPhysicsFlags(Simulation::DIRTY_SHAPE);
addPhysicsFlags(Simulation::DIRTY_SHAPE | Simulation::DIRTY_MASS);
}

void Avatar::setPhysicsCallback(AvatarPhysicsCallback cb) {
@ -83,6 +83,7 @@ public:
|
|||
connect(qApp, &QCoreApplication::aboutToQuit, [this] {
|
||||
shutdown();
|
||||
});
|
||||
setObjectName("Present");
|
||||
}
|
||||
|
||||
~PresentThread() {
|
||||
|
|
|
@ -110,8 +110,8 @@ public:
|
|||
MotionStateType getType() const { return _type; }
|
||||
virtual PhysicsMotionType getMotionType() const { return _motionType; }
|
||||
|
||||
void setMass(float mass);
|
||||
float getMass() const;
|
||||
virtual void setMass(float mass);
|
||||
virtual float getMass() const;
|
||||
|
||||
void setBodyLinearVelocity(const glm::vec3& velocity) const;
|
||||
void setBodyAngularVelocity(const glm::vec3& velocity) const;
|
||||
|
|
|
@@ -105,9 +105,9 @@ void PhysicsEngine::addObjectToDynamicsWorld(ObjectMotionState* motionState) {
}
case MOTION_TYPE_DYNAMIC: {
mass = motionState->getMass();
if (mass != mass || mass < 1.0f) {
qCDebug(physics) << "mass is too low, setting to 1.0 Kg --" << mass;
mass = 1.0f;
const float MIN_DYNAMIC_MASS = 0.01f;
if (mass != mass || mass < MIN_DYNAMIC_MASS) {
mass = MIN_DYNAMIC_MASS;
}
btCollisionShape* shape = const_cast<btCollisionShape*>(motionState->getShape());
assert(shape);
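Not part of the diff: the mass != mass comparison above is the standard self-comparison test for NaN (NaN is the only floating-point value that compares unequal to itself); std::isnan expresses the same check by name. A minimal sketch of the clamping behavior introduced here:

#include <cmath>
#include <limits>
#include <cstdio>

int main() {
    const float MIN_DYNAMIC_MASS = 0.01f;                  // value introduced by this change
    float mass = std::numeric_limits<float>::quiet_NaN();  // e.g. from a degenerate shape
    // "mass != mass" is true only for NaN, equivalent to std::isnan(mass).
    if (mass != mass || mass < MIN_DYNAMIC_MASS) {
        mass = MIN_DYNAMIC_MASS;                           // clamp instead of logging and forcing 1.0 kg
    }
    printf("mass = %f\n", mass);
    return 0;
}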
@ -286,6 +286,7 @@ void DrawHighlight::run(const render::RenderContextPointer& renderContext, const
|
|||
shaderParameters._size.y = size;
|
||||
}
|
||||
|
||||
auto primaryFramebuffer = inputs.get4();
|
||||
gpu::doInBatch("DrawHighlight::run", args->_context, [&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setFramebuffer(destinationFrameBuffer);
|
||||
|
@ -301,6 +302,9 @@ void DrawHighlight::run(const render::RenderContextPointer& renderContext, const
|
|||
batch.setResourceTexture(SCENE_DEPTH_MAP_SLOT, sceneDepthBuffer->getPrimaryDepthTexture());
|
||||
batch.setResourceTexture(HIGHLIGHTED_DEPTH_MAP_SLOT, highlightedDepthTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
// Reset the framebuffer for overlay drawing
|
||||
batch.setFramebuffer(primaryFramebuffer);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -360,6 +364,7 @@ void DebugHighlight::run(const render::RenderContextPointer& renderContext, cons
|
|||
RenderArgs* args = renderContext->args;
|
||||
const auto jitter = input.get2();
|
||||
|
||||
auto primaryFramebuffer = input.get3();
|
||||
gpu::doInBatch("DebugHighlight::run", args->_context, [&](gpu::Batch& batch) {
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
batch.setFramebuffer(highlightRessources->getColorFramebuffer());
|
||||
|
@ -384,6 +389,9 @@ void DebugHighlight::run(const render::RenderContextPointer& renderContext, cons
|
|||
geometryBuffer->renderQuad(batch, bottomLeft, topRight, color, _geometryDepthId);
|
||||
|
||||
batch.setResourceTexture(0, nullptr);
|
||||
|
||||
// Reset the framebuffer for overlay drawing
|
||||
batch.setFramebuffer(primaryFramebuffer);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -532,12 +540,12 @@ void DrawHighlightTask::build(JobModel& task, const render::Varying& inputs, ren
|
|||
stream << "HighlightEffect" << i;
|
||||
name = stream.str();
|
||||
}
|
||||
const auto drawHighlightInputs = DrawHighlight::Inputs(deferredFrameTransform, highlightRessources, sceneFrameBuffer, highlightedRect).asVarying();
|
||||
const auto drawHighlightInputs = DrawHighlight::Inputs(deferredFrameTransform, highlightRessources, sceneFrameBuffer, highlightedRect, primaryFramebuffer).asVarying();
|
||||
task.addJob<DrawHighlight>(name, drawHighlightInputs, i, sharedParameters);
|
||||
}
|
||||
|
||||
// Debug highlight
|
||||
const auto debugInputs = DebugHighlight::Inputs(highlightRessources, const_cast<const render::Varying&>(highlight0Rect), jitter).asVarying();
|
||||
const auto debugInputs = DebugHighlight::Inputs(highlightRessources, const_cast<const render::Varying&>(highlight0Rect), jitter, primaryFramebuffer).asVarying();
|
||||
task.addJob<DebugHighlight>("HighlightDebug", debugInputs);
|
||||
}
|
||||
|
||||
|
|
|
@ -135,7 +135,7 @@ protected:
|
|||
class DrawHighlight {
|
||||
public:
|
||||
|
||||
using Inputs = render::VaryingSet4<DeferredFrameTransformPointer, HighlightRessourcesPointer, DeferredFramebufferPointer, glm::ivec4>;
|
||||
using Inputs = render::VaryingSet5<DeferredFrameTransformPointer, HighlightRessourcesPointer, DeferredFramebufferPointer, glm::ivec4, gpu::FramebufferPointer>;
|
||||
using Config = render::Job::Config;
|
||||
using JobModel = render::Job::ModelI<DrawHighlight, Inputs, Config>;
|
||||
|
||||
|
@ -182,7 +182,7 @@ signals:
|
|||
|
||||
class DebugHighlight {
|
||||
public:
|
||||
using Inputs = render::VaryingSet3<HighlightRessourcesPointer, glm::ivec4, glm::vec2>;
|
||||
using Inputs = render::VaryingSet4<HighlightRessourcesPointer, glm::ivec4, glm::vec2, gpu::FramebufferPointer>;
|
||||
using Config = DebugHighlightConfig;
|
||||
using JobModel = render::Job::ModelI<DebugHighlight, Inputs, Config>;
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
#include "RenderUtilsLogging.h"
|
||||
|
||||
using namespace render;
|
||||
extern void initOverlay3DPipelines(render::ShapePlumber& plumber, bool depthTest = false);
|
||||
extern void initForwardPipelines(ShapePlumber& plumber);
|
||||
|
||||
void BeginGPURangeTimer::run(const render::RenderContextPointer& renderContext, gpu::RangeTimerPointer& timer) {
|
||||
timer = _gpuTimer;
|
||||
|
@ -35,7 +35,7 @@ void EndGPURangeTimer::run(const render::RenderContextPointer& renderContext, co
|
|||
DrawOverlay3D::DrawOverlay3D(bool opaque) :
|
||||
_shapePlumber(std::make_shared<ShapePlumber>()),
|
||||
_opaquePass(opaque) {
|
||||
initOverlay3DPipelines(*_shapePlumber);
|
||||
initForwardPipelines(*_shapePlumber);
|
||||
}
|
||||
|
||||
void DrawOverlay3D::run(const RenderContextPointer& renderContext, const Inputs& inputs) {
|
||||
|
@ -60,7 +60,7 @@ void DrawOverlay3D::run(const RenderContextPointer& renderContext, const Inputs&
|
|||
if (_opaquePass) {
|
||||
gpu::doInBatch("DrawOverlay3D::run::clear", args->_context, [&](gpu::Batch& batch){
|
||||
batch.enableStereo(false);
|
||||
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_DEPTH, glm::vec4(), 1.f, 0, false);
|
||||
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_DEPTHSTENCIL, glm::vec4(), 1.f, 0, false);
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -53,7 +53,6 @@
|
|||
#include <sstream>
|
||||
|
||||
using namespace render;
|
||||
extern void initOverlay3DPipelines(render::ShapePlumber& plumber, bool depthTest = false);
|
||||
extern void initDeferredPipelines(render::ShapePlumber& plumber, const render::ShapePipeline::BatchSetter& batchSetter, const render::ShapePipeline::ItemSetter& itemSetter);
|
||||
|
||||
RenderDeferredTask::RenderDeferredTask()
|
||||
|
|
|
@ -37,10 +37,7 @@
|
|||
#include "nop_frag.h"
|
||||
|
||||
using namespace render;
|
||||
extern void initForwardPipelines(ShapePlumber& plumber,
|
||||
const render::ShapePipeline::BatchSetter& batchSetter,
|
||||
const render::ShapePipeline::ItemSetter& itemSetter);
|
||||
extern void initOverlay3DPipelines(render::ShapePlumber& plumber, bool depthTest = false);
|
||||
extern void initForwardPipelines(ShapePlumber& plumber);
|
||||
|
||||
void RenderForwardTask::build(JobModel& task, const render::Varying& input, render::Varying& output) {
|
||||
auto items = input.get<Input>();
|
||||
|
@ -48,8 +45,7 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
|
|||
|
||||
// Prepare the ShapePipelines
|
||||
ShapePlumberPointer shapePlumber = std::make_shared<ShapePlumber>();
|
||||
initForwardPipelines(*shapePlumber, fadeEffect->getBatchSetter(), fadeEffect->getItemUniformSetter());
|
||||
initOverlay3DPipelines(*shapePlumber);
|
||||
initForwardPipelines(*shapePlumber);
|
||||
|
||||
// Extract opaques / transparents / lights / metas / overlays / background
|
||||
const auto& opaques = items.get0()[RenderFetchCullSortTask::OPAQUE_SHAPE];
|
||||
|
|
|
@ -80,16 +80,6 @@
|
|||
#include "model_translucent_unlit_fade_frag.h"
|
||||
#include "model_translucent_normal_map_fade_frag.h"
|
||||
|
||||
#include "overlay3D_vert.h"
|
||||
#include "overlay3D_frag.h"
|
||||
#include "overlay3D_model_frag.h"
|
||||
#include "overlay3D_model_translucent_frag.h"
|
||||
#include "overlay3D_translucent_frag.h"
|
||||
#include "overlay3D_unlit_frag.h"
|
||||
#include "overlay3D_translucent_unlit_frag.h"
|
||||
#include "overlay3D_model_unlit_frag.h"
|
||||
#include "overlay3D_model_translucent_unlit_frag.h"
|
||||
|
||||
#include "model_shadow_vert.h"
|
||||
#include "skin_model_shadow_vert.h"
|
||||
#include "skin_model_shadow_dq_vert.h"
|
||||
|
@ -104,12 +94,16 @@
|
|||
#include "model_shadow_fade_frag.h"
|
||||
#include "skin_model_shadow_fade_frag.h"
|
||||
|
||||
#include "simple_vert.h"
|
||||
#include "forward_simple_textured_frag.h"
|
||||
#include "forward_simple_textured_transparent_frag.h"
|
||||
#include "forward_simple_textured_unlit_frag.h"
|
||||
|
||||
using namespace render;
|
||||
using namespace std::placeholders;
|
||||
|
||||
void initOverlay3DPipelines(ShapePlumber& plumber, bool depthTest = false);
|
||||
void initDeferredPipelines(ShapePlumber& plumber, const render::ShapePipeline::BatchSetter& batchSetter, const render::ShapePipeline::ItemSetter& itemSetter);
|
||||
void initForwardPipelines(ShapePlumber& plumber, const render::ShapePipeline::BatchSetter& batchSetter, const render::ShapePipeline::ItemSetter& itemSetter);
|
||||
void initForwardPipelines(ShapePlumber& plumber);
|
||||
void initZPassPipelines(ShapePlumber& plumber, gpu::StatePointer state);
|
||||
|
||||
void addPlumberPipeline(ShapePlumber& plumber,
|
||||
|
@ -120,71 +114,6 @@ void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch, RenderArgs* a
|
|||
void lightBatchSetter(const ShapePipeline& pipeline, gpu::Batch& batch, RenderArgs* args);
|
||||
static bool forceLightBatchSetter{ false };
|
||||
|
||||
void initOverlay3DPipelines(ShapePlumber& plumber, bool depthTest) {
|
||||
auto vertex = overlay3D_vert::getShader();
|
||||
auto vertexModel = model_vert::getShader();
|
||||
auto pixel = overlay3D_frag::getShader();
|
||||
auto pixelTranslucent = overlay3D_translucent_frag::getShader();
|
||||
auto pixelUnlit = overlay3D_unlit_frag::getShader();
|
||||
auto pixelTranslucentUnlit = overlay3D_translucent_unlit_frag::getShader();
|
||||
auto pixelModel = overlay3D_model_frag::getShader();
|
||||
auto pixelModelTranslucent = overlay3D_model_translucent_frag::getShader();
|
||||
auto pixelModelUnlit = overlay3D_model_unlit_frag::getShader();
|
||||
auto pixelModelTranslucentUnlit = overlay3D_model_translucent_unlit_frag::getShader();
|
||||
|
||||
auto opaqueProgram = gpu::Shader::createProgram(vertex, pixel);
|
||||
auto translucentProgram = gpu::Shader::createProgram(vertex, pixelTranslucent);
|
||||
auto unlitOpaqueProgram = gpu::Shader::createProgram(vertex, pixelUnlit);
|
||||
auto unlitTranslucentProgram = gpu::Shader::createProgram(vertex, pixelTranslucentUnlit);
|
||||
auto materialOpaqueProgram = gpu::Shader::createProgram(vertexModel, pixelModel);
|
||||
auto materialTranslucentProgram = gpu::Shader::createProgram(vertexModel, pixelModelTranslucent);
|
||||
auto materialUnlitOpaqueProgram = gpu::Shader::createProgram(vertexModel, pixelModel);
|
||||
auto materialUnlitTranslucentProgram = gpu::Shader::createProgram(vertexModel, pixelModelTranslucent);
|
||||
|
||||
for (int i = 0; i < 8; i++) {
|
||||
bool isCulled = (i & 1);
|
||||
bool isBiased = (i & 2);
|
||||
bool isOpaque = (i & 4);
|
||||
|
||||
auto state = std::make_shared<gpu::State>();
|
||||
if (depthTest) {
|
||||
state->setDepthTest(true, true, gpu::LESS_EQUAL);
|
||||
} else {
|
||||
state->setDepthTest(false);
|
||||
}
|
||||
state->setCullMode(isCulled ? gpu::State::CULL_BACK : gpu::State::CULL_NONE);
|
||||
if (isBiased) {
|
||||
state->setDepthBias(1.0f);
|
||||
state->setDepthBiasSlopeScale(1.0f);
|
||||
}
|
||||
if (isOpaque) {
|
||||
// Soft edges
|
||||
state->setBlendFunction(true,
|
||||
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
|
||||
} else {
|
||||
state->setBlendFunction(true,
|
||||
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
|
||||
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
|
||||
}
|
||||
|
||||
ShapeKey::Filter::Builder builder;
|
||||
|
||||
isCulled ? builder.withCullFace() : builder.withoutCullFace();
|
||||
isBiased ? builder.withDepthBias() : builder.withoutDepthBias();
|
||||
isOpaque ? builder.withOpaque() : builder.withTranslucent();
|
||||
|
||||
auto simpleProgram = isOpaque ? opaqueProgram : translucentProgram;
|
||||
auto unlitProgram = isOpaque ? unlitOpaqueProgram : unlitTranslucentProgram;
|
||||
auto materialProgram = isOpaque ? materialOpaqueProgram : materialTranslucentProgram;
|
||||
auto materialUnlitProgram = isOpaque ? materialUnlitOpaqueProgram : materialUnlitTranslucentProgram;
|
||||
|
||||
plumber.addPipeline(builder.withMaterial().build().key(), materialProgram, state, &lightBatchSetter);
|
||||
plumber.addPipeline(builder.withMaterial().withUnlit().build().key(), materialUnlitProgram, state, &batchSetter);
|
||||
plumber.addPipeline(builder.withoutUnlit().withoutMaterial().build().key(), simpleProgram, state, &lightBatchSetter);
|
||||
plumber.addPipeline(builder.withUnlit().withoutMaterial().build().key(), unlitProgram, state, &batchSetter);
|
||||
}
|
||||
}
|
||||
|
||||
void initDeferredPipelines(render::ShapePlumber& plumber, const render::ShapePipeline::BatchSetter& batchSetter, const render::ShapePipeline::ItemSetter& itemSetter) {
|
||||
// Vertex shaders
|
||||
auto simpleVertex = simple_vert::getShader();
|
||||
|
@ -432,8 +361,9 @@ void initDeferredPipelines(render::ShapePlumber& plumber, const render::ShapePip
|
|||
skinModelShadowFadeDualQuatVertex, modelShadowFadePixel, batchSetter, itemSetter);
|
||||
}
|
||||
|
||||
void initForwardPipelines(ShapePlumber& plumber, const render::ShapePipeline::BatchSetter& batchSetter, const render::ShapePipeline::ItemSetter& itemSetter) {
|
||||
void initForwardPipelines(ShapePlumber& plumber) {
|
||||
// Vertex shaders
|
||||
auto simpleVertex = simple_vert::getShader();
|
||||
auto modelVertex = model_vert::getShader();
|
||||
auto modelNormalMapVertex = model_normal_map_vert::getShader();
|
||||
auto skinModelVertex = skin_model_vert::getShader();
|
||||
|
@ -443,6 +373,10 @@ void initForwardPipelines(ShapePlumber& plumber, const render::ShapePipeline::Ba
|
|||
auto skinModelNormalMapDualQuatVertex = skin_model_normal_map_dq_vert::getShader();
|
||||
|
||||
// Pixel shaders
|
||||
auto simplePixel = forward_simple_textured_frag::getShader();
|
||||
auto simpleTranslucentPixel = forward_simple_textured_transparent_frag::getShader();
|
||||
auto simpleUnlitPixel = forward_simple_textured_unlit_frag::getShader();
|
||||
auto simpleTranslucentUnlitPixel = simple_transparent_textured_unlit_frag::getShader();
|
||||
auto modelPixel = forward_model_frag::getShader();
|
||||
auto modelUnlitPixel = forward_model_unlit_frag::getShader();
|
||||
auto modelNormalMapPixel = forward_model_normal_map_frag::getShader();
|
||||
|
@ -458,8 +392,15 @@ void initForwardPipelines(ShapePlumber& plumber, const render::ShapePipeline::Ba
|
|||
};
|
||||
|
||||
// Forward pipelines need the lightBatchSetter for opaques and transparents
|
||||
// forceLightBatchSetter = true;
|
||||
forceLightBatchSetter = false;
|
||||
forceLightBatchSetter = true;
|
||||
|
||||
// Simple Opaques
|
||||
addPipeline(Key::Builder(), simpleVertex, simplePixel);
|
||||
addPipeline(Key::Builder().withUnlit(), simpleVertex, simpleUnlitPixel);
|
||||
|
||||
// Simple Translucents
|
||||
addPipeline(Key::Builder().withTranslucent(), simpleVertex, simpleTranslucentPixel);
|
||||
addPipeline(Key::Builder().withTranslucent().withUnlit(), simpleVertex, simpleTranslucentUnlitPixel);
|
||||
|
||||
// Opaques
|
||||
addPipeline(Key::Builder().withMaterial(), modelVertex, modelPixel);
|
||||
|
@ -501,7 +442,7 @@ void addPlumberPipeline(ShapePlumber& plumber,
|
|||
bool isWireframed = (i & 4);
|
||||
|
||||
auto state = std::make_shared<gpu::State>();
|
||||
PrepareStencil::testMaskDrawShape(*state);
|
||||
key.isTranslucent() ? PrepareStencil::testMask(*state) : PrepareStencil::testMaskDrawShape(*state);
|
||||
|
||||
// Depth test depends on transparency
|
||||
state->setDepthTest(true, !key.isTranslucent(), gpu::LESS_EQUAL);
|
||||
|
|
|
@ -1,92 +0,0 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
// overlay3D.slf
|
||||
// fragment shader
|
||||
//
|
||||
// Created by Sam Gateau on 6/16/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
<@include graphics/Light.slh@>
|
||||
<$declareLightBuffer()$>
|
||||
<$declareLightAmbientBuffer()$>
|
||||
|
||||
<@include LightingModel.slh@>
|
||||
|
||||
<@include LightDirectional.slh@>
|
||||
<$declareLightingDirectional()$>
|
||||
|
||||
<@include gpu/Transform.slh@>
|
||||
<$declareStandardCameraTransform()$>
|
||||
|
||||
vec4 evalGlobalColor(float shadowAttenuation, vec3 position, vec3 normal, vec3 albedo, float metallic, vec3 fresnel, float roughness, float opacity) {
|
||||
|
||||
// Need the light now
|
||||
Light light = getKeyLight();
|
||||
vec3 lightDirection = getLightDirection(light);
|
||||
vec3 lightIrradiance = getLightIrradiance(light);
|
||||
|
||||
LightAmbient ambient = getLightAmbient();
|
||||
|
||||
TransformCamera cam = getTransformCamera();
|
||||
vec3 fragEyeVectorView = normalize(-position);
|
||||
vec3 fragEyeDir;
|
||||
<$transformEyeToWorldDir(cam, fragEyeVectorView, fragEyeDir)$>
|
||||
|
||||
SurfaceData surface = initSurfaceData(roughness, normal, fragEyeDir);
|
||||
|
||||
vec3 color = opacity * albedo * getLightColor(light) * getLightAmbientIntensity(ambient);
|
||||
|
||||
// Directional
|
||||
vec3 directionalDiffuse;
|
||||
vec3 directionalSpecular;
|
||||
evalLightingDirectional(directionalDiffuse, directionalSpecular, lightDirection, lightIrradiance, surface, metallic, fresnel, albedo, shadowAttenuation);
|
||||
color += directionalDiffuse * isDiffuseEnabled() * isDirectionalEnabled();
|
||||
color += directionalSpecular * isSpecularEnabled() * isDirectionalEnabled();
|
||||
|
||||
return vec4(color, opacity);
|
||||
}
|
||||
|
||||
uniform sampler2D originalTexture;
|
||||
|
||||
in vec2 _texCoord0;
|
||||
in vec4 _positionES;
|
||||
in vec3 _normalWS;
|
||||
in vec3 _color;
|
||||
in float _alpha;
|
||||
|
||||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
vec4 albedo = texture(originalTexture, _texCoord0);
|
||||
|
||||
vec3 fragPosition = _positionES.xyz;
|
||||
vec3 fragNormal = normalize(_normalWS);
|
||||
vec3 fragAlbedo = albedo.rgb * _color;
|
||||
float fragMetallic = 0.0;
|
||||
vec3 fragSpecular = vec3(0.1);
|
||||
float fragRoughness = 0.9;
|
||||
float fragOpacity = albedo.a;
|
||||
|
||||
if (fragOpacity <= 0.1) {
|
||||
discard;
|
||||
}
|
||||
|
||||
vec4 color = evalGlobalColor(1.0,
|
||||
fragPosition,
|
||||
fragNormal,
|
||||
fragAlbedo,
|
||||
fragMetallic,
|
||||
fragSpecular,
|
||||
fragRoughness,
|
||||
fragOpacity);
|
||||
|
||||
|
||||
// Apply standard tone mapping
|
||||
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
|
||||
}
|
|
@ -1,36 +0,0 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
// overlay3D.slv
|
||||
// vertex shader
|
||||
//
|
||||
// Created by Sam Gateau on 6/16/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include gpu/Inputs.slh@>
|
||||
<@include gpu/Color.slh@>
|
||||
<@include gpu/Transform.slh@>
|
||||
<$declareStandardTransform()$>
|
||||
|
||||
out vec3 _color;
|
||||
out float _alpha;
|
||||
out vec2 _texCoord0;
|
||||
out vec4 _positionES;
|
||||
out vec3 _normalWS;
|
||||
|
||||
void main(void) {
|
||||
_color = color_sRGBToLinear(inColor.xyz);
|
||||
_alpha = inColor.w;
|
||||
|
||||
_texCoord0 = inTexCoord0.st;
|
||||
|
||||
// standard transform
|
||||
TransformCamera cam = getTransformCamera();
|
||||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToEyeAndClipPos(cam, obj, inPosition, _positionES, gl_Position)$>
|
||||
<$transformModelToWorldDir(cam, obj, inNormal.xyz, _normalWS)$>
|
||||
}
|
|
@ -1,79 +0,0 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
// overlay3D.slf
|
||||
// fragment shader
|
||||
//
|
||||
// Created by Sam Gateau on 6/16/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include DeferredGlobalLight.slh@>
|
||||
<$declareEvalSkyboxGlobalColor()$>
|
||||
|
||||
<@include graphics/Material.slh@>
|
||||
|
||||
<@include gpu/Transform.slh@>
|
||||
<$declareStandardCameraTransform()$>
|
||||
|
||||
<@include MaterialTextures.slh@>
|
||||
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>
|
||||
|
||||
in vec2 _texCoord0;
|
||||
in vec2 _texCoord1;
|
||||
in vec4 _positionES;
|
||||
in vec3 _normalWS;
|
||||
in vec3 _color;
|
||||
in float _alpha;
|
||||
|
||||
out vec4 _fragColor;
|
||||
|
||||
void main(void) {
|
||||
Material mat = getMaterial();
|
||||
BITFIELD matKey = getMaterialKey(mat);
|
||||
<$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
|
||||
<$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>
|
||||
|
||||
float opacity = 1.0;
|
||||
<$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
|
||||
<$discardTransparent(opacity)$>;
|
||||
|
||||
vec3 albedo = getMaterialAlbedo(mat);
|
||||
<$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
|
||||
albedo *= _color;
|
||||
|
||||
float metallic = getMaterialMetallic(mat);
|
||||
vec3 fresnel = getFresnelF0(metallic, albedo);
|
||||
|
||||
float roughness = getMaterialRoughness(mat);
|
||||
<$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;
|
||||
|
||||
vec3 emissive = getMaterialEmissive(mat);
|
||||
<$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
|
||||
|
||||
|
||||
vec3 fragPosition = _positionES.xyz;
|
||||
|
||||
TransformCamera cam = getTransformCamera();
|
||||
|
||||
vec4 color = vec4(evalSkyboxGlobalColor(
|
||||
cam._viewInverse,
|
||||
1.0,
|
||||
occlusionTex,
|
||||
fragPosition,
|
||||
normalize(_normalWS),
|
||||
albedo,
|
||||
fresnel,
|
||||
metallic,
|
||||
roughness),
|
||||
opacity);
|
||||
|
||||
// And emissive
|
||||
color.rgb += emissive * isEmissiveEnabled();
|
||||
|
||||
// Apply standard tone mapping
|
||||
_fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
|
||||
}
|
|
@ -1,72 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D_model_transparent.slf
//
// Created by Sam Gateau on 2/27/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include DeferredGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlendedWithHaze()$>

<@include graphics/Material.slh@>

<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>

<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO, ROUGHNESS, _SCRIBE_NULL, _SCRIBE_NULL, EMISSIVE, OCCLUSION)$>

in vec2 _texCoord0;
in vec2 _texCoord1;
in vec4 _positionES;
in vec3 _normalWS;
in vec3 _color;

out vec4 _fragColor;

void main(void) {
    Material mat = getMaterial();
    BITFIELD matKey = getMaterialKey(mat);
    <$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex, roughnessTex, _SCRIBE_NULL, _SCRIBE_NULL, emissiveTex)$>
    <$fetchMaterialTexturesCoord1(matKey, _texCoord1, occlusionTex)$>

    float opacity = 1.0;
    <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;

    vec3 albedo = getMaterialAlbedo(mat);
    <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
    albedo *= _color;

    float metallic = getMaterialMetallic(mat);
    vec3 fresnel = getFresnelF0(metallic, albedo);

    float roughness = getMaterialRoughness(mat);
    <$evalMaterialRoughness(roughnessTex, roughness, matKey, roughness)$>;

    vec3 emissive = getMaterialEmissive(mat);
    <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;

    vec3 fragPosition = _positionES.xyz;
    TransformCamera cam = getTransformCamera();

    vec4 color = vec4(evalGlobalLightingAlphaBlendedWithHaze(
        cam._viewInverse,
        1.0,
        occlusionTex,
        fragPosition,
        normalize(_normalWS),
        albedo,
        fresnel,
        metallic,
        emissive,
        roughness, opacity),
        opacity);

    // Apply standard tone mapping
    _fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}
@ -1,41 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D-model_transparent_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 2/28/2017.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include LightingModel.slh@>
<@include graphics/Material.slh@>

<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>

in vec2 _texCoord0;
in vec3 _color;

out vec4 _fragColor;

void main(void) {
    Material mat = getMaterial();
    BITFIELD matKey = getMaterialKey(mat);
    <$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>

    float opacity = 1.0;
    <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;

    vec3 albedo = getMaterialAlbedo(mat);
    <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
    albedo *= _color;

    vec4 color = vec4(albedo * isUnlitEnabled(), opacity);

    _fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}
@ -1,42 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// overlay3D-model_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 2/28/2017.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include LightingModel.slh@>
<@include graphics/Material.slh@>

<@include MaterialTextures.slh@>
<$declareMaterialTextures(ALBEDO)$>

in vec2 _texCoord0;
in vec3 _color;

out vec4 _fragColor;

void main(void) {
    Material mat = getMaterial();
    BITFIELD matKey = getMaterialKey(mat);
    <$fetchMaterialTexturesCoord0(matKey, _texCoord0, albedoTex)$>

    float opacity = 1.0;
    <$evalMaterialOpacity(albedoTex.a, opacity, matKey, opacity)$>;
    <$discardTransparent(opacity)$>;

    vec3 albedo = getMaterialAlbedo(mat);
    <$evalMaterialAlbedo(albedoTex, albedo, matKey, albedo)$>;
    albedo *= _color;

    vec4 color = vec4(albedo * isUnlitEnabled(), opacity);

    _fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}
@ -1,87 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// overlay3D_translucent.slf
// fragment shader
//
// Created by Sam Gateau on 6/16/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include graphics/Light.slh@>
<$declareLightBuffer()$>
<$declareLightAmbientBuffer()$>

<@include LightingModel.slh@>

<@include LightDirectional.slh@>
<$declareLightingDirectional()$>

<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>

vec4 evalGlobalColor(float shadowAttenuation, vec3 position, vec3 normal, vec3 albedo, float metallic, vec3 fresnel, float roughness, float opacity) {
    // Need the light now
    Light light = getKeyLight();
    vec3 lightDirection = getLightDirection(light);
    vec3 lightIrradiance = getLightIrradiance(light);

    LightAmbient ambient = getLightAmbient();

    TransformCamera cam = getTransformCamera();
    vec3 fragEyeVectorView = normalize(-position);
    vec3 fragEyeDir;
    <$transformEyeToWorldDir(cam, fragEyeVectorView, fragEyeDir)$>

    SurfaceData surface = initSurfaceData(roughness, normal, fragEyeDir);

    vec3 color = opacity * albedo * getLightColor(light) * getLightAmbientIntensity(ambient);

    // Directional
    vec3 directionalDiffuse;
    vec3 directionalSpecular;
    evalLightingDirectional(directionalDiffuse, directionalSpecular, lightDirection, lightIrradiance, surface, metallic, fresnel, albedo, shadowAttenuation);
    color += directionalDiffuse;
    color += directionalSpecular / opacity;

    return vec4(color, opacity);
}

uniform sampler2D originalTexture;

in vec2 _texCoord0;
in vec4 _positionES;
in vec3 _normalWS;
in vec3 _color;
in float _alpha;

out vec4 _fragColor;

void main(void) {
    vec4 albedo = texture(originalTexture, _texCoord0);

    vec3 fragPosition = _positionES.xyz;
    vec3 fragNormal = normalize(_normalWS);
    vec3 fragAlbedo = albedo.rgb * _color;
    float fragMetallic = 0.0;
    vec3 fragSpecular = vec3(0.1);
    float fragRoughness = 0.9;
    float fragOpacity = albedo.a * _alpha;

    vec4 color = evalGlobalColor(1.0,
        fragPosition,
        fragNormal,
        fragAlbedo,
        fragMetallic,
        fragSpecular,
        fragRoughness,
        fragOpacity);

    // Apply standard tone mapping
    _fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}
@ -1,27 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// overlay3D_translucent_unlit.frag
// fragment shader
//
// Created by Zach Pomerantz on 2/2/2016.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

uniform sampler2D originalTexture;

in vec2 _texCoord0;
in vec3 _color;
in float _alpha;

out vec4 _fragColor;

void main(void) {
    vec4 albedo = texture(originalTexture, _texCoord0);

    _fragColor = vec4(albedo.rgb * _color, albedo.a * _alpha);
}
@ -1,32 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// overlay3D_unlit.frag
// fragment shader
//
// Created by Zach Pomerantz on 2/2/2016.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

uniform sampler2D originalTexture;

in vec2 _texCoord0;
in vec3 _color;

out vec4 _fragColor;

void main(void) {
    vec4 albedo = texture(originalTexture, _texCoord0);

    if (albedo.a <= 0.1) {
        discard;
    }
    vec4 color = vec4(albedo.rgb * _color, albedo.a);

    // Apply standard tone mapping
    _fragColor = vec4(pow(color.xyz, vec3(1.0 / 2.2)), color.w);
}
@ -69,6 +69,7 @@ static const float MIN_AVATAR_SCALE = 0.005f;

static const float MAX_AVATAR_HEIGHT = 1000.0f * DEFAULT_AVATAR_HEIGHT; // meters
static const float MIN_AVATAR_HEIGHT = 0.005f * DEFAULT_AVATAR_HEIGHT; // meters
static const float MIN_AVATAR_RADIUS = 0.5f * MIN_AVATAR_HEIGHT;
static const float AVATAR_WALK_SPEED_SCALAR = 1.0f;
static const float AVATAR_SPRINT_SPEED_SCALAR = 3.0f;
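The height limits above are derived from DEFAULT_AVATAR_HEIGHT. Below is a minimal C++ sketch of how a requested avatar height could be clamped against these bounds; clampAvatarHeight is a hypothetical helper, and the DEFAULT_AVATAR_HEIGHT value shown is a placeholder, not the actual constant from AvatarConstants.h.

#include <algorithm>

// Placeholder for illustration only; the real constant is defined elsewhere in AvatarConstants.h.
static const float DEFAULT_AVATAR_HEIGHT = 1.75f;                        // meters (assumed)
static const float MAX_AVATAR_HEIGHT = 1000.0f * DEFAULT_AVATAR_HEIGHT;  // meters
static const float MIN_AVATAR_HEIGHT = 0.005f * DEFAULT_AVATAR_HEIGHT;   // meters

// Hypothetical helper: keep a requested height inside the allowed range.
float clampAvatarHeight(float requestedHeightMeters) {
    return std::min(std::max(requestedHeightMeters, MIN_AVATAR_HEIGHT), MAX_AVATAR_HEIGHT);
}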
@ -57,9 +57,6 @@
#include <entities-renderer/textured_particle_frag.h>
#include <entities-renderer/textured_particle_vert.h>

#include <render-utils/overlay3D_vert.h>
#include <render-utils/overlay3D_frag.h>

#include <graphics/skybox_vert.h>
#include <graphics/skybox_frag.h>
@ -195,8 +192,6 @@ void QTestWindow::draw() {
    testShaderBuild(ambient_occlusion_vert::getSource(), occlusion_blend_frag::getSource());
    */

    testShaderBuild(overlay3D_vert::getSource(), overlay3D_frag::getSource());

    testShaderBuild(paintStroke_vert::getSource(), paintStroke_frag::getSource());
    testShaderBuild(polyvox_vert::getSource(), polyvox_frag::getSource());
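The hunk above removes the overlay3D vertex/fragment pair from the shader build test. testShaderBuild's implementation is not part of this diff; purely as a sketch of what validating a source pair involves (assuming a current OpenGL context and a loader exposing GL 2.0+ entry points), compiling and linking the pair and checking the link status might look like this:

#include <glad/glad.h>  // assumed GL loader; any loader exposing GL 2.0+ works

// Sketch only: compile a vertex/fragment source pair and report whether it links.
// This is not the project's testShaderBuild.
bool buildsCleanly(const char* vertexSource, const char* fragmentSource) {
    GLuint vs = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vs, 1, &vertexSource, nullptr);
    glCompileShader(vs);

    GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fs, 1, &fragmentSource, nullptr);
    glCompileShader(fs);

    GLuint program = glCreateProgram();
    glAttachShader(program, vs);
    glAttachShader(program, fs);
    glLinkProgram(program);

    GLint linked = GL_FALSE;
    glGetProgramiv(program, GL_LINK_STATUS, &linked);

    glDeleteShader(vs);
    glDeleteShader(fs);
    glDeleteProgram(program);
    return linked == GL_TRUE;
}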