
Merge branch 'master' of http://github.com/highfidelity/hifi into transparent

Author: Olivier Prat
Date:   2018-07-03 09:26:29 +02:00
Commit: 74a9b03a5e
46 changed files with 397 additions and 180 deletions

View file

@ -64,6 +64,7 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
DependencyManager::set<ResourceManager>();
DependencyManager::set<PluginManager>();
DependencyManager::registerInheritance<SpatialParentFinder, AssignmentParentFinder>();
@ -833,6 +834,8 @@ void Agent::aboutToFinish() {
DependencyManager::get<ResourceManager>()->cleanup();
DependencyManager::destroy<PluginManager>();
// cleanup the AudioInjectorManager (and any still running injectors)
DependencyManager::destroy<AudioInjectorManager>();
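
The same set<PluginManager>() / destroy<PluginManager>() lifecycle recurs in every assignment client touched by this merge. A minimal sketch of the pattern, assuming the usual hifi include paths; it only mirrors the calls visible in the hunks above:

#include <DependencyManager.h>
#include <plugins/PluginManager.h>

void setUpPlugins() {
    // Register the shared instance once at startup. Later calls to
    // DependencyManager::get<PluginManager>() return the same object, and
    // PluginManager::getInstance() now forwards to it as well.
    auto pluginManager = DependencyManager::set<PluginManager>();
    auto codecPlugins = pluginManager->getCodecPlugins();
    (void)codecPlugins;
}

void tearDownPlugins() {
    // Drop the shared instance in aboutToFinish() so plugin resources are
    // released before the assignment client exits.
    DependencyManager::destroy<PluginManager>();
}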

View file

@ -65,7 +65,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
// hash the available codecs (on the mixer)
_availableCodecs.clear(); // Make sure struct is clean
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
auto pluginManager = DependencyManager::set<PluginManager>();
auto codecPlugins = pluginManager->getCodecPlugins();
std::for_each(codecPlugins.cbegin(), codecPlugins.cend(),
[&](const CodecPluginPointer& codec) {
_availableCodecs[codec->getName()] = codec;
@ -106,6 +107,10 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
}
void AudioMixer::aboutToFinish() {
DependencyManager::destroy<PluginManager>();
}
void AudioMixer::queueAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer node) {
if (message->getType() == PacketType::SilentAudioFrame) {
_numSilentPackets++;

View file

@ -58,6 +58,9 @@ public:
to.getPublicSocket() != from.getPublicSocket() &&
to.getLocalSocket() != from.getLocalSocket();
}
virtual void aboutToFinish() override;
public slots:
void run() override;
void sendStatsPacket() override;

View file

@ -58,6 +58,7 @@ EntityScriptServer::EntityScriptServer(ReceivedMessage& message) : ThreadedAssig
DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
DependencyManager::set<ResourceManager>();
DependencyManager::set<PluginManager>();
DependencyManager::registerInheritance<SpatialParentFinder, AssignmentParentFinder>();
@ -572,6 +573,8 @@ void EntityScriptServer::aboutToFinish() {
DependencyManager::get<ResourceManager>()->cleanup();
DependencyManager::destroy<PluginManager>();
// cleanup the AudioInjectorManager (and any still running injectors)
DependencyManager::destroy<AudioInjectorManager>();
DependencyManager::destroy<ScriptEngines>();

View file

@ -135,6 +135,8 @@ Item {
placeholderText: qsTr("Password")
echoMode: TextInput.Password
Keys.onReturnPressed: linkAccountBody.login()
}
}

View file

@ -177,6 +177,8 @@ Item {
root.text = "";
root.isPassword = true;
}
Keys.onReturnPressed: linkAccountBody.login()
}
CheckBox {

View file

@ -164,6 +164,8 @@ Item {
root.text = "";
root.isPassword = focus
}
Keys.onReturnPressed: signupBody.signup()
}
Row {

View file

@ -56,8 +56,8 @@ Preference {
id: slider
value: preference.value
width: 100
minimumValue: MyAvatar.getDomainMinScale()
maximumValue: MyAvatar.getDomainMaxScale()
minimumValue: preference.min
maximumValue: preference.max
stepSize: preference.step
onValueChanged: {
spinner.realValue = value
@ -74,8 +74,8 @@ Preference {
id: spinner
decimals: preference.decimals
realValue: preference.value
minimumValue: MyAvatar.getDomainMinScale()
maximumValue: MyAvatar.getDomainMaxScale()
minimumValue: preference.min
maximumValue: preference.max
width: 100
onValueChanged: {
slider.value = realValue;

View file

@ -92,9 +92,9 @@ Rectangle {
onBuyResult: {
if (result.status !== 'success') {
failureErrorText.text = result.message;
failureErrorText.text = result.data.message;
root.activeView = "checkoutFailure";
UserActivityLogger.commercePurchaseFailure(root.itemId, root.itemAuthor, root.itemPrice, !root.alreadyOwned, result.message);
UserActivityLogger.commercePurchaseFailure(root.itemId, root.itemAuthor, root.itemPrice, !root.alreadyOwned, result.data.message);
} else {
root.certificateId = result.data.certificate_id;
root.itemHref = result.data.download_url;

View file

@ -269,9 +269,6 @@ public:
}
_renderContext->doneCurrent();
// Deleting the object with automatically shutdown the thread
connect(qApp, &QCoreApplication::aboutToQuit, this, &QObject::deleteLater);
// Transfer to a new thread
moveToNewNamedThread(this, "RenderThread", [this](QThread* renderThread) {
hifi::qt::addBlockingForbiddenThread("Render", renderThread);
@ -814,6 +811,7 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
}
// Tell the plugin manager about our statically linked plugins
DependencyManager::set<PluginManager>();
auto pluginManager = PluginManager::getInstance();
pluginManager->setInputPluginProvider([] { return getInputPlugins(); });
pluginManager->setDisplayPluginProvider([] { return getDisplayPlugins(); });
@ -1378,6 +1376,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
initializeRenderEngine();
qCDebug(interfaceapp, "Initialized Render Engine.");
// Overlays need to exist before we set the ContextOverlayInterface dependency
_overlays.init(); // do this before scripts load
DependencyManager::set<ContextOverlayInterface>();
// Initialize the user interface and menu system
// Needs to happen AFTER the render engine initialization to access its configuration
initializeUi();
@ -1514,10 +1516,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// allow you to move an entity around in your hand
_entityEditSender.setPacketsPerSecond(3000); // super high!!
// Overlays need to exist before we set the ContextOverlayInterface dependency
_overlays.init(); // do this before scripts load
DependencyManager::set<ContextOverlayInterface>();
// Make sure we don't time out during slow operations at startup
updateHeartbeat();
@ -2555,25 +2553,28 @@ Application::~Application() {
_octreeProcessor.terminate();
_entityEditSender.terminate();
if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) {
steamClient->shutdown();
}
DependencyManager::destroy<PluginManager>();
DependencyManager::destroy<CompositorHelper>(); // must be destroyed before the FramebufferCache
DependencyManager::destroy<AvatarManager>();
DependencyManager::destroy<AnimationCache>();
DependencyManager::destroy<FramebufferCache>();
DependencyManager::destroy<TextureCache>();
DependencyManager::destroy<ModelCache>();
DependencyManager::destroy<GeometryCache>();
DependencyManager::destroy<ScriptCache>();
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<OctreeStatsProvider>();
DependencyManager::destroy<GeometryCache>();
DependencyManager::get<ResourceManager>()->cleanup();
// remove the NodeList from the DependencyManager
DependencyManager::destroy<NodeList>();
if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) {
steamClient->shutdown();
}
#if 0
ConnexionClient::getInstance().destroy();
#endif
@ -2593,6 +2594,8 @@ Application::~Application() {
// Can't log to file passed this point, FileLogger about to be deleted
qInstallMessageHandler(LogHandler::verboseMessageHandler);
_renderEventHandler->deleteLater();
}
void Application::initializeGL() {
@ -2719,7 +2722,7 @@ void Application::initializeDisplayPlugins() {
setDisplayPlugin(defaultDisplayPlugin);
// Now set the desired plugin if it's not the same as the default plugin
if (targetDisplayPlugin != defaultDisplayPlugin) {
if (!targetDisplayPlugin && (targetDisplayPlugin != defaultDisplayPlugin)) {
setDisplayPlugin(targetDisplayPlugin);
}
@ -2893,6 +2896,7 @@ void Application::initializeUi() {
auto compositorHelper = DependencyManager::get<CompositorHelper>();
connect(compositorHelper.data(), &CompositorHelper::allowMouseCaptureChanged, this, [=] {
if (isHMDMode()) {
auto compositorHelper = DependencyManager::get<CompositorHelper>(); // don't capture outer smartpointer
showCursor(compositorHelper->getAllowMouseCapture() ?
Cursor::Manager::lookupIcon(_preferredCursor.get()) :
Cursor::Icon::SYSTEM);
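
The "don't capture outer smartpointer" comment is the key detail in that last hunk: the connection outlives the local shared pointer, so the lambda re-fetches the dependency when it fires instead of capturing it and pinning it alive. A generic sketch of the same idea using only the standard library (the Service type is hypothetical, not hifi code):

#include <cstdio>
#include <functional>
#include <memory>

struct Service {
    int value { 42 };
};

std::function<void()> makeCallback(const std::shared_ptr<Service>& service) {
    // Capturing 'service' by value would extend its lifetime to that of the
    // callback; capture a weak_ptr and promote it only while the callback runs.
    std::weak_ptr<Service> weak = service;
    return [weak] {
        if (auto strong = weak.lock()) {
            std::printf("service value: %d\n", strong->value);
        }
    };
}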

View file

@ -139,7 +139,10 @@ void Application::paintGL() {
frame->frameIndex = _renderFrameCount;
frame->framebuffer = finalFramebuffer;
frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer) {
DependencyManager::get<FramebufferCache>()->releaseFramebuffer(framebuffer);
auto frameBufferCache = DependencyManager::get<FramebufferCache>();
if (frameBufferCache) {
frameBufferCache->releaseFramebuffer(framebuffer);
}
};
// deliver final scene rendering commands to the display plugin
{

View file

@ -227,6 +227,8 @@ void setupPreferences() {
auto getter = [=]()->float { return myAvatar->getTargetScale(); };
auto setter = [=](float value) { myAvatar->setTargetScale(value); };
auto preference = new SpinnerSliderPreference(AVATAR_TUNING, "Avatar Scale", getter, setter);
preference->setMin(0.25);
preference->setMax(4);
preference->setStep(0.05f);
preference->setDecimals(2);
preferences->addPreference(preference);
@ -303,17 +305,21 @@ void setupPreferences() {
{
auto getter = [=]()->float { return myAvatar->getPitchSpeed(); };
auto setter = [=](float value) { myAvatar->setPitchSpeed(value); };
auto preference = new SpinnerPreference(AVATAR_CAMERA, "Pitch speed (degrees/second)", getter, setter);
auto preference = new SpinnerSliderPreference(AVATAR_CAMERA, "Y input:", getter, setter);
preference->setMin(1.0f);
preference->setMax(360.0f);
preference->setStep(1);
preference->setDecimals(1);
preferences->addPreference(preference);
}
{
auto getter = [=]()->float { return myAvatar->getYawSpeed(); };
auto setter = [=](float value) { myAvatar->setYawSpeed(value); };
auto preference = new SpinnerPreference(AVATAR_CAMERA, "Yaw speed (degrees/second)", getter, setter);
auto preference = new SpinnerSliderPreference(AVATAR_CAMERA, "X input:", getter, setter);
preference->setMin(1.0f);
preference->setMax(360.0f);
preference->setStep(1);
preference->setDecimals(1);
preferences->addPreference(preference);
}

View file

@ -1269,6 +1269,8 @@ bool EntityScriptingInterface::appendPoint(QUuid entityID, const glm::vec3& poin
EntityItemPointer entity = static_cast<EntityItemPointer>(_entityTree->findEntityByEntityItemID(entityID));
if (!entity) {
qCDebug(entities) << "EntityScriptingInterface::setPoints no entity with ID" << entityID;
// There is no entity
return false;
}
EntityTypes::EntityType entityType = entity->getType();

View file

@ -2521,6 +2521,13 @@ bool EntityTree::readFromMap(QVariantMap& map) {
}
}
// Zero out the spread values that were fixed in version ParticleEntityFix so they behave the same as before
if (contentVersion < (int)EntityVersion::ParticleEntityFix) {
properties.setRadiusSpread(0.0f);
properties.setAlphaSpread(0.0f);
properties.setColorSpread({0, 0, 0});
}
EntityItemPointer entity = addEntity(entityItemID, properties);
if (!entity) {
qCDebug(entities) << "adding Entity failed:" << entityItemID << properties.getType();

View file

@ -203,10 +203,11 @@ void GLBackend::releaseResourceTexture(uint32_t slot) {
}
void GLBackend::resetResourceStage() {
for (uint32_t i = 0; i < _resource._buffers.size(); i++) {
uint32_t i;
for (i = 0; i < _resource._buffers.size(); i++) {
releaseResourceBuffer(i);
}
for (uint32_t i = 0; i < _resource._textures.size(); i++) {
for (i = 0; i < _resource._textures.size(); i++) {
releaseResourceTexture(i);
}
}

View file

@ -59,7 +59,11 @@ const size_t GLVariableAllocationSupport::MAX_BUFFER_SIZE = MAX_TRANSFER_SIZE;
GLenum GLTexture::getGLTextureType(const Texture& texture) {
switch (texture.getType()) {
case Texture::TEX_2D:
return GL_TEXTURE_2D;
if (!texture.isArray()) {
return GL_TEXTURE_2D;
} else {
return GL_TEXTURE_2D_ARRAY;
}
break;
case Texture::TEX_CUBE:
@ -77,6 +81,7 @@ GLenum GLTexture::getGLTextureType(const Texture& texture) {
uint8_t GLTexture::getFaceCount(GLenum target) {
switch (target) {
case GL_TEXTURE_2D:
case GL_TEXTURE_2D_ARRAY:
return TEXTURE_2D_NUM_FACES;
case GL_TEXTURE_CUBE_MAP:
return TEXTURE_CUBE_NUM_FACES;
@ -86,17 +91,22 @@ uint8_t GLTexture::getFaceCount(GLenum target) {
}
}
const std::vector<GLenum>& GLTexture::getFaceTargets(GLenum target) {
static std::vector<GLenum> cubeFaceTargets {
static const std::vector<GLenum> cubeFaceTargets {
GL_TEXTURE_CUBE_MAP_POSITIVE_X, GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
GL_TEXTURE_CUBE_MAP_POSITIVE_Y, GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
GL_TEXTURE_CUBE_MAP_POSITIVE_Z, GL_TEXTURE_CUBE_MAP_NEGATIVE_Z
};
static std::vector<GLenum> faceTargets {
static const std::vector<GLenum> faceTargets {
GL_TEXTURE_2D
};
static const std::vector<GLenum> arrayFaceTargets{
GL_TEXTURE_2D_ARRAY
};
switch (target) {
case GL_TEXTURE_2D:
return faceTargets;
case GL_TEXTURE_2D_ARRAY:
return arrayFaceTargets;
case GL_TEXTURE_CUBE_MAP:
return cubeFaceTargets;
default:

View file

@ -64,7 +64,12 @@ public:
}
if (gltexture) {
glFramebufferTexture2D(GL_FRAMEBUFFER, colorAttachments[unit], GL_TEXTURE_2D, gltexture->_texture, 0);
if (gltexture->_target == GL_TEXTURE_2D) {
glFramebufferTexture2D(GL_FRAMEBUFFER, colorAttachments[unit], GL_TEXTURE_2D, gltexture->_texture, 0);
} else {
glFramebufferTextureLayer(GL_FRAMEBUFFER, colorAttachments[unit], gltexture->_texture, 0,
b._subresource);
}
_colorBuffers.push_back(colorAttachments[unit]);
} else {
glFramebufferTexture2D(GL_FRAMEBUFFER, colorAttachments[unit], GL_TEXTURE_2D, 0, 0);
@ -91,7 +96,12 @@ public:
}
if (gltexture) {
glFramebufferTexture2D(GL_FRAMEBUFFER, attachement, GL_TEXTURE_2D, gltexture->_texture, 0);
if (gltexture->_target == GL_TEXTURE_2D) {
glFramebufferTexture2D(GL_FRAMEBUFFER, attachement, GL_TEXTURE_2D, gltexture->_texture, 0);
} else {
glFramebufferTextureLayer(GL_FRAMEBUFFER, attachement, gltexture->_texture, 0,
_gpuObject.getDepthStencilBufferSubresource());
}
} else {
glFramebufferTexture2D(GL_FRAMEBUFFER, attachement, GL_TEXTURE_2D, 0, 0);
}

View file

@ -182,7 +182,7 @@ void GL41Texture::syncSampler() const {
glTexParameteri(_target, GL_TEXTURE_MAG_FILTER, fm.magFilter);
if (sampler.doComparison()) {
glTexParameteri(_target, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_R_TO_TEXTURE_ARB);
glTexParameteri(_target, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_REF_TO_TEXTURE);
glTexParameteri(_target, GL_TEXTURE_COMPARE_FUNC, COMPARISON_TO_GL[sampler.getComparisonFunction()]);
} else {
glTexParameteri(_target, GL_TEXTURE_COMPARE_MODE, GL_NONE);
@ -197,7 +197,7 @@ void GL41Texture::syncSampler() const {
glTexParameterf(_target, GL_TEXTURE_MIN_LOD, (float)sampler.getMinMip());
glTexParameterf(_target, GL_TEXTURE_MAX_LOD, (sampler.getMaxMip() == Sampler::MAX_MIP_LEVEL ? 1000.f : sampler.getMaxMip()));
glTexParameterf(_target, GL_TEXTURE_MAX_ANISOTROPY_EXT, sampler.getMaxAnisotropy());
glTexParameterf(_target, GL_TEXTURE_MAX_ANISOTROPY, sampler.getMaxAnisotropy());
}
using GL41FixedAllocationTexture = GL41Backend::GL41FixedAllocationTexture;
@ -215,12 +215,19 @@ GL41FixedAllocationTexture::~GL41FixedAllocationTexture() {
void GL41FixedAllocationTexture::allocateStorage() const {
const GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(_gpuObject.getTexelFormat());
const auto numMips = _gpuObject.getNumMips();
const auto numSlices = _gpuObject.getNumSlices();
// glTextureStorage2D(_id, mips, texelFormat.internalFormat, dimensions.x, dimensions.y);
for (GLint level = 0; level < numMips; level++) {
Vec3u dimensions = _gpuObject.evalMipDimensions(level);
for (GLenum target : getFaceTargets(_target)) {
glTexImage2D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, 0, texelFormat.format, texelFormat.type, nullptr);
if (!_gpuObject.isArray()) {
glTexImage2D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, 0, texelFormat.format,
texelFormat.type, nullptr);
} else {
glTexImage3D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, numSlices, 0,
texelFormat.format, texelFormat.type, nullptr);
}
}
}

View file

@ -60,7 +60,11 @@ public:
}
if (gltexture) {
glNamedFramebufferTexture(_id, colorAttachments[unit], gltexture->_texture, 0);
if (gltexture->_target == GL_TEXTURE_2D) {
glNamedFramebufferTexture(_id, colorAttachments[unit], gltexture->_texture, 0);
} else {
glNamedFramebufferTextureLayer(_id, colorAttachments[unit], gltexture->_texture, 0, b._subresource);
}
_colorBuffers.push_back(colorAttachments[unit]);
} else {
glNamedFramebufferTexture(_id, colorAttachments[unit], 0, 0);
@ -87,14 +91,18 @@ public:
}
if (gltexture) {
glNamedFramebufferTexture(_id, attachement, gltexture->_texture, 0);
if (gltexture->_target == GL_TEXTURE_2D) {
glNamedFramebufferTexture(_id, attachement, gltexture->_texture, 0);
} else {
glNamedFramebufferTextureLayer(_id, attachement, gltexture->_texture, 0,
_gpuObject.getDepthStencilBufferSubresource());
}
} else {
glNamedFramebufferTexture(_id, attachement, 0, 0);
}
_depthStamp = _gpuObject.getDepthStamp();
}
// Last but not least, define where we draw
if (!_colorBuffers.empty()) {
glNamedFramebufferDrawBuffers(_id, (GLsizei)_colorBuffers.size(), _colorBuffers.data());

View file

@ -152,7 +152,7 @@ public:
glSamplerParameteri(result, GL_TEXTURE_MIN_FILTER, fm.minFilter);
glSamplerParameteri(result, GL_TEXTURE_MAG_FILTER, fm.magFilter);
if (sampler.doComparison()) {
glSamplerParameteri(result, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_R_TO_TEXTURE_ARB);
glSamplerParameteri(result, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_REF_TO_TEXTURE);
glSamplerParameteri(result, GL_TEXTURE_COMPARE_FUNC, COMPARISON_TO_GL[sampler.getComparisonFunction()]);
} else {
glSamplerParameteri(result, GL_TEXTURE_COMPARE_MODE, GL_NONE);
@ -341,7 +341,7 @@ void GL45Texture::syncSampler() const {
glTextureParameteri(_id, GL_TEXTURE_MAG_FILTER, fm.magFilter);
if (sampler.doComparison()) {
glTextureParameteri(_id, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_R_TO_TEXTURE_ARB);
glTextureParameteri(_id, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_REF_TO_TEXTURE);
glTextureParameteri(_id, GL_TEXTURE_COMPARE_FUNC, COMPARISON_TO_GL[sampler.getComparisonFunction()]);
} else {
glTextureParameteri(_id, GL_TEXTURE_COMPARE_MODE, GL_NONE);
@ -374,8 +374,13 @@ void GL45FixedAllocationTexture::allocateStorage() const {
const GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(_gpuObject.getTexelFormat());
const auto dimensions = _gpuObject.getDimensions();
const auto mips = _gpuObject.getNumMips();
const auto numSlices = _gpuObject.getNumSlices();
glTextureStorage2D(_id, mips, texelFormat.internalFormat, dimensions.x, dimensions.y);
if (!_gpuObject.isArray()) {
glTextureStorage2D(_id, mips, texelFormat.internalFormat, dimensions.x, dimensions.y);
} else {
glTextureStorage3D(_id, mips, texelFormat.internalFormat, dimensions.x, dimensions.y, numSlices);
}
glTextureParameteri(_id, GL_TEXTURE_BASE_LEVEL, 0);
glTextureParameteri(_id, GL_TEXTURE_MAX_LEVEL, mips - 1);
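
For readers less familiar with the raw GL side of the texture-array work: the 4.5 path above amounts to one immutable layered allocation plus a per-cascade framebuffer that targets a single layer. A bare-OpenGL sketch, assuming a 4.5 context with a loader such as GLEW already initialized (sizes and layer counts are placeholders):

#include <GL/glew.h>

GLuint createShadowCascadeArray(GLsizei size, GLsizei cascadeCount) {
    GLuint texture = 0;
    glCreateTextures(GL_TEXTURE_2D_ARRAY, 1, &texture);
    // one immutable allocation covering every cascade, single mip, depth-only format
    glTextureStorage3D(texture, 1, GL_DEPTH_COMPONENT32F, size, size, cascadeCount);
    glTextureParameteri(texture, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_REF_TO_TEXTURE);
    glTextureParameteri(texture, GL_TEXTURE_COMPARE_FUNC, GL_LESS);
    return texture;
}

GLuint createCascadeFramebuffer(GLuint texture, GLint layer) {
    GLuint framebuffer = 0;
    glCreateFramebuffers(1, &framebuffer);
    // attach exactly one layer of the array as this cascade's depth target
    glNamedFramebufferTextureLayer(framebuffer, GL_DEPTH_ATTACHMENT, texture, 0, layer);
    return framebuffer;
}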

View file

@ -64,7 +64,12 @@ public:
}
if (gltexture) {
glFramebufferTexture2D(GL_FRAMEBUFFER, colorAttachments[unit], GL_TEXTURE_2D, gltexture->_texture, 0);
if (gltexture->_target == GL_TEXTURE_2D) {
glFramebufferTexture2D(GL_FRAMEBUFFER, colorAttachments[unit], GL_TEXTURE_2D, gltexture->_texture, 0);
} else {
glFramebufferTextureLayer(GL_FRAMEBUFFER, colorAttachments[unit], gltexture->_texture, 0,
b._subresource);
}
_colorBuffers.push_back(colorAttachments[unit]);
} else {
glFramebufferTexture2D(GL_FRAMEBUFFER, colorAttachments[unit], GL_TEXTURE_2D, 0, 0);
@ -91,7 +96,12 @@ public:
}
if (gltexture) {
glFramebufferTexture2D(GL_FRAMEBUFFER, attachement, GL_TEXTURE_2D, gltexture->_texture, 0);
if (gltexture->_target == GL_TEXTURE_2D) {
glFramebufferTexture2D(GL_FRAMEBUFFER, attachement, GL_TEXTURE_2D, gltexture->_texture, 0);
} else {
glFramebufferTextureLayer(GL_FRAMEBUFFER, attachement, gltexture->_texture, 0,
_gpuObject.getDepthStencilBufferSubresource());
}
} else {
glFramebufferTexture2D(GL_FRAMEBUFFER, attachement, GL_TEXTURE_2D, 0, 0);
}

View file

@ -268,16 +268,27 @@ GLsizei getCompressedImageSize(int width, int height, GLenum internalFormat) {
void GLESFixedAllocationTexture::allocateStorage() const {
const GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(_gpuObject.getTexelFormat());
const auto numMips = _gpuObject.getNumMips();
const auto numSlices = _gpuObject.getNumSlices();
// glTextureStorage2D(_id, mips, texelFormat.internalFormat, dimensions.x, dimensions.y);
for (GLint level = 0; level < numMips; level++) {
Vec3u dimensions = _gpuObject.evalMipDimensions(level);
for (GLenum target : getFaceTargets(_target)) {
if (texelFormat.isCompressed()) {
glCompressedTexImage2D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, 0,
getCompressedImageSize(dimensions.x, dimensions.y, texelFormat.internalFormat), nullptr);
auto size = getCompressedImageSize(dimensions.x, dimensions.y, texelFormat.internalFormat);
if (!_gpuObject.isArray()) {
glCompressedTexImage2D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, 0, size, nullptr);
} else {
glCompressedTexImage3D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, numSlices, 0, size * numSlices, nullptr);
}
} else {
glTexImage2D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, 0, texelFormat.format, texelFormat.type, nullptr);
if (!_gpuObject.isArray()) {
glTexImage2D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, 0, texelFormat.format,
texelFormat.type, nullptr);
} else {
glTexImage3D(target, level, texelFormat.internalFormat, dimensions.x, dimensions.y, numSlices, 0,
texelFormat.format, texelFormat.type, nullptr);
}
}
}
}

View file

@ -21,10 +21,7 @@ Frame::~Frame() {
framebuffer.reset();
}
assert(bufferUpdates.empty());
if (!bufferUpdates.empty()) {
qFatal("Buffer sync error... frame destroyed without buffer updates being applied");
}
bufferUpdates.clear();
}
void Frame::finish() {

View file

@ -184,6 +184,10 @@ TexturePointer Texture::createRenderBuffer(const Element& texelFormat, uint16 wi
return create(TextureUsageType::RENDERBUFFER, TEX_2D, texelFormat, width, height, 1, 1, 0, numMips, sampler);
}
TexturePointer Texture::createRenderBufferArray(const Element& texelFormat, uint16 width, uint16 height, uint16 numSlices, uint16 numMips, const Sampler& sampler) {
return create(TextureUsageType::RENDERBUFFER, TEX_2D, texelFormat, width, height, 1, 1, numSlices, numMips, sampler);
}
TexturePointer Texture::create1D(const Element& texelFormat, uint16 width, uint16 numMips, const Sampler& sampler) {
return create(TextureUsageType::RESOURCE, TEX_1D, texelFormat, width, 1, 1, 1, 0, numMips, sampler);
}
@ -192,6 +196,10 @@ TexturePointer Texture::create2D(const Element& texelFormat, uint16 width, uint1
return create(TextureUsageType::RESOURCE, TEX_2D, texelFormat, width, height, 1, 1, 0, numMips, sampler);
}
TexturePointer Texture::create2DArray(const Element& texelFormat, uint16 width, uint16 height, uint16 numSlices, uint16 numMips, const Sampler& sampler) {
return create(TextureUsageType::STRICT_RESOURCE, TEX_2D, texelFormat, width, height, 1, 1, numSlices, numMips, sampler);
}
TexturePointer Texture::createStrict(const Element& texelFormat, uint16 width, uint16 height, uint16 numMips, const Sampler& sampler) {
return create(TextureUsageType::STRICT_RESOURCE, TEX_2D, texelFormat, width, height, 1, 1, 0, numMips, sampler);
}

View file

@ -374,9 +374,11 @@ public:
static const uint16 SINGLE_MIP = 1;
static TexturePointer create1D(const Element& texelFormat, uint16 width, uint16 numMips = SINGLE_MIP, const Sampler& sampler = Sampler());
static TexturePointer create2D(const Element& texelFormat, uint16 width, uint16 height, uint16 numMips = SINGLE_MIP, const Sampler& sampler = Sampler());
static TexturePointer create2DArray(const Element& texelFormat, uint16 width, uint16 height, uint16 numSlices, uint16 numMips = SINGLE_MIP, const Sampler& sampler = Sampler());
static TexturePointer create3D(const Element& texelFormat, uint16 width, uint16 height, uint16 depth, uint16 numMips = SINGLE_MIP, const Sampler& sampler = Sampler());
static TexturePointer createCube(const Element& texelFormat, uint16 width, uint16 numMips = 1, const Sampler& sampler = Sampler());
static TexturePointer createRenderBuffer(const Element& texelFormat, uint16 width, uint16 height, uint16 numMips = SINGLE_MIP, const Sampler& sampler = Sampler());
static TexturePointer createRenderBufferArray(const Element& texelFormat, uint16 width, uint16 height, uint16 numSlices, uint16 numMips = SINGLE_MIP, const Sampler& sampler = Sampler());
static TexturePointer createStrict(const Element& texelFormat, uint16 width, uint16 height, uint16 numMips = SINGLE_MIP, const Sampler& sampler = Sampler());
static TexturePointer createExternal(const ExternalRecycler& recycler, const Sampler& sampler = Sampler());
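
Typical call sites for the two new array factories, as a sketch; the texel formats and dimensions below are placeholders rather than values taken from this commit:

#include <gpu/Texture.h>

void makeArrayTextures() {
    // a depth render target with one layer per shadow cascade
    auto depthFormat = gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::DEPTH);
    auto shadowArray = gpu::Texture::createRenderBufferArray(depthFormat, 1024, 1024, 4);

    // a sampled 2D array resource, e.g. a small atlas of equally sized layers
    auto colorFormat = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
    auto atlas = gpu::Texture::create2DArray(colorFormat, 512, 512, 8);

    (void)shadowArray;
    (void)atlas;
}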

View file

@ -515,7 +515,7 @@ TexturePointer Texture::build(const ktx::KTXDescriptor& descriptor) {
header.getPixelHeight(),
header.getPixelDepth(),
1, // num Samples
header.getNumberOfSlices(),
header.isArray() ? header.getNumberOfSlices() : 0,
header.getNumberOfLevels(),
samplerDesc);
texture->setUsage(gpuktxKeyValue._usage);

View file

@ -163,6 +163,7 @@ namespace ktx {
uint32_t getPixelDepth() const { return (pixelDepth ? pixelDepth : 1); }
uint32_t getNumberOfSlices() const { return (numberOfArrayElements ? numberOfArrayElements : 1); }
uint32_t getNumberOfLevels() const { return (numberOfMipmapLevels ? numberOfMipmapLevels : 1); }
bool isArray() const { return numberOfArrayElements > 0; }
bool isCompressed() const { return glFormat == COMPRESSED_FORMAT; }
uint32_t evalMaxDimension() const;

View file

@ -39,8 +39,13 @@ ResourceManager::ResourceManager(bool atpSupportEnabled) : _atpSupportEnabled(at
}
ResourceManager::~ResourceManager() {
_thread.terminate();
_thread.wait();
if (_thread.isRunning()) {
_thread.quit();
static const auto MAX_RESOURCE_MANAGER_THREAD_QUITTING_TIME = MSECS_PER_SECOND / 2;
if (!_thread.wait(MAX_RESOURCE_MANAGER_THREAD_QUITTING_TIME)) {
_thread.terminate();
}
}
}
void ResourceManager::setUrlPrefixOverride(const QString& prefix, const QString& replacement) {
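
The destructor change is a general Qt shutdown idiom: prefer quit() plus a bounded wait() over an unconditional terminate(). A standalone sketch of the same pattern; the 500 ms grace period mirrors MSECS_PER_SECOND / 2 above and is otherwise arbitrary:

#include <QThread>

// Ask the thread's event loop to exit, give it a grace period, and only fall
// back to terminate() if it does not stop in time.
void shutDownThread(QThread& thread, unsigned long graceMsecs = 500) {
    if (!thread.isRunning()) {
        return;
    }
    thread.quit();                    // request a clean exit of the event loop
    if (!thread.wait(graceMsecs)) {   // wait() returns false on timeout
        thread.terminate();           // last resort; may leave resources dangling
        thread.wait();                // terminate() is asynchronous, so wait for it
    }
}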

View file

@ -239,7 +239,7 @@ void Connection::sync() {
sendACK();
}
if (_lossList.getLength() > 0) {
if (_congestionControl->shouldNAK() && _lossList.getLength() > 0) {
// check if we need to re-transmit a loss list
// we do this if it has been longer than the current nakInterval since we last sent
auto now = p_high_resolution_clock::now();
@ -271,10 +271,13 @@ void Connection::sendACK(bool wasCausedBySyncTimeout) {
SequenceNumber nextACKNumber = nextACK();
Q_ASSERT_X(nextACKNumber >= _lastSentACK, "Connection::sendACK", "Sending lower ACK, something is wrong");
if (nextACKNumber == _lastSentACK) {
// We already sent this ACK, but check if we should re-send it.
if (nextACKNumber < _lastReceivedAcknowledgedACK) {
// if our congestion control doesn't want to send an ACK for every packet received
// check if we already sent this ACK
if (_congestionControl->_ackInterval > 1 && nextACKNumber == _lastSentACK) {
// if we use ACK2s, check if the receiving side already confirmed receipt of this ACK
if (_congestionControl->shouldACK2() && nextACKNumber < _lastReceivedAcknowledgedACK) {
// we already got an ACK2 for this ACK we would be sending, don't bother
return;
}
@ -287,11 +290,11 @@ void Connection::sendACK(bool wasCausedBySyncTimeout) {
}
}
// we have received new packets since the last sent ACK
// or our congestion control dictates that we always send ACKs
// update the last sent ACK
_lastSentACK = nextACKNumber;
_ackPacket->reset(); // We need to reset it every time.
// pack in the ACK sub-sequence number
@ -448,20 +451,22 @@ bool Connection::processReceivedSequenceNumber(SequenceNumber sequenceNumber, in
// mark our last receive time as now (to push the potential expiry farther)
_lastReceiveTime = p_high_resolution_clock::now();
// check if this is a packet pair we should estimate bandwidth from, or just a regular packet
if (((uint32_t) sequenceNumber & 0xF) == 0) {
_receiveWindow.onProbePair1Arrival();
} else if (((uint32_t) sequenceNumber & 0xF) == 1) {
// only use this packet for bandwidth estimation if we didn't just receive a control packet in its place
if (!_receivedControlProbeTail) {
_receiveWindow.onProbePair2Arrival();
} else {
// reset our control probe tail marker so the next probe that comes with data can be used
_receivedControlProbeTail = false;
if (_congestionControl->shouldProbe()) {
// check if this is a packet pair we should estimate bandwidth from, or just a regular packet
if (((uint32_t) sequenceNumber & 0xF) == 0) {
_receiveWindow.onProbePair1Arrival();
} else if (((uint32_t) sequenceNumber & 0xF) == 1) {
// only use this packet for bandwidth estimation if we didn't just receive a control packet in its place
if (!_receivedControlProbeTail) {
_receiveWindow.onProbePair2Arrival();
} else {
// reset our control probe tail marker so the next probe that comes with data can be used
_receivedControlProbeTail = false;
}
}
}
_receiveWindow.onPacketArrival();
// If this is not the next sequence number, report loss
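
The new behavior hinges on three predicates on the congestion-control object plus its ACK interval. Their declarations are not part of this diff, so the interface below is only a sketch that mirrors the calls made above; the defaults reproduce the old behavior:

// Hypothetical summary of the hooks Connection consults: NAK loss lists,
// expect ACK2 confirmations, probe bandwidth with packet pairs, ACK every packet.
class CongestionControlHooks {
public:
    virtual ~CongestionControlHooks() = default;
    virtual bool shouldNAK() const { return true; }    // periodically re-send the loss list?
    virtual bool shouldACK2() const { return true; }   // does the peer confirm ACKs with ACK2s?
    virtual bool shouldProbe() const { return true; }  // use packet pairs for bandwidth estimation?
    int _ackInterval { 1 };                            // 1 == send an ACK for every packet
};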

View file

@ -40,9 +40,8 @@ void PluginManager::setInputPluginSettingsPersister(const InputPluginSettingsPer
_inputSettingsPersister = persister;
}
PluginManager* PluginManager::getInstance() {
static PluginManager _manager;
return &_manager;
PluginManagerPointer PluginManager::getInstance() {
return DependencyManager::get<PluginManager>();
}
QString getPluginNameFromMetaData(QJsonObject object) {
@ -136,9 +135,6 @@ const LoaderList& getLoadedPlugins() {
return loadedPlugins;
}
PluginManager::PluginManager() {
}
const CodecPluginList& PluginManager::getCodecPlugins() {
static CodecPluginList codecPlugins;
static std::once_flag once;

View file

@ -9,12 +9,19 @@
#include <QObject>
#include <DependencyManager.h>
#include "Forward.h"
class PluginManager : public QObject {
class PluginManager;
using PluginManagerPointer = QSharedPointer<PluginManager>;
class PluginManager : public QObject, public Dependency {
SINGLETON_DEPENDENCY
public:
static PluginManager* getInstance();
PluginManager();
static PluginManagerPointer getInstance();
const DisplayPluginList& getDisplayPlugins();
const InputPluginList& getInputPlugins();
@ -39,6 +46,8 @@ public:
void setInputPluginSettingsPersister(const InputPluginSettingsPersister& persister);
private:
PluginManager() = default;
DisplayPluginProvider _displayPluginProvider { []()->DisplayPluginList { return {}; } };
InputPluginProvider _inputPluginProvider { []()->InputPluginList { return {}; } };
CodecPluginProvider _codecPluginProvider { []()->CodecPluginList { return {}; } };
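
Because getInstance() now returns a QSharedPointer owned by DependencyManager, call sites hold the pointer only for the duration of the call instead of relying on a function-local static. A short usage sketch (the logging is illustrative only):

#include <QDebug>
#include <plugins/PluginManager.h>

void listCodecPlugins() {
    PluginManagerPointer pluginManager = PluginManager::getInstance();
    for (const auto& codec : pluginManager->getCodecPlugins()) {
        qDebug() << "codec plugin:" << codec->getName();
    }
}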

View file

@ -60,7 +60,8 @@ enum TextureSlot {
enum ParamSlot {
CameraCorrection = 0,
DeferredFrameTransform,
ShadowTransform
ShadowTransform,
DebugParametersBuffer
};
static const std::string DEFAULT_ALBEDO_SHADER {
@ -139,12 +140,11 @@ static const std::string DEFAULT_LIGHTING_SHADER {
" }"
};
static const std::string DEFAULT_SHADOW_SHADER{
"uniform sampler2DShadow shadowMap;"
static const std::string DEFAULT_SHADOW_DEPTH_SHADER{
"vec4 getFragmentColor() {"
" for (int i = 255; i >= 0; --i) {"
" float depth = i / 255.0;"
" if (texture(shadowMap, vec3(uv, depth)) > 0.5) {"
" if (texture(shadowMaps, vec4(uv, parameters._shadowCascadeIndex, depth)) > 0.5) {"
" return vec4(vec3(depth), 1.0);"
" }"
" }"
@ -323,7 +323,7 @@ std::string DebugDeferredBuffer::getShaderSourceCode(Mode mode, std::string cust
case ShadowCascade1Mode:
case ShadowCascade2Mode:
case ShadowCascade3Mode:
return DEFAULT_SHADOW_SHADER;
return DEFAULT_SHADOW_DEPTH_SHADER;
case ShadowCascadeIndicesMode:
return DEFAULT_SHADOW_CASCADE_SHADER;
case LinearDepthMode:
@ -396,6 +396,7 @@ const gpu::PipelinePointer& DebugDeferredBuffer::getPipeline(Mode mode, std::str
slotBindings.insert(gpu::Shader::Binding("cameraCorrectionBuffer", CameraCorrection));
slotBindings.insert(gpu::Shader::Binding("deferredFrameTransformBuffer", DeferredFrameTransform));
slotBindings.insert(gpu::Shader::Binding("shadowTransformBuffer", ShadowTransform));
slotBindings.insert(gpu::Shader::Binding("parametersBuffer", DebugParametersBuffer));
slotBindings.insert(gpu::Shader::Binding("albedoMap", Albedo));
slotBindings.insert(gpu::Shader::Binding("normalMap", Normal));
@ -403,7 +404,7 @@ const gpu::PipelinePointer& DebugDeferredBuffer::getPipeline(Mode mode, std::str
slotBindings.insert(gpu::Shader::Binding("depthMap", Depth));
slotBindings.insert(gpu::Shader::Binding("obscuranceMap", AmbientOcclusion));
slotBindings.insert(gpu::Shader::Binding("lightingMap", Lighting));
slotBindings.insert(gpu::Shader::Binding("shadowMap", Shadow));
slotBindings.insert(gpu::Shader::Binding("shadowMaps", Shadow));
slotBindings.insert(gpu::Shader::Binding("linearDepthMap", LinearDepth));
slotBindings.insert(gpu::Shader::Binding("halfLinearDepthMap", HalfLinearDepth));
slotBindings.insert(gpu::Shader::Binding("halfNormalMap", HalfNormal));
@ -432,8 +433,11 @@ const gpu::PipelinePointer& DebugDeferredBuffer::getPipeline(Mode mode, std::str
}
void DebugDeferredBuffer::configure(const Config& config) {
auto& parameters = _parameters.edit();
_mode = (Mode)config.mode;
_size = config.size;
parameters._shadowCascadeIndex = glm::clamp(_mode - Mode::ShadowCascade0Mode, 0, (int)SHADOW_CASCADE_MAX_COUNT - 1);
}
void DebugDeferredBuffer::run(const RenderContextPointer& renderContext, const Inputs& inputs) {
@ -483,14 +487,15 @@ void DebugDeferredBuffer::run(const RenderContextPointer& renderContext, const I
batch.setResourceTexture(Velocity, velocityFramebuffer->getVelocityTexture());
}
batch.setUniformBuffer(DebugParametersBuffer, _parameters);
auto lightStage = renderContext->_scene->getStage<LightStage>();
assert(lightStage);
assert(lightStage->getNumLights() > 0);
auto lightAndShadow = lightStage->getCurrentKeyLightAndShadow();
const auto& globalShadow = lightAndShadow.second;
if (globalShadow) {
const auto cascadeIndex = glm::clamp(_mode - Mode::ShadowCascade0Mode, 0, (int)globalShadow->getCascadeCount() - 1);
batch.setResourceTexture(Shadow, globalShadow->getCascade(cascadeIndex).map);
batch.setResourceTexture(Shadow, globalShadow->map);
batch.setUniformBuffer(ShadowTransform, globalShadow->getBuffer());
batch.setUniformBuffer(DeferredFrameTransform, frameTransform->getFrameTransformBuffer());
}

View file

@ -30,7 +30,7 @@ public:
DebugDeferredBufferConfig() : render::Job::Config(false) {}
void setMode(int newMode);
int mode{ 0 };
glm::vec4 size{ 0.0f, -1.0f, 1.0f, 1.0f };
signals:
@ -39,20 +39,26 @@ signals:
class DebugDeferredBuffer {
public:
using Inputs = render::VaryingSet6<DeferredFramebufferPointer, LinearDepthFramebufferPointer, SurfaceGeometryFramebufferPointer, AmbientOcclusionFramebufferPointer, VelocityFramebufferPointer, DeferredFrameTransformPointer>;
using Inputs = render::VaryingSet6<DeferredFramebufferPointer,
LinearDepthFramebufferPointer,
SurfaceGeometryFramebufferPointer,
AmbientOcclusionFramebufferPointer,
VelocityFramebufferPointer,
DeferredFrameTransformPointer>;
using Config = DebugDeferredBufferConfig;
using JobModel = render::Job::ModelI<DebugDeferredBuffer, Inputs, Config>;
DebugDeferredBuffer();
~DebugDeferredBuffer();
void configure(const Config& config);
void run(const render::RenderContextPointer& renderContext, const Inputs& inputs);
protected:
friend class DebugDeferredBufferConfig;
enum Mode : uint8_t {
enum Mode : uint8_t
{
// Use Mode suffix to avoid collisions
Off = 0,
DepthMode,
@ -83,7 +89,7 @@ protected:
AmbientOcclusionMode,
AmbientOcclusionBlurredMode,
VelocityMode,
CustomMode, // Needs to stay last
CustomMode, // Needs to stay last
NumModes,
};
@ -92,20 +98,25 @@ private:
Mode _mode{ Off };
glm::vec4 _size;
#include "debug_deferred_buffer_shared.slh"
using ParametersBuffer = gpu::StructBuffer<DebugParameters>;
struct CustomPipeline {
gpu::PipelinePointer pipeline;
mutable QFileInfo info;
};
using StandardPipelines = std::array<gpu::PipelinePointer, NumModes>;
using CustomPipelines = std::unordered_map<std::string, CustomPipeline>;
bool pipelineNeedsUpdate(Mode mode, std::string customFile = std::string()) const;
const gpu::PipelinePointer& getPipeline(Mode mode, std::string customFile = std::string());
std::string getShaderSourceCode(Mode mode, std::string customFile = std::string());
ParametersBuffer _parameters;
StandardPipelines _pipelines;
CustomPipelines _customPipelines;
int _geometryId { 0 };
int _geometryId{ 0 };
};
#endif // hifi_DebugDeferredBuffer_h
#endif // hifi_DebugDeferredBuffer_h

View file

@ -68,7 +68,7 @@ enum DeferredShader_MapSlot {
SCATTERING_SPECULAR_UNIT = 9,
SKYBOX_MAP_UNIT = render::ShapePipeline::Slot::LIGHT_AMBIENT_MAP, // unit = 10
SHADOW_MAP_UNIT = 11,
nextAvailableUnit = SHADOW_MAP_UNIT + SHADOW_CASCADE_MAX_COUNT
nextAvailableUnit = SHADOW_MAP_UNIT
};
enum DeferredShader_BufferSlot {
DEFERRED_FRAME_TRANSFORM_BUFFER_SLOT = 0,
@ -534,9 +534,7 @@ void RenderDeferredSetup::run(const render::RenderContextPointer& renderContext,
// Bind the shadow buffers
if (globalShadow) {
for (unsigned int i = 0; i < globalShadow->getCascadeCount(); i++) {
batch.setResourceTexture(SHADOW_MAP_UNIT+i, globalShadow->getCascade(i).map);
}
batch.setResourceTexture(SHADOW_MAP_UNIT, globalShadow->map);
}
auto program = deferredLightingEffect->_directionalSkyboxLight;

View file

@ -74,8 +74,6 @@ LightStage::Shadow::Cascade::Cascade() :
_frustum{ std::make_shared<ViewFrustum>() },
_minDistance{ 0.0f },
_maxDistance{ 20.0f } {
framebuffer = gpu::FramebufferPointer(gpu::Framebuffer::createShadowmap(MAP_SIZE));
map = framebuffer->getDepthStencilBuffer();
}
const glm::mat4& LightStage::Shadow::Cascade::getView() const {
@ -127,8 +125,29 @@ LightStage::Shadow::Shadow(graphics::LightPointer light, float maxDistance, unsi
Schema schema;
schema.cascadeCount = cascadeCount;
_schemaBuffer = std::make_shared<gpu::Buffer>(sizeof(Schema), (const gpu::Byte*) &schema);
// Create shadow cascade texture array
auto depthFormat = gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::DEPTH); // Depth32 texel format
map = gpu::TexturePointer(gpu::Texture::createRenderBufferArray(depthFormat, MAP_SIZE, MAP_SIZE, cascadeCount));
gpu::Sampler::Desc samplerDesc;
samplerDesc._borderColor = glm::vec4(1.0f);
samplerDesc._wrapModeU = gpu::Sampler::WRAP_BORDER;
samplerDesc._wrapModeV = gpu::Sampler::WRAP_BORDER;
samplerDesc._filter = gpu::Sampler::FILTER_MIN_MAG_LINEAR;
samplerDesc._comparisonFunc = gpu::LESS;
map->setSampler(gpu::Sampler(samplerDesc));
_cascades.resize(cascadeCount);
for (uint cascadeIndex=0; cascadeIndex < cascadeCount; cascadeIndex++) {
auto& cascade = _cascades[cascadeIndex];
std::string name = "Shadowmap Cascade ";
name += '0' + cascadeIndex;
cascade.framebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(name));
cascade.framebuffer->setDepthBuffer(map, depthFormat, cascadeIndex);
}
setMaxDistance(maxDistance);
}

View file

@ -53,7 +53,6 @@ public:
Cascade();
gpu::FramebufferPointer framebuffer;
gpu::TexturePointer map;
const std::shared_ptr<ViewFrustum>& getFrustum() const { return _frustum; }
@ -93,6 +92,8 @@ public:
const graphics::LightPointer& getLight() const { return _light; }
gpu::TexturePointer map;
protected:
#include "Shadows_shared.slh"

View file

@ -227,7 +227,7 @@ void RenderShadowTask::build(JobModel& task, const render::Varying& input, rende
}
const auto setupOutput = task.addJob<RenderShadowSetup>("ShadowSetup");
const auto queryResolution = setupOutput.getN<RenderShadowSetup::Outputs>(2);
const auto queryResolution = setupOutput.getN<RenderShadowSetup::Outputs>(1);
// Fetch and cull the items from the scene
static const auto shadowCasterReceiverFilter = ItemFilter::Builder::visibleWorldItems().withTypeShape().withOpaque().withoutLayered().withTagBits(tagBits, tagMask);
@ -248,10 +248,12 @@ void RenderShadowTask::build(JobModel& task, const render::Varying& input, rende
const auto sortedShapes = task.addJob<DepthSortShapes>("DepthSortShadow", sortedPipelines, true);
render::Varying cascadeFrustums[SHADOW_CASCADE_MAX_COUNT] = {
ViewFrustumPointer(),
ViewFrustumPointer(),
ViewFrustumPointer()
#if SHADOW_CASCADE_MAX_COUNT>1
,ViewFrustumPointer(),
ViewFrustumPointer(),
ViewFrustumPointer()
#endif
};
for (auto i = 0; i < SHADOW_CASCADE_MAX_COUNT; i++) {
@ -293,13 +295,15 @@ RenderShadowSetup::RenderShadowSetup() :
void RenderShadowSetup::configure(const Config& configuration) {
setConstantBias(0, configuration.constantBias0);
setConstantBias(1, configuration.constantBias1);
setConstantBias(2, configuration.constantBias2);
setConstantBias(3, configuration.constantBias3);
setSlopeBias(0, configuration.slopeBias0);
#if SHADOW_CASCADE_MAX_COUNT>1
setConstantBias(1, configuration.constantBias1);
setSlopeBias(1, configuration.slopeBias1);
setConstantBias(2, configuration.constantBias2);
setSlopeBias(2, configuration.slopeBias2);
setConstantBias(3, configuration.constantBias3);
setSlopeBias(3, configuration.slopeBias3);
#endif
}
void RenderShadowSetup::setConstantBias(int cascadeIndex, float value) {

View file

@ -17,11 +17,11 @@
#define SHADOW_SCREEN_SPACE_DITHER 1
// the shadow texture
uniform sampler2DShadow shadowMaps[SHADOW_CASCADE_MAX_COUNT];
uniform sampler2DArrayShadow shadowMaps;
// Sample the shadowMap with PCF (built-in)
float fetchShadow(int cascadeIndex, vec3 shadowTexcoord) {
return texture(shadowMaps[cascadeIndex], shadowTexcoord);
return texture(shadowMaps, vec4(shadowTexcoord.xy, cascadeIndex, shadowTexcoord.z));
}
vec2 PCFkernel[4] = vec2[4](

View file

@ -23,11 +23,18 @@ uniform sampler2D occlusionMap;
uniform sampler2D occlusionBlurredMap;
uniform sampler2D scatteringMap;
uniform sampler2D velocityMap;
uniform sampler2DArrayShadow shadowMaps;
<@include ShadowCore.slh@>
<$declareDeferredCurvature()$>
<@include debug_deferred_buffer_shared.slh@>
layout(std140) uniform parametersBuffer {
DebugParameters parameters;
};
float curvatureAO(float k) {
return 1.0f - (0.0022f * k * k) + (0.0776f * k) + 0.7369f;
}

View file

@ -0,0 +1,17 @@
// glsl / C++ compatible source as interface for FadeEffect
#ifdef __cplusplus
# define INT32 glm::int32
#else
# define INT32 int
#endif
struct DebugParameters
{
INT32 _shadowCascadeIndex;
};
// <@if 1@>
// Trigger Scribe include
// <@endif@> <!def that !>
//
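
This new file follows the project's shared-header convention: the same struct compiles once as C++ (feeding gpu::StructBuffer<DebugParameters>) and once as GLSL (the parametersBuffer uniform block). A hypothetical second struct written the same way shows why this works: each 4-byte INT32 lands at the same offset in the C++ struct and in the std140 uniform block.

// glsl / C++ compatible source (hypothetical example, not part of this commit)
#ifdef __cplusplus
# define INT32 glm::int32
#else
# define INT32 int
#endif

struct ExampleDebugParameters {
    INT32 _mode;           // offset 0 on both sides
    INT32 _cascadeIndex;   // offset 4 on both sides
};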

View file

@ -370,10 +370,13 @@ void CullShapeBounds::run(const RenderContextPointer& renderContext, const Input
const auto& inShapes = inputs.get0();
const auto& cullFilter = inputs.get1();
const auto& boundsFilter = inputs.get2();
const auto& antiFrustum = inputs.get3();
ViewFrustumPointer antiFrustum;
auto& outShapes = outputs.edit0();
auto& outBounds = outputs.edit1();
if (!inputs[3].isNull()) {
antiFrustum = inputs.get3();
}
outShapes.clear();
outBounds = AABox();

View file

@ -2373,7 +2373,6 @@ function selectParticleEntity(entityID) {
particleExplorerTool.createWebView();
particleExplorerTool.setActiveParticleEntity(entityID);
particleExplorerTool.setActiveParticleProperties(properties);
// Switch to particle explorer
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");

View file

@ -101,6 +101,7 @@ function onWebEventReceived(event) {
// If user provides input during a sit, the avatar animation state should be restored
Controller.keyPressEvent.connect(restoreAnimation);
Controller.enableMapping(eventMappingName);
MyAvatar.overrideAnimation(ANIMATIONS[emoteName].url, FPS, false, 0, frameCount);
} else {
@ -132,6 +133,7 @@ function restoreAnimation() {
// Make sure the input is disconnected after animations are restored so it doesn't affect any emotes other than sit
Controller.keyPressEvent.disconnect(restoreAnimation);
Controller.disableMapping(eventMappingName);
}
// Note peek() so as to not interfere with other mappings.
@ -151,7 +153,7 @@ eventMapping.from(Controller.Standard.RS).peek().to(restoreAnimation);
eventMapping.from(Controller.Standard.RightGrip).peek().to(restoreAnimation);
eventMapping.from(Controller.Standard.Back).peek().to(restoreAnimation);
eventMapping.from(Controller.Standard.Start).peek().to(restoreAnimation);
Controller.enableMapping(eventMappingName);
button.clicked.connect(onClicked);
tablet.screenChanged.connect(onScreenChanged);

View file

@ -99,12 +99,17 @@ EntityListTool = function(opts) {
url: url,
locked: properties.locked,
visible: properties.visible,
verticesCount: valueIfDefined(properties.renderInfo.verticesCount),
texturesCount: valueIfDefined(properties.renderInfo.texturesCount),
texturesSize: valueIfDefined(properties.renderInfo.texturesSize),
hasTransparent: valueIfDefined(properties.renderInfo.hasTransparent),
verticesCount: (properties.renderInfo !== undefined ?
valueIfDefined(properties.renderInfo.verticesCount) : ""),
texturesCount: (properties.renderInfo !== undefined ?
valueIfDefined(properties.renderInfo.texturesCount) : ""),
texturesSize: (properties.renderInfo !== undefined ?
valueIfDefined(properties.renderInfo.texturesSize) : ""),
hasTransparent: (properties.renderInfo !== undefined ?
valueIfDefined(properties.renderInfo.hasTransparent) : ""),
isBaked: properties.type == "Model" ? url.toLowerCase().endsWith(".baked.fbx") : false,
drawCalls: valueIfDefined(properties.renderInfo.drawCalls),
drawCalls: (properties.renderInfo !== undefined ?
valueIfDefined(properties.renderInfo.drawCalls) : ""),
hasScript: properties.script !== ""
});
}

View file

@ -61,12 +61,18 @@ function HifiEntityUI(parent) {
this.parent = parent;
var self = this;
this.sendPackage = {};
this.settingsUpdateLock = false;
this.webBridgeSync = _.debounce(function (id, val) {
if (self.EventBridge && !self.settingsUpdateLock) {
var sendPackage = {};
sendPackage[id] = val;
self.submitChanges(sendPackage);
this.webBridgeSync = function(id, val) {
if (!this.settingsUpdateLock) {
this.sendPackage[id] = val;
this.webBridgeSyncDebounce();
}
};
this.webBridgeSyncDebounce = _.debounce(function () {
if (self.EventBridge) {
self.submitChanges(self.sendPackage);
self.sendPackage = {};
}
}, DEBOUNCE_TIMEOUT);
}
@ -159,7 +165,6 @@ HifiEntityUI.prototype = {
var self = this;
var fields = document.getElementsByTagName("input");
self.settingsUpdateLock = true;
if (!currentProperties.locked) {
for (var i = 0; i < fields.length; i++) {
fields[i].removeAttribute("disabled");
@ -179,7 +184,7 @@ HifiEntityUI.prototype = {
for (var e in keys) {
if (keys.hasOwnProperty(e)) {
var value = keys[e];
var property = currentProperties[value];
var field = self.builtRows[value];
if (field) {
@ -235,10 +240,6 @@ HifiEntityUI.prototype = {
}
}
}
// Now unlocking settings Update lock for sending messages on callbacks.
setTimeout(function () {
self.settingsUpdateLock = false;
}, DEBOUNCE_TIMEOUT * 2.5);
},
connect: function (EventBridge) {
this.EventBridge = EventBridge;
@ -253,28 +254,9 @@ HifiEntityUI.prototype = {
data = JSON.parse(data);
if (data.messageType === 'particle_settings') {
// Update settings
var currentProperties = data.currentProperties;
// Update uninitialized variables
if (!currentProperties.alphaStart) {
currentProperties.alphaStart = currentProperties.alpha;
}
if (!currentProperties.alphaFinish) {
currentProperties.alphaFinish = currentProperties.alpha;
}
if (!currentProperties.radiusStart) {
currentProperties.radiusStart = currentProperties.particleRadius;
}
if (!currentProperties.radiusFinish) {
currentProperties.radiusFinish = currentProperties.particleRadius;
}
if (!currentProperties.colorStart || !currentProperties.colorStart.red) {
currentProperties.colorStart = currentProperties.color;
}
if (!currentProperties.colorFinish || !currentProperties.colorFinish.red) {
currentProperties.colorFinish = currentProperties.color;
}
self.fillFields(currentProperties);
self.settingsUpdateLock = true;
self.fillFields(data.currentProperties);
self.settingsUpdateLock = false;
// Do expected property match with structure;
} else if (data.messageType === 'particle_close') {
self.disableFields();

View file

@ -9,7 +9,8 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* global window, alert, ParticleExplorerTool, EventBridge, dat, listenForSettingsUpdates,createVec3Folder,createQuatFolder,writeVec3ToInterface,writeDataToInterface*/
/* global window, alert, ParticleExplorerTool, EventBridge, dat, listenForSettingsUpdates, createVec3Folder,
createQuatFolder, writeVec3ToInterface, writeDataToInterface */
var PARTICLE_EXPLORER_HTML_URL = Script.resolvePath('particleExplorer.html');
@ -17,7 +18,7 @@ var PARTICLE_EXPLORER_HTML_URL = Script.resolvePath('particleExplorer.html');
ParticleExplorerTool = function() {
var that = {};
that.activeParticleEntity = 0;
that.activeParticleProperties = {};
that.updatedActiveParticleProperties = {};
that.createWebView = function() {
that.webView = Tablet.getTablet("com.highfidelity.interface.tablet.system");
@ -30,7 +31,7 @@ ParticleExplorerTool = function() {
return;
}
that.activeParticleEntity = 0;
that.activeParticleProperties = {};
that.updatedActiveParticleProperties = {};
var messageData = {
messageType: "particle_close"
@ -38,46 +39,86 @@ ParticleExplorerTool = function() {
that.webView.emitScriptEvent(JSON.stringify(messageData));
};
function sendActiveParticleProperties() {
function sendParticleProperties(properties) {
that.webView.emitScriptEvent(JSON.stringify({
messageType: "particle_settings",
currentProperties: that.activeParticleProperties
currentProperties: properties
}));
}
function sendActiveParticleProperties() {
var properties = Entities.getEntityProperties(that.activeParticleEntity);
if (properties.emitOrientation) {
properties.emitOrientation = Quat.safeEulerAngles(properties.emitOrientation);
}
// Update uninitialized variables
if (isNaN(properties.alphaStart)) {
properties.alphaStart = properties.alpha;
}
if (isNaN(properties.alphaFinish)) {
properties.alphaFinish = properties.alpha;
}
if (isNaN(properties.radiusStart)) {
properties.radiusStart = properties.particleRadius;
}
if (isNaN(properties.radiusFinish)) {
properties.radiusFinish = properties.particleRadius;
}
if (isNaN(properties.colorStart.red)) {
properties.colorStart = properties.color;
}
if (isNaN(properties.colorFinish.red)) {
properties.colorFinish = properties.color;
}
sendParticleProperties(properties);
}
function sendUpdatedActiveParticleProperties() {
sendParticleProperties(that.updatedActiveParticleProperties);
that.updatedActiveParticleProperties = {};
}
that.webEventReceived = function(message) {
var data = JSON.parse(message);
if (data.messageType === "settings_update") {
if (data.updatedSettings.emitOrientation) {
data.updatedSettings.emitOrientation = Quat.fromVec3Degrees(data.updatedSettings.emitOrientation);
}
Entities.editEntity(that.activeParticleEntity, data.updatedSettings);
for (var key in data.updatedSettings) {
if (that.activeParticleProperties.hasOwnProperty(key)) {
that.activeParticleProperties[key] = data.updatedSettings[key];
}
}
var updatedSettings = data.updatedSettings;
var optionalProps = ["alphaStart", "alphaFinish", "radiusStart", "radiusFinish", "colorStart", "colorFinish"];
var fallbackProps = ["alpha", "particleRadius", "color"];
var entityProps = Entities.getEntityProperties(that.activeParticleProperties, optionalProps);
for (var i = 0; i < optionalProps.length; i++) {
var fallbackProp = fallbackProps[Math.floor(i / 2)];
var optionalValue = updatedSettings[optionalProps[i]];
var fallbackValue = updatedSettings[fallbackProp];
if (optionalValue && fallbackValue) {
delete updatedSettings[optionalProps[i]];
}
}
if (updatedSettings.emitOrientation) {
updatedSettings.emitOrientation = Quat.fromVec3Degrees(updatedSettings.emitOrientation);
}
Entities.editEntity(that.activeParticleEntity, updatedSettings);
var entityProps = Entities.getEntityProperties(that.activeParticleEntity, optionalProps);
var needsUpdate = false;
for (var i = 0; i < optionalProps.length; i++) {
var fallback = fallbackProps[Math.floor(i / 2)];
if (data.updatedSettings[fallback]) {
var prop = optionalProps[i];
if (!that.activeParticleProperties[prop] || (fallback === "color" && !that.activeParticleProperties[prop].red)) {
that.activeParticleProperties[prop] = entityProps[fallback];
var fallbackProp = fallbackProps[Math.floor(i / 2)];
var fallbackValue = updatedSettings[fallbackProp];
if (fallbackValue) {
var optionalProp = optionalProps[i];
if ((fallbackProp !== "color" && isNaN(entityProps[optionalProp])) || (fallbackProp === "color" && isNaN(entityProps[optionalProp].red))) {
that.updatedActiveParticleProperties[optionalProp] = fallbackValue;
needsUpdate = true;
}
}
}
if (needsUpdate) {
sendActiveParticleProperties();
sendUpdatedActiveParticleProperties();
}
} else if (data.messageType === "page_loaded") {
sendActiveParticleProperties();
}
@ -85,12 +126,8 @@ ParticleExplorerTool = function() {
that.setActiveParticleEntity = function(id) {
that.activeParticleEntity = id;
};
that.setActiveParticleProperties = function(properties) {
that.activeParticleProperties = properties;
sendActiveParticleProperties();
};
return that;
};