mirror of
https://github.com/lubosz/overte.git
synced 2025-04-09 13:12:57 +02:00
Merge pull request #8280 from samcake/skin
Update the upstream skin with master changes and bug fixes for skin
This commit is contained in:
commit
91a0b86d40
36 changed files with 480 additions and 343 deletions
38
cmake/externals/hifiAudioCodec/CMakeLists.txt
vendored
38
cmake/externals/hifiAudioCodec/CMakeLists.txt
vendored
|
@ -1,19 +1,31 @@
|
|||
include(ExternalProject)
|
||||
include(SelectLibraryConfigurations)
|
||||
|
||||
set(EXTERNAL_NAME HiFiAudioCodec)
|
||||
set(EXTERNAL_NAME hifiAudioCodec)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://s3.amazonaws.com/hifi-public/dependencies/codecSDK-1.zip
|
||||
URL_MD5 23ec3fe51eaa155ea159a4971856fc13
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
if (WIN32 OR APPLE)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL http://s3.amazonaws.com/hifi-public/dependencies/codecSDK-1.zip
|
||||
URL_MD5 23ec3fe51eaa155ea159a4971856fc13
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
elseif(NOT ANDROID)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL http://s3.amazonaws.com/hifi-public/dependencies/codecSDK-linux.zip
|
||||
URL_MD5 7d37914a18aa4de971d2f45dd3043bde
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
endif()
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
@ -23,11 +35,9 @@ ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
|
|||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE TYPE INTERNAL)
|
||||
|
||||
if (WIN32)
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/audio.lib CACHE TYPE INTERNAL)
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/audio.lib CACHE TYPE INTERNAL)
|
||||
elseif(APPLE)
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/libaudio.a CACHE TYPE INTERNAL)
|
||||
elseif(NOT ANDROID)
|
||||
# FIXME need to account for different architectures
|
||||
#set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux64/audio.so CACHE TYPE INTERNAL)
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/libaudio.a CACHE TYPE INTERNAL)
|
||||
endif()
|
||||
|
||||
|
|
|
@ -43,4 +43,4 @@ macro(ADD_DEPENDENCY_EXTERNAL_PROJECTS)
|
|||
|
||||
endforeach()
|
||||
|
||||
endmacro()
|
||||
endmacro()
|
||||
|
|
|
@ -37,7 +37,7 @@ macro(SETUP_HIFI_CLIENT_SERVER_PLUGIN)
|
|||
${CLIENT_PLUGIN_FULL_PATH}
|
||||
)
|
||||
# copy the client plugin binaries
|
||||
add_custom_command(TARGET ${DIR} POST_BUILD
|
||||
add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
|
||||
COMMAND "${CMAKE_COMMAND}" -E copy
|
||||
"$<TARGET_FILE:${TARGET_NAME}>"
|
||||
${CLIENT_PLUGIN_FULL_PATH}
|
||||
|
@ -50,7 +50,7 @@ macro(SETUP_HIFI_CLIENT_SERVER_PLUGIN)
|
|||
${SERVER_PLUGIN_FULL_PATH}
|
||||
)
|
||||
# copy the server plugin binaries
|
||||
add_custom_command(TARGET ${DIR} POST_BUILD
|
||||
add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
|
||||
COMMAND "${CMAKE_COMMAND}" -E copy
|
||||
"$<TARGET_FILE:${TARGET_NAME}>"
|
||||
${SERVER_PLUGIN_FULL_PATH}
|
||||
|
|
|
@ -305,6 +305,8 @@ public:
|
|||
// Don't actually crash in debug builds, in case this apparent deadlock is simply from
|
||||
// the developer actively debugging code
|
||||
#ifdef NDEBUG
|
||||
|
||||
|
||||
deadlockDetectionCrash();
|
||||
#endif
|
||||
}
|
||||
|
|
|
@ -207,8 +207,10 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
|
|||
}
|
||||
|
||||
withWriteLock([&]{
|
||||
if (_previousSet) {
|
||||
if (_previousSet &&
|
||||
_positionalTarget != _previousPositionalTarget) { // don't average in a zero velocity if we get the same data
|
||||
glm::vec3 oneFrameVelocity = (_positionalTarget - _previousPositionalTarget) / deltaTimeStep;
|
||||
|
||||
_measuredLinearVelocities[_measuredLinearVelocitiesIndex++] = oneFrameVelocity;
|
||||
if (_measuredLinearVelocitiesIndex >= AvatarActionHold::velocitySmoothFrames) {
|
||||
_measuredLinearVelocitiesIndex = 0;
|
||||
|
@ -228,9 +230,9 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
|
|||
// 3 -- ignore i of 0 1 2
|
||||
// 4 -- ignore i of 1 2 3
|
||||
// 5 -- ignore i of 2 3 4
|
||||
if ((i + 1) % 6 == _measuredLinearVelocitiesIndex ||
|
||||
(i + 2) % 6 == _measuredLinearVelocitiesIndex ||
|
||||
(i + 3) % 6 == _measuredLinearVelocitiesIndex) {
|
||||
if ((i + 1) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
|
||||
(i + 2) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
|
||||
(i + 3) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex) {
|
||||
continue;
|
||||
}
|
||||
measuredLinearVelocity += _measuredLinearVelocities[i];
|
||||
|
|
|
@ -862,6 +862,9 @@ void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) {
|
|||
static const float INT16_TO_FLOAT_SCALE_FACTOR = 1/32768.0f;
|
||||
|
||||
bool injectorsHaveData = false;
|
||||
|
||||
// lock the injector vector
|
||||
Lock lock(_injectorsMutex);
|
||||
|
||||
for (AudioInjector* injector : getActiveLocalAudioInjectors()) {
|
||||
if (injector->getLocalBuffer()) {
|
||||
|
@ -871,6 +874,7 @@ void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) {
|
|||
AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
||||
|
||||
// get one frame from the injector (mono or stereo)
|
||||
memset(_scratchBuffer, 0, sizeof(_scratchBuffer));
|
||||
if (0 < injector->getLocalBuffer()->readData((char*)_scratchBuffer, samplesToRead)) {
|
||||
|
||||
injectorsHaveData = true;
|
||||
|
@ -894,14 +898,14 @@ void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) {
|
|||
} else {
|
||||
|
||||
qDebug() << "injector has no more data, marking finished for removal";
|
||||
injector->finish();
|
||||
injector->finishLocalInjection();
|
||||
injectorsToRemove.append(injector);
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
qDebug() << "injector has no local buffer, marking as finished for removal";
|
||||
injector->finish();
|
||||
injector->finishLocalInjection();
|
||||
injectorsToRemove.append(injector);
|
||||
}
|
||||
}
|
||||
|
@ -1003,6 +1007,7 @@ void AudioClient::setIsStereoInput(bool isStereoInput) {
|
|||
|
||||
|
||||
bool AudioClient::outputLocalInjector(bool isStereo, AudioInjector* injector) {
|
||||
Lock lock(_injectorsMutex);
|
||||
if (injector->getLocalBuffer() && _audioInput ) {
|
||||
// just add it to the vector of active local injectors, if
|
||||
// not already there.
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
#include <fstream>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
#include <mutex>
|
||||
|
||||
#include <QtCore/qsystemdetection.h>
|
||||
#include <QtCore/QByteArray>
|
||||
|
@ -83,6 +84,9 @@ public:
|
|||
using AudioPositionGetter = std::function<glm::vec3()>;
|
||||
using AudioOrientationGetter = std::function<glm::quat()>;
|
||||
|
||||
using Mutex = std::mutex;
|
||||
using Lock = std::unique_lock<Mutex>;
|
||||
|
||||
class AudioOutputIODevice : public QIODevice {
|
||||
public:
|
||||
AudioOutputIODevice(MixedProcessedAudioStream& receivedAudioStream, AudioClient* audio) :
|
||||
|
@ -219,6 +223,7 @@ private:
|
|||
float azimuthForSource(const glm::vec3& relativePosition);
|
||||
float gainForSource(float distance, float volume);
|
||||
|
||||
Mutex _injectorsMutex;
|
||||
QByteArray firstInputFrame;
|
||||
QAudioInput* _audioInput;
|
||||
QAudioFormat _desiredInputFormat;
|
||||
|
|
|
@ -28,6 +28,15 @@
|
|||
|
||||
int audioInjectorPtrMetaTypeId = qRegisterMetaType<AudioInjector*>();
|
||||
|
||||
AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs) {
|
||||
return static_cast<AudioInjectorState>(static_cast<uint8_t>(lhs) & static_cast<uint8_t>(rhs));
|
||||
};
|
||||
|
||||
AudioInjectorState& operator|= (AudioInjectorState& lhs, AudioInjectorState rhs) {
|
||||
lhs = static_cast<AudioInjectorState>(static_cast<uint8_t>(lhs) | static_cast<uint8_t>(rhs));
|
||||
return lhs;
|
||||
};
|
||||
|
||||
AudioInjector::AudioInjector(QObject* parent) :
|
||||
QObject(parent)
|
||||
{
|
||||
|
@ -48,6 +57,10 @@ AudioInjector::AudioInjector(const QByteArray& audioData, const AudioInjectorOpt
|
|||
|
||||
}
|
||||
|
||||
bool AudioInjector::stateHas(AudioInjectorState state) const {
|
||||
return (_state & state) == state;
|
||||
}
|
||||
|
||||
void AudioInjector::setOptions(const AudioInjectorOptions& options) {
|
||||
// since options.stereo is computed from the audio stream,
|
||||
// we need to copy it from existing options just in case.
|
||||
|
@ -56,10 +69,25 @@ void AudioInjector::setOptions(const AudioInjectorOptions& options) {
|
|||
_options.stereo = currentlyStereo;
|
||||
}
|
||||
|
||||
void AudioInjector::finishNetworkInjection() {
|
||||
_state |= AudioInjectorState::NetworkInjectionFinished;
|
||||
|
||||
// if we are already finished with local
|
||||
// injection, then we are finished
|
||||
if(stateHas(AudioInjectorState::LocalInjectionFinished)) {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
|
||||
void AudioInjector::finishLocalInjection() {
|
||||
_state |= AudioInjectorState::LocalInjectionFinished;
|
||||
if(_options.localOnly || stateHas(AudioInjectorState::NetworkInjectionFinished)) {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
|
||||
void AudioInjector::finish() {
|
||||
bool shouldDelete = (_state == State::NotFinishedWithPendingDelete);
|
||||
_state = State::Finished;
|
||||
_state |= AudioInjectorState::Finished;
|
||||
|
||||
emit finished();
|
||||
|
||||
|
@ -69,7 +97,7 @@ void AudioInjector::finish() {
|
|||
_localBuffer = NULL;
|
||||
}
|
||||
|
||||
if (shouldDelete) {
|
||||
if (stateHas(AudioInjectorState::PendingDelete)) {
|
||||
// we've been asked to delete after finishing, trigger a deleteLater here
|
||||
deleteLater();
|
||||
}
|
||||
|
@ -121,23 +149,27 @@ void AudioInjector::restart() {
|
|||
_hasSentFirstFrame = false;
|
||||
|
||||
// check our state to decide if we need extra handling for the restart request
|
||||
if (_state == State::Finished) {
|
||||
if (stateHas(AudioInjectorState::Finished)) {
|
||||
// we finished playing, need to reset state so we can get going again
|
||||
_hasSetup = false;
|
||||
_shouldStop = false;
|
||||
_state = State::NotFinished;
|
||||
_state = AudioInjectorState::NotFinished;
|
||||
|
||||
// call inject audio to start injection over again
|
||||
setupInjection();
|
||||
|
||||
// if we're a local injector, just inject again
|
||||
if (_options.localOnly) {
|
||||
injectLocally();
|
||||
} else {
|
||||
// wake the AudioInjectorManager back up if it's stuck waiting
|
||||
if (!injectorManager->restartFinishedInjector(this)) {
|
||||
_state = State::Finished; // we're not playing, so reset the state used by isPlaying.
|
||||
// inject locally
|
||||
if(injectLocally()) {
|
||||
|
||||
// if not localOnly, wake the AudioInjectorManager back up if it is stuck waiting
|
||||
if (!_options.localOnly) {
|
||||
|
||||
if (!injectorManager->restartFinishedInjector(this)) {
|
||||
_state = AudioInjectorState::Finished; // we're not playing, so reset the state used by isPlaying.
|
||||
}
|
||||
}
|
||||
} else {
|
||||
_state = AudioInjectorState::Finished; // we failed to play, so we are finished again
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -183,7 +215,7 @@ static const int64_t NEXT_FRAME_DELTA_ERROR_OR_FINISHED = -1;
|
|||
static const int64_t NEXT_FRAME_DELTA_IMMEDIATELY = 0;
|
||||
|
||||
int64_t AudioInjector::injectNextFrame() {
|
||||
if (_state == AudioInjector::State::Finished) {
|
||||
if (stateHas(AudioInjectorState::NetworkInjectionFinished)) {
|
||||
qDebug() << "AudioInjector::injectNextFrame called but AudioInjector has finished and was not restarted. Returning.";
|
||||
return NEXT_FRAME_DELTA_ERROR_OR_FINISHED;
|
||||
}
|
||||
|
@ -234,8 +266,10 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
// pack the stereo/mono type of the stream
|
||||
audioPacketStream << _options.stereo;
|
||||
|
||||
// pack the flag for loopback
|
||||
uchar loopbackFlag = (uchar)true;
|
||||
// pack the flag for loopback. Now, we don't loopback
|
||||
// and _always_ play locally, so loopbackFlag should be
|
||||
// false always.
|
||||
uchar loopbackFlag = (uchar)false;
|
||||
audioPacketStream << loopbackFlag;
|
||||
|
||||
// pack the position for injected audio
|
||||
|
@ -333,7 +367,7 @@ int64_t AudioInjector::injectNextFrame() {
|
|||
}
|
||||
|
||||
if (_currentSendOffset >= _audioData.size() && !_options.loop) {
|
||||
finish();
|
||||
finishNetworkInjection();
|
||||
return NEXT_FRAME_DELTA_ERROR_OR_FINISHED;
|
||||
}
|
||||
|
||||
|
@ -372,10 +406,10 @@ void AudioInjector::triggerDeleteAfterFinish() {
|
|||
return;
|
||||
}
|
||||
|
||||
if (_state == State::Finished) {
|
||||
if (_state == AudioInjectorState::Finished) {
|
||||
stopAndDeleteLater();
|
||||
} else {
|
||||
_state = State::NotFinishedWithPendingDelete;
|
||||
_state |= AudioInjectorState::PendingDelete;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -421,7 +455,7 @@ AudioInjector* AudioInjector::playSoundAndDelete(const QByteArray& buffer, const
|
|||
AudioInjector* sound = playSound(buffer, options, localInterface);
|
||||
|
||||
if (sound) {
|
||||
sound->_state = AudioInjector::State::NotFinishedWithPendingDelete;
|
||||
sound->_state |= AudioInjectorState::PendingDelete;
|
||||
}
|
||||
|
||||
return sound;
|
||||
|
@ -438,21 +472,23 @@ AudioInjector* AudioInjector::playSound(const QByteArray& buffer, const AudioInj
|
|||
// setup parameters required for injection
|
||||
injector->setupInjection();
|
||||
|
||||
if (options.localOnly) {
|
||||
if (injector->injectLocally()) {
|
||||
// local injection succeeded, return the pointer to injector
|
||||
return injector;
|
||||
} else {
|
||||
// unable to inject locally, return a nullptr
|
||||
return nullptr;
|
||||
}
|
||||
} else {
|
||||
// attempt to thread the new injector
|
||||
if (injectorManager->threadInjector(injector)) {
|
||||
return injector;
|
||||
} else {
|
||||
// we failed to thread the new injector (we are at the max number of injector threads)
|
||||
return nullptr;
|
||||
}
|
||||
// we always inject locally
|
||||
//
|
||||
if (!injector->injectLocally()) {
|
||||
// failed, so don't bother sending to server
|
||||
qDebug() << "AudioInjector::playSound failed to inject locally";
|
||||
return nullptr;
|
||||
}
|
||||
// if localOnly, we are done, just return injector.
|
||||
if (options.localOnly) {
|
||||
return injector;
|
||||
}
|
||||
|
||||
// send off to server for everyone else
|
||||
if (!injectorManager->threadInjector(injector)) {
|
||||
// we failed to thread the new injector (we are at the max number of injector threads)
|
||||
qDebug() << "AudioInjector::playSound failed to thread injector";
|
||||
}
|
||||
return injector;
|
||||
|
||||
}
|
||||
|
|
|
@ -32,24 +32,29 @@
|
|||
class AbstractAudioInterface;
|
||||
class AudioInjectorManager;
|
||||
|
||||
|
||||
enum class AudioInjectorState : uint8_t {
|
||||
NotFinished = 0,
|
||||
Finished = 1,
|
||||
PendingDelete = 2,
|
||||
LocalInjectionFinished = 4,
|
||||
NetworkInjectionFinished = 8
|
||||
};
|
||||
|
||||
AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs);
|
||||
AudioInjectorState& operator|= (AudioInjectorState& lhs, AudioInjectorState rhs);
|
||||
|
||||
// In order to make scripting cleaner for the AudioInjector, the script now holds on to the AudioInjector object
|
||||
// until it dies.
|
||||
|
||||
class AudioInjector : public QObject {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
enum class State : uint8_t {
|
||||
NotFinished,
|
||||
NotFinishedWithPendingDelete,
|
||||
Finished
|
||||
};
|
||||
|
||||
AudioInjector(QObject* parent);
|
||||
AudioInjector(const Sound& sound, const AudioInjectorOptions& injectorOptions);
|
||||
AudioInjector(const QByteArray& audioData, const AudioInjectorOptions& injectorOptions);
|
||||
|
||||
bool isFinished() const { return _state == State::Finished; }
|
||||
bool isFinished() const { return (stateHas(AudioInjectorState::Finished)); }
|
||||
|
||||
int getCurrentSendOffset() const { return _currentSendOffset; }
|
||||
void setCurrentSendOffset(int currentSendOffset) { _currentSendOffset = currentSendOffset; }
|
||||
|
@ -63,6 +68,7 @@ public:
|
|||
bool isStereo() const { return _options.stereo; }
|
||||
void setLocalAudioInterface(AbstractAudioInterface* localAudioInterface) { _localAudioInterface = localAudioInterface; }
|
||||
|
||||
bool stateHas(AudioInjectorState state) const ;
|
||||
static AudioInjector* playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options, AbstractAudioInterface* localInterface);
|
||||
static AudioInjector* playSound(const QByteArray& buffer, const AudioInjectorOptions options, AbstractAudioInterface* localInterface);
|
||||
static AudioInjector* playSound(SharedSoundPointer sound, const float volume, const float stretchFactor, const glm::vec3 position);
|
||||
|
@ -78,8 +84,10 @@ public slots:
|
|||
void setOptions(const AudioInjectorOptions& options);
|
||||
|
||||
float getLoudness() const { return _loudness; }
|
||||
bool isPlaying() const { return _state == State::NotFinished || _state == State::NotFinishedWithPendingDelete; }
|
||||
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
|
||||
void finish();
|
||||
void finishLocalInjection();
|
||||
void finishNetworkInjection();
|
||||
|
||||
signals:
|
||||
void finished();
|
||||
|
@ -92,7 +100,7 @@ private:
|
|||
|
||||
QByteArray _audioData;
|
||||
AudioInjectorOptions _options;
|
||||
State _state { State::NotFinished };
|
||||
AudioInjectorState _state { AudioInjectorState::NotFinished };
|
||||
bool _hasSentFirstFrame { false };
|
||||
bool _hasSetup { false };
|
||||
bool _shouldStop { false };
|
||||
|
@ -111,4 +119,5 @@ private:
|
|||
friend class AudioInjectorManager;
|
||||
};
|
||||
|
||||
|
||||
#endif // hifi_AudioInjector_h
|
||||
|
|
|
@ -36,7 +36,15 @@ OffscreenGLCanvas::~OffscreenGLCanvas() {
|
|||
delete _logger;
|
||||
_logger = nullptr;
|
||||
}
|
||||
|
||||
_context->doneCurrent();
|
||||
delete _context;
|
||||
_context = nullptr;
|
||||
|
||||
_offscreenSurface->destroy();
|
||||
delete _offscreenSurface;
|
||||
_offscreenSurface = nullptr;
|
||||
|
||||
}
|
||||
|
||||
bool OffscreenGLCanvas::create(QOpenGLContext* sharedContext) {
|
||||
|
|
|
@ -34,8 +34,8 @@ public:
|
|||
|
||||
protected:
|
||||
std::once_flag _reportOnce;
|
||||
QOpenGLContext* _context;
|
||||
QOffscreenSurface* _offscreenSurface;
|
||||
QOpenGLContext* _context{ nullptr };
|
||||
QOffscreenSurface* _offscreenSurface{ nullptr };
|
||||
QOpenGLDebugLogger* _logger{ nullptr };
|
||||
};
|
||||
|
||||
|
|
|
@ -28,16 +28,17 @@ QOpenGLContext* QOpenGLContextWrapper::currentContext() {
|
|||
return QOpenGLContext::currentContext();
|
||||
}
|
||||
|
||||
|
||||
QOpenGLContextWrapper::QOpenGLContextWrapper() :
|
||||
_context(new QOpenGLContext)
|
||||
{
|
||||
}
|
||||
|
||||
_ownContext(true), _context(new QOpenGLContext) { }
|
||||
|
||||
QOpenGLContextWrapper::QOpenGLContextWrapper(QOpenGLContext* context) :
|
||||
_context(context)
|
||||
{
|
||||
_context(context) { }
|
||||
|
||||
QOpenGLContextWrapper::~QOpenGLContextWrapper() {
|
||||
if (_ownContext) {
|
||||
delete _context;
|
||||
_context = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void QOpenGLContextWrapper::setFormat(const QSurfaceFormat& format) {
|
||||
|
|
|
@ -23,6 +23,7 @@ class QOpenGLContextWrapper {
|
|||
public:
|
||||
QOpenGLContextWrapper();
|
||||
QOpenGLContextWrapper(QOpenGLContext* context);
|
||||
virtual ~QOpenGLContextWrapper();
|
||||
void setFormat(const QSurfaceFormat& format);
|
||||
bool create();
|
||||
void swapBuffers(QSurface* surface);
|
||||
|
@ -40,6 +41,7 @@ public:
|
|||
|
||||
|
||||
private:
|
||||
bool _ownContext { false };
|
||||
QOpenGLContext* _context { nullptr };
|
||||
};
|
||||
|
||||
|
|
|
@ -187,7 +187,11 @@ GLTexture::~GLTexture() {
|
|||
}
|
||||
}
|
||||
|
||||
Backend::decrementTextureGPUCount();
|
||||
if (_id) {
|
||||
glDeleteTextures(1, &_id);
|
||||
const_cast<GLuint&>(_id) = 0;
|
||||
Backend::decrementTextureGPUCount();
|
||||
}
|
||||
Backend::updateTextureGPUMemoryUsage(_size, 0);
|
||||
Backend::updateTextureGPUVirtualMemoryUsage(_virtualSize, 0);
|
||||
}
|
||||
|
|
61
libraries/gpu/src/gpu/PackedNormal.slh
Normal file
61
libraries/gpu/src/gpu/PackedNormal.slh
Normal file
|
@ -0,0 +1,61 @@
|
|||
<!
|
||||
// PackedNormal.slh
|
||||
// libraries/gpu/src
|
||||
//
|
||||
// Created by Sam Gateau on 7/19/16.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
!>
|
||||
<@if not PACKED_NORMAL_SLH@>
|
||||
<@def PACKED_NORMAL_SLH@>
|
||||
|
||||
vec2 signNotZero(vec2 v) {
|
||||
return vec2((v.x >= 0.0) ? +1.0 : -1.0, (v.y >= 0.0) ? +1.0 : -1.0);
|
||||
}
|
||||
|
||||
vec2 float32x3_to_oct(in vec3 v) {
|
||||
vec2 p = v.xy * (1.0 / (abs(v.x) + abs(v.y) + abs(v.z)));
|
||||
return ((v.z <= 0.0) ? ((1.0 - abs(p.yx)) * signNotZero(p)) : p);
|
||||
}
|
||||
|
||||
|
||||
vec3 oct_to_float32x3(in vec2 e) {
|
||||
vec3 v = vec3(e.xy, 1.0 - abs(e.x) - abs(e.y));
|
||||
if (v.z < 0) {
|
||||
v.xy = (1.0 - abs(v.yx)) * signNotZero(v.xy);
|
||||
}
|
||||
return normalize(v);
|
||||
}
|
||||
|
||||
vec3 snorm12x2_to_unorm8x3(vec2 f) {
|
||||
vec2 u = vec2(round(clamp(f, -1.0, 1.0) * 2047.0 + 2047.0));
|
||||
float t = floor(u.y / 256.0);
|
||||
|
||||
return floor(vec3(
|
||||
u.x / 16.0,
|
||||
fract(u.x / 16.0) * 256.0 + t,
|
||||
u.y - t * 256.0
|
||||
)) / 255.0;
|
||||
}
|
||||
|
||||
vec2 unorm8x3_to_snorm12x2(vec3 u) {
|
||||
u *= 255.0;
|
||||
u.y *= (1.0 / 16.0);
|
||||
vec2 s = vec2( u.x * 16.0 + floor(u.y),
|
||||
fract(u.y) * (16.0 * 256.0) + u.z);
|
||||
return clamp(s * (1.0 / 2047.0) - 1.0, vec2(-1.0), vec2(1.0));
|
||||
}
|
||||
|
||||
|
||||
// Recommended function to pack/unpack vec3<float> normals to vec3<uint8> rgb with best efficiency
|
||||
vec3 packNormal(in vec3 n) {
|
||||
return snorm12x2_to_unorm8x3(float32x3_to_oct(n));
|
||||
}
|
||||
|
||||
vec3 unpackNormal(in vec3 p) {
|
||||
return oct_to_float32x3(unorm8x3_to_snorm12x2(p));
|
||||
}
|
||||
|
||||
<@endif@>
|
|
@ -379,7 +379,6 @@ void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const Ren
|
|||
auto& deferredFramebuffer = inputs.get0();
|
||||
auto& linearDepthTarget = inputs.get1();
|
||||
auto& surfaceGeometryFramebuffer = inputs.get2();
|
||||
auto& diffusedCurvatureFramebuffer = inputs.get3();
|
||||
|
||||
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
|
@ -414,7 +413,7 @@ void DebugDeferredBuffer::run(const SceneContextPointer& sceneContext, const Ren
|
|||
batch.setResourceTexture(HalfNormal, linearDepthTarget->getHalfNormalTexture());
|
||||
|
||||
batch.setResourceTexture(Curvature, surfaceGeometryFramebuffer->getCurvatureTexture());
|
||||
batch.setResourceTexture(DiffusedCurvature, diffusedCurvatureFramebuffer->getRenderBuffer(0));
|
||||
batch.setResourceTexture(DiffusedCurvature, surfaceGeometryFramebuffer->getLowCurvatureTexture());
|
||||
if (DependencyManager::get<DeferredLightingEffect>()->isAmbientOcclusionEnabled()) {
|
||||
batch.setResourceTexture(AmbientOcclusion, framebufferCache->getOcclusionTexture());
|
||||
} else {
|
||||
|
|
|
@ -11,6 +11,8 @@
|
|||
<@if not DEFERRED_BUFFER_SLH@>
|
||||
<@def DEFERRED_BUFFER_SLH@>
|
||||
|
||||
<@include gpu/PackedNormal.slh@>
|
||||
|
||||
// Unpack the metallic-mode value
|
||||
const float FRAG_PACK_SHADED_NON_METALLIC = 0.0;
|
||||
const float FRAG_PACK_SHADED_METALLIC = 0.1;
|
||||
|
@ -63,44 +65,7 @@ float packUnlit() {
|
|||
return FRAG_PACK_UNLIT;
|
||||
}
|
||||
|
||||
|
||||
vec2 signNotZero(vec2 v) {
|
||||
return vec2((v.x >= 0.0) ? +1.0 : -1.0, (v.y >= 0.0) ? +1.0 : -1.0);
|
||||
}
|
||||
|
||||
vec2 float32x3_to_oct(in vec3 v) {
|
||||
vec2 p = v.xy * (1.0 / (abs(v.x) + abs(v.y) + abs(v.z)));
|
||||
return ((v.z <= 0.0) ? ((1.0 - abs(p.yx)) * signNotZero(p)) : p);
|
||||
}
|
||||
|
||||
|
||||
vec3 oct_to_float32x3(in vec2 e) {
|
||||
vec3 v = vec3(e.xy, 1.0 - abs(e.x) - abs(e.y));
|
||||
if (v.z < 0) {
|
||||
v.xy = (1.0 - abs(v.yx)) * signNotZero(v.xy);
|
||||
}
|
||||
return normalize(v);
|
||||
}
|
||||
|
||||
vec3 snorm12x2_to_unorm8x3(vec2 f) {
|
||||
vec2 u = vec2(round(clamp(f, -1.0, 1.0) * 2047.0 + 2047.0));
|
||||
float t = floor(u.y / 256.0);
|
||||
|
||||
return floor(vec3(
|
||||
u.x / 16.0,
|
||||
fract(u.x / 16.0) * 256.0 + t,
|
||||
u.y - t * 256.0
|
||||
)) / 255.0;
|
||||
}
|
||||
|
||||
vec2 unorm8x3_to_snorm12x2(vec3 u) {
|
||||
u *= 255.0;
|
||||
u.y *= (1.0 / 16.0);
|
||||
vec2 s = vec2( u.x * 16.0 + floor(u.y),
|
||||
fract(u.y) * (16.0 * 256.0) + u.z);
|
||||
return clamp(s * (1.0 / 2047.0) - 1.0, vec2(-1.0), vec2(1.0));
|
||||
}
|
||||
|
||||
<!
|
||||
uniform sampler2D normalFittingMap;
|
||||
|
||||
vec3 bestFitNormal(vec3 normal) {
|
||||
|
@ -119,14 +84,6 @@ vec3 bestFitNormal(vec3 normal) {
|
|||
|
||||
return (cN * 0.5 + 0.5);
|
||||
}
|
||||
|
||||
vec3 packNormal(in vec3 n) {
|
||||
return snorm12x2_to_unorm8x3(float32x3_to_oct(n));
|
||||
}
|
||||
|
||||
vec3 unpackNormal(in vec3 p) {
|
||||
return oct_to_float32x3(unorm8x3_to_snorm12x2(p));
|
||||
}
|
||||
|
||||
!>
|
||||
|
||||
<@endif@>
|
||||
|
|
|
@ -12,7 +12,6 @@
|
|||
<@def DEFERRED_BUFFER_WRITE_SLH@>
|
||||
|
||||
<@include DeferredBuffer.slh@>
|
||||
<@include LightingModel.slh@>
|
||||
|
||||
|
||||
layout(location = 0) out vec4 _fragColor0;
|
||||
|
@ -40,7 +39,6 @@ void packDeferredFragment(vec3 normal, float alpha, vec3 albedo, float roughness
|
|||
if (alpha != 1.0) {
|
||||
discard;
|
||||
}
|
||||
emissive *= isEmissiveEnabled();
|
||||
_fragColor0 = vec4(albedo, ((scattering > 0.0) ? packScatteringMetallic(metallic) : packShadedMetallic(metallic)));
|
||||
_fragColor1 = vec4(packNormal(normal), clamp(roughness, 0.0, 1.0));
|
||||
_fragColor2 = vec4(((scattering > 0.0) ? vec3(scattering) : emissive), occlusion);
|
||||
|
@ -48,7 +46,6 @@ void packDeferredFragment(vec3 normal, float alpha, vec3 albedo, float roughness
|
|||
_fragColor3 = vec4(emissive, 1.0);
|
||||
}
|
||||
|
||||
|
||||
void packDeferredFragmentLightmap(vec3 normal, float alpha, vec3 albedo, float roughness, float metallic, vec3 fresnel, vec3 lightmap) {
|
||||
if (alpha != 1.0) {
|
||||
discard;
|
||||
|
@ -57,11 +54,8 @@ void packDeferredFragmentLightmap(vec3 normal, float alpha, vec3 albedo, float r
|
|||
_fragColor0 = vec4(albedo, packLightmappedMetallic(metallic));
|
||||
_fragColor1 = vec4(packNormal(normal), clamp(roughness, 0.0, 1.0));
|
||||
_fragColor2 = vec4(lightmap, 1.0);
|
||||
|
||||
_fragColor3 = vec4(lightmap * isLightmapEnabled(), 1.0);
|
||||
if (isAlbedoEnabled() > 0.0) {
|
||||
_fragColor3.rgb *= albedo;
|
||||
}
|
||||
|
||||
_fragColor3 = vec4(lightmap * albedo, 1.0);
|
||||
}
|
||||
|
||||
void packDeferredFragmentUnlit(vec3 normal, float alpha, vec3 color) {
|
||||
|
|
|
@ -448,7 +448,8 @@ void RenderDeferredSetup::run(const render::SceneContextPointer& sceneContext, c
|
|||
batch.setResourceTexture(DEFERRED_BUFFER_CURVATURE_UNIT, surfaceGeometryFramebuffer->getCurvatureTexture());
|
||||
}
|
||||
if (lowCurvatureNormalFramebuffer) {
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_DIFFUSED_CURVATURE_UNIT, lowCurvatureNormalFramebuffer->getRenderBuffer(0));
|
||||
// batch.setResourceTexture(DEFERRED_BUFFER_DIFFUSED_CURVATURE_UNIT, lowCurvatureNormalFramebuffer->getRenderBuffer(0));
|
||||
batch.setResourceTexture(DEFERRED_BUFFER_DIFFUSED_CURVATURE_UNIT, surfaceGeometryFramebuffer->getLowCurvatureTexture());
|
||||
}
|
||||
if (subsurfaceScatteringResource) {
|
||||
batch.setUniformBuffer(SCATTERING_PARAMETERS_BUFFER_SLOT, subsurfaceScatteringResource->getParametersBuffer());
|
||||
|
|
|
@ -20,7 +20,7 @@ struct LightingModel {
|
|||
vec4 _ShowContourObscuranceSpare2;
|
||||
};
|
||||
|
||||
uniform lightingModelBuffer {
|
||||
uniform lightingModelBuffer{
|
||||
LightingModel lightingModel;
|
||||
};
|
||||
|
||||
|
|
|
@ -134,18 +134,12 @@ RenderDeferredTask::RenderDeferredTask(CullFunctor cullFunctor) {
|
|||
const auto surfaceGeometryPassOutputs = addJob<SurfaceGeometryPass>("SurfaceGeometry", surfaceGeometryPassInputs);
|
||||
const auto surfaceGeometryFramebuffer = surfaceGeometryPassOutputs.getN<SurfaceGeometryPass::Outputs>(0);
|
||||
const auto curvatureFramebuffer = surfaceGeometryPassOutputs.getN<SurfaceGeometryPass::Outputs>(1);
|
||||
const auto midCurvatureNormalFramebuffer = surfaceGeometryPassOutputs.getN<SurfaceGeometryPass::Outputs>(2);
|
||||
const auto lowCurvatureNormalFramebuffer = surfaceGeometryPassOutputs.getN<SurfaceGeometryPass::Outputs>(3);
|
||||
|
||||
const auto curvatureRangeTimer = addJob<BeginGPURangeTimer>("BeginCurvatureRangeTimer");
|
||||
|
||||
// TODO: Push this 2 diffusion stages into surfaceGeometryPass as they are working together
|
||||
const auto diffuseCurvaturePassInputs = BlurGaussianDepthAware::Inputs(curvatureFramebuffer, halfLinearDepthTexture).hasVarying();
|
||||
const auto midCurvatureNormalFramebuffer = addJob<render::BlurGaussianDepthAware>("DiffuseCurvatureMid", diffuseCurvaturePassInputs);
|
||||
const auto lowCurvatureNormalFramebuffer = addJob<render::BlurGaussianDepthAware>("DiffuseCurvatureLow", diffuseCurvaturePassInputs, true); // THis blur pass generates it s render resource
|
||||
|
||||
// Simply update the scattering resource
|
||||
const auto scatteringResource = addJob<SubsurfaceScattering>("Scattering");
|
||||
|
||||
addJob<EndGPURangeTimer>("CurvatureRangeTimer", curvatureRangeTimer);
|
||||
|
||||
// AO job
|
||||
addJob<AmbientOcclusionEffect>("AmbientOcclusion");
|
||||
|
||||
|
|
|
@ -282,6 +282,10 @@ void SurfaceGeometryFramebuffer::updateLinearDepth(const gpu::TexturePointer& li
|
|||
void SurfaceGeometryFramebuffer::clear() {
|
||||
_curvatureFramebuffer.reset();
|
||||
_curvatureTexture.reset();
|
||||
_lowCurvatureFramebuffer.reset();
|
||||
_lowCurvatureTexture.reset();
|
||||
_blurringFramebuffer.reset();
|
||||
_blurringTexture.reset();
|
||||
}
|
||||
|
||||
gpu::TexturePointer SurfaceGeometryFramebuffer::getLinearDepthTexture() {
|
||||
|
@ -293,9 +297,17 @@ void SurfaceGeometryFramebuffer::allocate() {
|
|||
auto width = _frameSize.x;
|
||||
auto height = _frameSize.y;
|
||||
|
||||
_curvatureTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, width >> getResolutionLevel(), height >> getResolutionLevel(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT)));
|
||||
_curvatureTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, width, height, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT)));
|
||||
_curvatureFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
_curvatureFramebuffer->setRenderBuffer(0, _curvatureTexture);
|
||||
|
||||
_lowCurvatureTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, width, height, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT)));
|
||||
_lowCurvatureFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
_lowCurvatureFramebuffer->setRenderBuffer(0, _lowCurvatureTexture);
|
||||
|
||||
_blurringTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, width, height, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT)));
|
||||
_blurringFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
|
||||
_blurringFramebuffer->setRenderBuffer(0, _blurringTexture);
|
||||
}
|
||||
|
||||
gpu::FramebufferPointer SurfaceGeometryFramebuffer::getCurvatureFramebuffer() {
|
||||
|
@ -312,6 +324,34 @@ gpu::TexturePointer SurfaceGeometryFramebuffer::getCurvatureTexture() {
|
|||
return _curvatureTexture;
|
||||
}
|
||||
|
||||
gpu::FramebufferPointer SurfaceGeometryFramebuffer::getLowCurvatureFramebuffer() {
|
||||
if (!_lowCurvatureFramebuffer) {
|
||||
allocate();
|
||||
}
|
||||
return _lowCurvatureFramebuffer;
|
||||
}
|
||||
|
||||
gpu::TexturePointer SurfaceGeometryFramebuffer::getLowCurvatureTexture() {
|
||||
if (!_lowCurvatureTexture) {
|
||||
allocate();
|
||||
}
|
||||
return _lowCurvatureTexture;
|
||||
}
|
||||
|
||||
gpu::FramebufferPointer SurfaceGeometryFramebuffer::getBlurringFramebuffer() {
|
||||
if (!_blurringFramebuffer) {
|
||||
allocate();
|
||||
}
|
||||
return _blurringFramebuffer;
|
||||
}
|
||||
|
||||
gpu::TexturePointer SurfaceGeometryFramebuffer::getBlurringTexture() {
|
||||
if (!_blurringTexture) {
|
||||
allocate();
|
||||
}
|
||||
return _blurringTexture;
|
||||
}
|
||||
|
||||
void SurfaceGeometryFramebuffer::setResolutionLevel(int resolutionLevel) {
|
||||
if (resolutionLevel != getResolutionLevel()) {
|
||||
clear();
|
||||
|
@ -319,15 +359,18 @@ void SurfaceGeometryFramebuffer::setResolutionLevel(int resolutionLevel) {
|
|||
}
|
||||
}
|
||||
|
||||
SurfaceGeometryPass::SurfaceGeometryPass() {
|
||||
SurfaceGeometryPass::SurfaceGeometryPass() :
|
||||
_diffusePass(false)
|
||||
{
|
||||
Parameters parameters;
|
||||
_parametersBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Parameters), (const gpu::Byte*) ¶meters));
|
||||
}
|
||||
|
||||
void SurfaceGeometryPass::configure(const Config& config) {
|
||||
const float CM_TO_M = 0.01f;
|
||||
|
||||
if ((config.depthThreshold * 100.0f) != getCurvatureDepthThreshold()) {
|
||||
_parametersBuffer.edit<Parameters>().curvatureInfo.x = config.depthThreshold * 100.0f;
|
||||
if ((config.depthThreshold * CM_TO_M) != getCurvatureDepthThreshold()) {
|
||||
_parametersBuffer.edit<Parameters>().curvatureInfo.x = config.depthThreshold * CM_TO_M;
|
||||
}
|
||||
|
||||
if (config.basisScale != getCurvatureBasisScale()) {
|
||||
|
@ -341,7 +384,16 @@ void SurfaceGeometryPass::configure(const Config& config) {
|
|||
if (!_surfaceGeometryFramebuffer) {
|
||||
_surfaceGeometryFramebuffer = std::make_shared<SurfaceGeometryFramebuffer>();
|
||||
}
|
||||
|
||||
_surfaceGeometryFramebuffer->setResolutionLevel(config.resolutionLevel);
|
||||
if (config.resolutionLevel != getResolutionLevel()) {
|
||||
_parametersBuffer.edit<Parameters>().resolutionInfo.w = config.resolutionLevel;
|
||||
}
|
||||
|
||||
auto filterRadius = (getResolutionLevel() > 0 ? config.diffuseFilterScale / 2.0f : config.diffuseFilterScale);
|
||||
_diffusePass.getParameters()->setFilterRadiusScale(filterRadius);
|
||||
_diffusePass.getParameters()->setDepthThreshold(config.diffuseDepthThreshold);
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
@ -355,33 +407,53 @@ void SurfaceGeometryPass::run(const render::SceneContextPointer& sceneContext, c
|
|||
const auto deferredFramebuffer = inputs.get1();
|
||||
const auto linearDepthFramebuffer = inputs.get2();
|
||||
|
||||
auto linearDepthTexture = linearDepthFramebuffer->getHalfLinearDepthTexture();
|
||||
|
||||
auto linearDepthTexture = linearDepthFramebuffer->getLinearDepthTexture();
|
||||
auto normalTexture = deferredFramebuffer->getDeferredNormalTexture();
|
||||
auto sourceViewport = args->_viewport;
|
||||
auto curvatureViewport = sourceViewport;
|
||||
|
||||
if (_surfaceGeometryFramebuffer->getResolutionLevel() > 0) {
|
||||
linearDepthTexture = linearDepthFramebuffer->getHalfLinearDepthTexture();
|
||||
normalTexture = linearDepthFramebuffer->getHalfNormalTexture();
|
||||
curvatureViewport = curvatureViewport >> _surfaceGeometryFramebuffer->getResolutionLevel();
|
||||
}
|
||||
|
||||
if (!_surfaceGeometryFramebuffer) {
|
||||
_surfaceGeometryFramebuffer = std::make_shared<SurfaceGeometryFramebuffer>();
|
||||
}
|
||||
_surfaceGeometryFramebuffer->updateLinearDepth(linearDepthTexture);
|
||||
|
||||
// auto normalTexture = deferredFramebuffer->getDeferredNormalTexture();
|
||||
auto normalTexture = linearDepthFramebuffer->getHalfNormalTexture();
|
||||
|
||||
auto curvatureFBO = _surfaceGeometryFramebuffer->getCurvatureFramebuffer();
|
||||
auto curvatureFramebuffer = _surfaceGeometryFramebuffer->getCurvatureFramebuffer();
|
||||
auto curvatureTexture = _surfaceGeometryFramebuffer->getCurvatureTexture();
|
||||
#ifdef USE_STENCIL_TEST
|
||||
if (curvatureFBO->getDepthStencilBuffer() != deferredFramebuffer->getPrimaryDepthTexture()) {
|
||||
curvatureFBO->setDepthStencilBuffer(deferredFramebuffer->getPrimaryDepthTexture(), deferredFramebuffer->getPrimaryDepthTexture()->getTexelFormat());
|
||||
if (curvatureFramebuffer->getDepthStencilBuffer() != deferredFramebuffer->getPrimaryDepthTexture()) {
|
||||
curvatureFramebuffer->setDepthStencilBuffer(deferredFramebuffer->getPrimaryDepthTexture(), deferredFramebuffer->getPrimaryDepthTexture()->getTexelFormat());
|
||||
}
|
||||
#endif
|
||||
auto curvatureTexture = _surfaceGeometryFramebuffer->getCurvatureTexture();
|
||||
|
||||
auto lowCurvatureFramebuffer = _surfaceGeometryFramebuffer->getLowCurvatureFramebuffer();
|
||||
auto lowCurvatureTexture = _surfaceGeometryFramebuffer->getLowCurvatureTexture();
|
||||
|
||||
auto blurringFramebuffer = _surfaceGeometryFramebuffer->getBlurringFramebuffer();
|
||||
auto blurringTexture = _surfaceGeometryFramebuffer->getBlurringTexture();
|
||||
|
||||
outputs.edit0() = _surfaceGeometryFramebuffer;
|
||||
outputs.edit1() = curvatureFBO;
|
||||
outputs.edit1() = curvatureFramebuffer;
|
||||
outputs.edit2() = curvatureFramebuffer;
|
||||
outputs.edit3() = lowCurvatureFramebuffer;
|
||||
|
||||
auto curvaturePipeline = getCurvaturePipeline();
|
||||
auto diffuseVPipeline = _diffusePass.getBlurVPipeline();
|
||||
auto diffuseHPipeline = _diffusePass.getBlurHPipeline();
|
||||
|
||||
auto depthViewport = args->_viewport;
|
||||
auto curvatureViewport = depthViewport >> 1;
|
||||
// >> _surfaceGeometryFramebuffer->getResolutionLevel();
|
||||
_diffusePass.getParameters()->setWidthHeight(curvatureViewport.z, curvatureViewport.w, args->_context->isStereo());
|
||||
glm::ivec2 textureSize(curvatureTexture->getDimensions());
|
||||
_diffusePass.getParameters()->setTexcoordTransform(gpu::Framebuffer::evalSubregionTexcoordTransformCoefficients(textureSize, curvatureViewport));
|
||||
_diffusePass.getParameters()->setDepthPerspective(args->getViewFrustum().getProjection()[1][1]);
|
||||
_diffusePass.getParameters()->setLinearDepthPosFar(args->getViewFrustum().getFarClip());
|
||||
|
||||
|
||||
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
|
||||
_gpuTimer.begin(batch);
|
||||
batch.enableStereo(false);
|
||||
|
@ -390,32 +462,64 @@ void SurfaceGeometryPass::run(const render::SceneContextPointer& sceneContext, c
|
|||
batch.setViewTransform(Transform());
|
||||
|
||||
batch.setViewportTransform(curvatureViewport);
|
||||
batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(_surfaceGeometryFramebuffer->getCurvatureFrameSize(), curvatureViewport));
|
||||
|
||||
batch.setUniformBuffer(SurfaceGeometryPass_FrameTransformSlot, frameTransform->getFrameTransformBuffer());
|
||||
batch.setUniformBuffer(SurfaceGeometryPass_ParamsSlot, _parametersBuffer);
|
||||
batch.setModelTransform(gpu::Framebuffer::evalSubregionTexcoordTransform(_surfaceGeometryFramebuffer->getSourceFrameSize(), curvatureViewport));
|
||||
|
||||
// Curvature pass
|
||||
batch.setFramebuffer(curvatureFBO);
|
||||
|
||||
// We can avoid the clear by drawing the same clear vallue from the makeCurvature shader. same performances or no worse
|
||||
|
||||
batch.setUniformBuffer(SurfaceGeometryPass_FrameTransformSlot, frameTransform->getFrameTransformBuffer());
|
||||
batch.setUniformBuffer(SurfaceGeometryPass_ParamsSlot, _parametersBuffer);
|
||||
batch.setFramebuffer(curvatureFramebuffer);
|
||||
// We can avoid the clear by drawing the same clear vallue from the makeCurvature shader. same performances or no worse
|
||||
#ifdef USE_STENCIL_TEST
|
||||
// Except if stenciling out
|
||||
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
#endif
|
||||
|
||||
batch.setPipeline(curvaturePipeline);
|
||||
batch.setResourceTexture(SurfaceGeometryPass_DepthMapSlot, linearDepthTexture);
|
||||
batch.setResourceTexture(SurfaceGeometryPass_NormalMapSlot, normalTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
|
||||
batch.setResourceTexture(SurfaceGeometryPass_DepthMapSlot, nullptr);
|
||||
batch.setResourceTexture(SurfaceGeometryPass_NormalMapSlot, nullptr);
|
||||
batch.setUniformBuffer(SurfaceGeometryPass_ParamsSlot, nullptr);
|
||||
batch.setUniformBuffer(SurfaceGeometryPass_FrameTransformSlot, nullptr);
|
||||
|
||||
// Diffusion pass
|
||||
const int BlurTask_ParamsSlot = 0;
|
||||
const int BlurTask_SourceSlot = 0;
|
||||
const int BlurTask_DepthSlot = 1;
|
||||
batch.setUniformBuffer(BlurTask_ParamsSlot, _diffusePass.getParameters()->_parametersBuffer);
|
||||
|
||||
batch.setResourceTexture(BlurTask_DepthSlot, linearDepthTexture);
|
||||
|
||||
batch.setFramebuffer(blurringFramebuffer);
|
||||
batch.setPipeline(diffuseVPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, curvatureTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
batch.setFramebuffer(curvatureFramebuffer);
|
||||
batch.setPipeline(diffuseHPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, blurringTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
batch.setFramebuffer(blurringFramebuffer);
|
||||
batch.setPipeline(diffuseVPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, curvatureTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
batch.setFramebuffer(lowCurvatureFramebuffer);
|
||||
batch.setPipeline(diffuseHPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, blurringTexture);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, nullptr);
|
||||
batch.setResourceTexture(BlurTask_DepthSlot, nullptr);
|
||||
batch.setUniformBuffer(BlurTask_ParamsSlot, nullptr);
|
||||
|
||||
_gpuTimer.end(batch);
|
||||
});
|
||||
|
||||
|
||||
|
||||
auto config = std::static_pointer_cast<Config>(renderContext->jobConfig);
|
||||
config->gpuTime = _gpuTimer.getAverage();
|
||||
}
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
#include <DependencyManager.h>
|
||||
|
||||
#include "render/DrawTask.h"
|
||||
#include "render/BlurTask.h"
|
||||
#include "DeferredFrameTransform.h"
|
||||
#include "DeferredFramebuffer.h"
|
||||
|
||||
|
@ -111,12 +112,17 @@ public:
|
|||
|
||||
gpu::FramebufferPointer getCurvatureFramebuffer();
|
||||
gpu::TexturePointer getCurvatureTexture();
|
||||
|
||||
gpu::FramebufferPointer getLowCurvatureFramebuffer();
|
||||
gpu::TexturePointer getLowCurvatureTexture();
|
||||
|
||||
gpu::FramebufferPointer getBlurringFramebuffer();
|
||||
gpu::TexturePointer getBlurringTexture();
|
||||
|
||||
// Update the source framebuffer size which will drive the allocation of all the other resources.
|
||||
void updateLinearDepth(const gpu::TexturePointer& linearDepthBuffer);
|
||||
gpu::TexturePointer getLinearDepthTexture();
|
||||
const glm::ivec2& getSourceFrameSize() const { return _frameSize; }
|
||||
glm::ivec2 getCurvatureFrameSize() const { return _frameSize >> _resolutionLevel; }
|
||||
|
||||
void setResolutionLevel(int level);
|
||||
int getResolutionLevel() const { return _resolutionLevel; }
|
||||
|
@ -130,6 +136,12 @@ protected:
|
|||
gpu::FramebufferPointer _curvatureFramebuffer;
|
||||
gpu::TexturePointer _curvatureTexture;
|
||||
|
||||
gpu::FramebufferPointer _blurringFramebuffer;
|
||||
gpu::TexturePointer _blurringTexture;
|
||||
|
||||
gpu::FramebufferPointer _lowCurvatureFramebuffer;
|
||||
gpu::TexturePointer _lowCurvatureTexture;
|
||||
|
||||
glm::ivec2 _frameSize;
|
||||
int _resolutionLevel{ 0 };
|
||||
};
|
||||
|
@ -142,6 +154,10 @@ class SurfaceGeometryPassConfig : public render::Job::Config {
|
|||
Q_PROPERTY(float basisScale MEMBER basisScale NOTIFY dirty)
|
||||
Q_PROPERTY(float curvatureScale MEMBER curvatureScale NOTIFY dirty)
|
||||
Q_PROPERTY(int resolutionLevel MEMBER resolutionLevel NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(float diffuseFilterScale MEMBER diffuseFilterScale NOTIFY dirty)
|
||||
Q_PROPERTY(float diffuseDepthThreshold MEMBER diffuseDepthThreshold NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(double gpuTime READ getGpuTime)
|
||||
public:
|
||||
SurfaceGeometryPassConfig() : render::Job::Config(true) {}
|
||||
|
@ -149,7 +165,9 @@ public:
|
|||
float depthThreshold{ 5.0f }; // centimeters
|
||||
float basisScale{ 1.0f };
|
||||
float curvatureScale{ 10.0f };
|
||||
int resolutionLevel{ 0 };
|
||||
int resolutionLevel{ 1 };
|
||||
float diffuseFilterScale{ 0.2f };
|
||||
float diffuseDepthThreshold{ 1.0f };
|
||||
|
||||
double getGpuTime() { return gpuTime; }
|
||||
|
||||
|
@ -162,7 +180,7 @@ signals:
|
|||
class SurfaceGeometryPass {
|
||||
public:
|
||||
using Inputs = render::VaryingSet3<DeferredFrameTransformPointer, DeferredFramebufferPointer, LinearDepthFramebufferPointer>;
|
||||
using Outputs = render::VaryingSet2<SurfaceGeometryFramebufferPointer, gpu::FramebufferPointer>;
|
||||
using Outputs = render::VaryingSet4<SurfaceGeometryFramebufferPointer, gpu::FramebufferPointer, gpu::FramebufferPointer, gpu::FramebufferPointer>;
|
||||
using Config = SurfaceGeometryPassConfig;
|
||||
using JobModel = render::Job::ModelIO<SurfaceGeometryPass, Inputs, Outputs, Config>;
|
||||
|
||||
|
@ -171,9 +189,11 @@ public:
|
|||
void configure(const Config& config);
|
||||
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& inputs, Outputs& outputs);
|
||||
|
||||
|
||||
float getCurvatureDepthThreshold() const { return _parametersBuffer.get<Parameters>().curvatureInfo.x; }
|
||||
float getCurvatureBasisScale() const { return _parametersBuffer.get<Parameters>().curvatureInfo.y; }
|
||||
float getCurvatureScale() const { return _parametersBuffer.get<Parameters>().curvatureInfo.w; }
|
||||
int getResolutionLevel() const { return (int)_parametersBuffer.get<Parameters>().resolutionInfo.w; }
|
||||
|
||||
private:
|
||||
typedef gpu::BufferView UniformBufferView;
|
||||
|
@ -182,7 +202,7 @@ private:
|
|||
class Parameters {
|
||||
public:
|
||||
// Resolution info
|
||||
glm::vec4 resolutionInfo { -1.0f, 0.0f, 0.0f, 0.0f };
|
||||
glm::vec4 resolutionInfo { 0.0f, 0.0f, 0.0f, 1.0f }; // Default Curvature & Diffusion is running half res
|
||||
// Curvature algorithm
|
||||
glm::vec4 curvatureInfo{ 0.0f };
|
||||
|
||||
|
@ -190,11 +210,14 @@ private:
|
|||
};
|
||||
gpu::BufferView _parametersBuffer;
|
||||
|
||||
|
||||
SurfaceGeometryFramebufferPointer _surfaceGeometryFramebuffer;
|
||||
|
||||
const gpu::PipelinePointer& getCurvaturePipeline();
|
||||
|
||||
gpu::PipelinePointer _curvaturePipeline;
|
||||
|
||||
render::BlurGaussianDepthAware _diffusePass;
|
||||
|
||||
|
||||
gpu::RangeTimer _gpuTimer;
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
//
|
||||
|
||||
<@include DeferredBufferWrite.slh@>
|
||||
<@include LightingModel.slh@>
|
||||
<@include model/Material.slh@>
|
||||
|
||||
<@include MaterialTextures.slh@>
|
||||
|
|
|
@ -41,7 +41,7 @@ void main(void) {
|
|||
#ifdef PROCEDURAL_V1
|
||||
specular = getProceduralColor().rgb;
|
||||
// Procedural Shaders are expected to be Gamma corrected so let's bring back the RGB in linear space for the rest of the pipeline
|
||||
specular = pow(specular, vec3(2.2));
|
||||
//specular = pow(specular, vec3(2.2));
|
||||
emissiveAmount = 1.0;
|
||||
#else
|
||||
emissiveAmount = getProceduralColors(diffuse, specular, shininess);
|
||||
|
|
|
@ -10,35 +10,11 @@
|
|||
//
|
||||
|
||||
|
||||
|
||||
<@include gpu/PackedNormal.slh@>
|
||||
|
||||
uniform sampler2D linearDepthMap;
|
||||
uniform sampler2D normalMap;
|
||||
|
||||
|
||||
vec2 signNotZero(vec2 v) {
|
||||
return vec2((v.x >= 0.0) ? +1.0 : -1.0, (v.y >= 0.0) ? +1.0 : -1.0);
|
||||
}
|
||||
|
||||
vec3 oct_to_float32x3(in vec2 e) {
|
||||
vec3 v = vec3(e.xy, 1.0 - abs(e.x) - abs(e.y));
|
||||
if (v.z < 0) {
|
||||
v.xy = (1.0 - abs(v.yx)) * signNotZero(v.xy);
|
||||
}
|
||||
return normalize(v);
|
||||
}
|
||||
|
||||
vec2 unorm8x3_to_snorm12x2(vec3 u) {
|
||||
u *= 255.0;
|
||||
u.y *= (1.0 / 16.0);
|
||||
vec2 s = vec2( u.x * 16.0 + floor(u.y),
|
||||
fract(u.y) * (16.0 * 256.0) + u.z);
|
||||
return clamp(s * (1.0 / 2047.0) - 1.0, vec2(-1.0), vec2(1.0));
|
||||
}
|
||||
vec3 unpackNormal(in vec3 p) {
|
||||
return oct_to_float32x3(unorm8x3_to_snorm12x2(p));
|
||||
}
|
||||
|
||||
in vec2 varTexCoord0;
|
||||
|
||||
out vec4 outLinearDepth;
|
||||
|
@ -46,43 +22,22 @@ out vec4 outNormal;
|
|||
|
||||
void main(void) {
|
||||
// Gather 2 by 2 quads from texture
|
||||
vec4 Zeyes = textureGather(linearDepthMap, varTexCoord0, 0);
|
||||
|
||||
// Try different filters for Z
|
||||
// vec4 Zeyes = textureGather(linearDepthMap, varTexCoord0, 0);
|
||||
// float Zeye = min(min(Zeyes.x, Zeyes.y), min(Zeyes.z, Zeyes.w));
|
||||
float Zeye = texture(linearDepthMap, varTexCoord0).x;
|
||||
|
||||
vec4 rawNormalsX = textureGather(normalMap, varTexCoord0, 0);
|
||||
vec4 rawNormalsY = textureGather(normalMap, varTexCoord0, 1);
|
||||
vec4 rawNormalsZ = textureGather(normalMap, varTexCoord0, 2);
|
||||
|
||||
float Zeye = min(min(Zeyes.x, Zeyes.y), min(Zeyes.z, Zeyes.w));
|
||||
|
||||
vec3 normal = vec3(0.0);
|
||||
normal += unpackNormal(vec3(rawNormalsX[0], rawNormalsY[0], rawNormalsZ[0]));
|
||||
normal += unpackNormal(vec3(rawNormalsX[1], rawNormalsY[1], rawNormalsZ[1]));
|
||||
normal += unpackNormal(vec3(rawNormalsX[2], rawNormalsY[2], rawNormalsZ[2]));
|
||||
normal += unpackNormal(vec3(rawNormalsX[3], rawNormalsY[3], rawNormalsZ[3]));
|
||||
/*
|
||||
ivec2 texpos = ivec2(gl_FragCoord.xy) * 2;
|
||||
|
||||
vec4 Zeyes;
|
||||
Zeyes[0] = texelFetch(linearDepthMap, texpos, 0).x;
|
||||
Zeyes[1] = texelFetch(linearDepthMap, texpos + ivec2(0, 1), 0).x;
|
||||
Zeyes[2] = texelFetch(linearDepthMap, texpos + ivec2(1, 0), 0).x;
|
||||
Zeyes[3] = texelFetch(linearDepthMap, texpos + ivec2(1, 1), 0).x;
|
||||
|
||||
vec3 rawNormals[4];
|
||||
rawNormals[0] = texelFetch(normalMap, texpos, 0).xyz;
|
||||
rawNormals[1] = texelFetch(normalMap, texpos + ivec2(0, 1), 0).xyz;
|
||||
rawNormals[2] = texelFetch(normalMap, texpos + ivec2(1, 0), 0).xyz;
|
||||
rawNormals[3] = texelFetch(normalMap, texpos + ivec2(1, 1), 0).xyz;
|
||||
|
||||
float Zeye = min(min(Zeyes.x, Zeyes.y), min(Zeyes.z, Zeyes.w));
|
||||
|
||||
vec3 normal = vec3(0.0);
|
||||
|
||||
normal += unpackNormal(rawNormals[0]);
|
||||
normal += unpackNormal(rawNormals[1]);
|
||||
normal += unpackNormal(rawNormals[2]);
|
||||
normal += unpackNormal(rawNormals[3]);
|
||||
*/
|
||||
|
||||
normal = normalize(normal);
|
||||
|
||||
|
|
|
@ -12,6 +12,8 @@
|
|||
<@include DeferredTransform.slh@>
|
||||
<$declareDeferredFrameTransform()$>
|
||||
|
||||
<@include gpu/PackedNormal.slh@>
|
||||
|
||||
struct SurfaceGeometryParams {
|
||||
// Resolution info
|
||||
vec4 resolutionInfo;
|
||||
|
@ -35,6 +37,10 @@ float getCurvatureScale() {
|
|||
return params.curvatureInfo.w;
|
||||
}
|
||||
|
||||
bool isFullResolution() {
|
||||
return params.resolutionInfo.w == 0.0;
|
||||
}
|
||||
|
||||
|
||||
uniform sampler2D linearDepthMap;
|
||||
float getZEye(ivec2 pixel) {
|
||||
|
@ -44,29 +50,6 @@ float getZEyeLinear(vec2 texcoord) {
|
|||
return -texture(linearDepthMap, texcoord).x;
|
||||
}
|
||||
|
||||
vec2 signNotZero(vec2 v) {
|
||||
return vec2((v.x >= 0.0) ? +1.0 : -1.0, (v.y >= 0.0) ? +1.0 : -1.0);
|
||||
}
|
||||
|
||||
vec3 oct_to_float32x3(in vec2 e) {
|
||||
vec3 v = vec3(e.xy, 1.0 - abs(e.x) - abs(e.y));
|
||||
if (v.z < 0) {
|
||||
v.xy = (1.0 - abs(v.yx)) * signNotZero(v.xy);
|
||||
}
|
||||
return normalize(v);
|
||||
}
|
||||
|
||||
vec2 unorm8x3_to_snorm12x2(vec3 u) {
|
||||
u *= 255.0;
|
||||
u.y *= (1.0 / 16.0);
|
||||
vec2 s = vec2( u.x * 16.0 + floor(u.y),
|
||||
fract(u.y) * (16.0 * 256.0) + u.z);
|
||||
return clamp(s * (1.0 / 2047.0) - 1.0, vec2(-1.0), vec2(1.0));
|
||||
}
|
||||
vec3 unpackNormal(in vec3 p) {
|
||||
return oct_to_float32x3(unorm8x3_to_snorm12x2(p));
|
||||
}
|
||||
|
||||
vec2 sideToFrameTexcoord(vec2 side, vec2 texcoordPos) {
|
||||
return vec2((texcoordPos.x + side.x) * side.y, texcoordPos.y);
|
||||
}
|
||||
|
@ -78,10 +61,12 @@ vec3 getRawNormal(vec2 texcoord) {
|
|||
}
|
||||
|
||||
vec3 getWorldNormal(vec2 texcoord) {
|
||||
// vec3 rawNormal = getRawNormal(texcoord);
|
||||
// return unpackNormal(rawNormal);
|
||||
vec3 rawNormal = getRawNormal(texcoord);
|
||||
return normalize((rawNormal - vec3(0.5)) * 2.0);
|
||||
if (isFullResolution()) {
|
||||
return unpackNormal(rawNormal);
|
||||
} else {
|
||||
return normalize((rawNormal - vec3(0.5)) * 2.0);
|
||||
}
|
||||
}
|
||||
|
||||
vec3 getWorldNormalDiff(vec2 texcoord, vec2 delta) {
|
||||
|
|
|
@ -258,10 +258,10 @@ void BlurGaussian::run(const SceneContextPointer& sceneContext, const RenderCont
|
|||
|
||||
|
||||
|
||||
BlurGaussianDepthAware::BlurGaussianDepthAware(bool generateOutputFramebuffer) :
|
||||
_inOutResources(generateOutputFramebuffer)
|
||||
BlurGaussianDepthAware::BlurGaussianDepthAware(bool generateOutputFramebuffer, const BlurParamsPointer& params) :
|
||||
_inOutResources(generateOutputFramebuffer),
|
||||
_parameters((params ? params : std::make_shared<BlurParams>()))
|
||||
{
|
||||
_parameters = std::make_shared<BlurParams>();
|
||||
}
|
||||
|
||||
gpu::PipelinePointer BlurGaussianDepthAware::getBlurVPipeline() {
|
||||
|
@ -337,24 +337,23 @@ void BlurGaussianDepthAware::run(const SceneContextPointer& sceneContext, const
|
|||
auto blurHPipeline = getBlurHPipeline();
|
||||
|
||||
auto sourceViewport = args->_viewport;
|
||||
auto blurViewport = sourceViewport >> 1;
|
||||
|
||||
_parameters->setWidthHeight(blurViewport.z, blurViewport.w, args->_context->isStereo());
|
||||
_parameters->setWidthHeight(sourceViewport.z, sourceViewport.w, args->_context->isStereo());
|
||||
glm::ivec2 textureSize(blurringResources.sourceTexture->getDimensions());
|
||||
_parameters->setTexcoordTransform(gpu::Framebuffer::evalSubregionTexcoordTransformCoefficients(textureSize, blurViewport));
|
||||
_parameters->setTexcoordTransform(gpu::Framebuffer::evalSubregionTexcoordTransformCoefficients(textureSize, sourceViewport));
|
||||
_parameters->setDepthPerspective(args->getViewFrustum().getProjection()[1][1]);
|
||||
_parameters->setLinearDepthPosFar(args->getViewFrustum().getFarClip());
|
||||
|
||||
gpu::doInBatch(args->_context, [=](gpu::Batch& batch) {
|
||||
batch.enableStereo(false);
|
||||
batch.setViewportTransform(blurViewport);
|
||||
batch.setViewportTransform(sourceViewport);
|
||||
|
||||
batch.setUniformBuffer(BlurTask_ParamsSlot, _parameters->_parametersBuffer);
|
||||
|
||||
batch.setResourceTexture(BlurTask_DepthSlot, depthTexture);
|
||||
|
||||
batch.setFramebuffer(blurringResources.blurringFramebuffer);
|
||||
// batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
// batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
|
||||
batch.setPipeline(blurVPipeline);
|
||||
batch.setResourceTexture(BlurTask_SourceSlot, blurringResources.sourceTexture);
|
||||
|
@ -362,7 +361,7 @@ void BlurGaussianDepthAware::run(const SceneContextPointer& sceneContext, const
|
|||
|
||||
batch.setFramebuffer(blurringResources.finalFramebuffer);
|
||||
if (_inOutResources._generateOutputFramebuffer) {
|
||||
// batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
// batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0));
|
||||
}
|
||||
|
||||
batch.setPipeline(blurHPipeline);
|
||||
|
|
|
@@ -138,22 +138,22 @@ public:
    using Config = BlurGaussianDepthAwareConfig;
    using JobModel = Job::ModelIO<BlurGaussianDepthAware, Inputs, gpu::FramebufferPointer, Config>;

    BlurGaussianDepthAware(bool generateNewOutput = false);
    BlurGaussianDepthAware(bool generateNewOutput = false, const BlurParamsPointer& params = BlurParamsPointer());

    void configure(const Config& config);
    void run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& SourceAndDepth, gpu::FramebufferPointer& blurredFramebuffer);

protected:

    BlurParamsPointer _parameters;

    gpu::PipelinePointer _blurVPipeline;
    gpu::PipelinePointer _blurHPipeline;

    const BlurParamsPointer& getParameters() const { return _parameters; }

    gpu::PipelinePointer getBlurVPipeline();
    gpu::PipelinePointer getBlurHPipeline();

protected:
    gpu::PipelinePointer _blurVPipeline;
    gpu::PipelinePointer _blurHPipeline;

    BlurInOutResource _inOutResources;
    BlurParamsPointer _parameters;
};

}
@@ -126,7 +126,7 @@ vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep
    // Accumulate the center sample
    vec4 srcBlurred = gaussianDistributionCurve[0] * sampleCenter;

/*  for(int i = 1; i < NUM_TAPS; i++) {
    for(int i = 1; i < NUM_TAPS; i++) {
        // Fetch color and depth for current sample.
        vec2 sampleCoord = texcoord + (gaussianDistributionOffset[i] * finalStep);
        float srcDepth = texture(depthMap, sampleCoord).x;

@@ -139,8 +139,8 @@ vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep
        // Accumulate.
        srcBlurred += gaussianDistributionCurve[i] * srcSample;
    }
*/

/*
    for(int i = 1; i < NUM_HALF_TAPS; i++) {
        // Fetch color and depth for current sample.
        vec2 texcoordOffset = (gaussianDistributionOffsetHalf[i] * finalStep);

@@ -159,7 +159,7 @@ vec4 pixelShaderGaussianDepthAware(vec2 texcoord, vec2 direction, vec2 pixelStep
        // Accumulate.
        srcBlurred += gaussianDistributionCurveHalf[i] * (srcSampleP + srcSampleN);
    }
}*/

    return srcBlurred;
}
@@ -6,15 +6,11 @@
#  See the accompanying file LICENSE or http:#www.apache.org/licenses/LICENSE-2.0.html
#

if (WIN32 OR APPLE)
    set(TARGET_NAME hifiCodec)
    setup_hifi_client_server_plugin()

    link_hifi_libraries(audio shared plugins)

    add_dependency_external_projects(HiFiAudioCodec)
    target_include_directories(${TARGET_NAME} PRIVATE ${HIFIAUDIOCODEC_INCLUDE_DIRS})
    target_link_libraries(${TARGET_NAME} ${HIFIAUDIOCODEC_LIBRARIES})
    install_beside_console()
endif()
set(TARGET_NAME hifiCodec)
setup_hifi_client_server_plugin()
link_hifi_libraries(audio shared plugins)
add_dependency_external_projects(hifiAudioCodec)
target_include_directories(${TARGET_NAME} PRIVATE ${HIFIAUDIOCODEC_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${HIFIAUDIOCODEC_LIBRARIES})
install_beside_console()
@@ -13,8 +13,7 @@ var qml = Script.resolvePath('surfaceGeometryPass.qml');
var window = new OverlayWindow({
    title: 'Surface Geometry Pass',
    source: qml,
    width: 400, height: 300,
    width: 400, height: 170,
});
window.setPosition(250, 400);
window.setPosition(Window.innerWidth - 420, 50 + 550 + 50);
window.closed.connect(function() { Script.stop(); });
@@ -53,11 +53,6 @@ Item {
    prop: "gpuTime",
    label: "SurfaceGeometry",
    color: "#00FFFF"
},{
    object: Render.getConfig("CurvatureRangeTimer"),
    prop: "gpuTime",
    label: "Curvature",
    color: "#00FF00"
},
{
    object: Render.getConfig("RenderDeferred"),
@@ -30,7 +30,6 @@ Column {
    model: [
        "Basis Scale:basisScale:2.0:false",
        "Curvature Scale:curvatureScale:100.0:false",
        "Downscale:resolutionLevel:4:true"
    ]
    ConfigSlider {
        label: qsTr(modelData.split(":")[0])
@@ -41,16 +40,16 @@ Column {
            min: 0.0
        }
    }
}

Column{
    CheckBox {
        text: "Diffuse Curvature Mid"
        checked: true
        onCheckedChanged: { Render.getConfig("DiffuseCurvatureMid").enabled = checked }
    }
        text: "Half Resolution"
        checked: Render.getConfig("SurfaceGeometry")["resolutionLevel"]
        onCheckedChanged: { Render.getConfig("SurfaceGeometry")["resolutionLevel"] = checked }
    }

    Repeater {
        model: [ "Blur Scale:DiffuseCurvatureMid:filterScale:2.0", "Blur Depth Threshold:DiffuseCurvatureMid:depthThreshold:1.0", "Blur Scale2:DiffuseCurvatureLow:filterScale:2.0", "Blur Depth Threshold 2:DiffuseCurvatureLow:depthThreshold:1.0"]
        model: [ "Diffusion Scale:SurfaceGeometry:diffuseFilterScale:2.0",
                 "Diffusion Depth Threshold:SurfaceGeometry:diffuseDepthThreshold:1.0"
               ]
        ConfigSlider {
            label: qsTr(modelData.split(":")[0])
            integral: false
@@ -60,12 +59,6 @@ Column {
            min: 0.0
        }
    }

    CheckBox {
        text: "Diffuse Curvature Low"
        checked: true
        onCheckedChanged: { Render.getConfig("DiffuseCurvatureLow").enabled = checked }
    }
    }
}
}
@@ -34,6 +34,7 @@ var OVERLAY_DATA_HMD = {
    color: {red: 255, green: 255, blue: 255},
    alpha: 1,
    scale: 2,
    emissive: true,
    isFacingAvatar: true,
    drawInFront: true
};
@@ -202,8 +202,7 @@ CONTROLLER_STATE_MACHINE[STATE_NEAR_GRABBING] = {
CONTROLLER_STATE_MACHINE[STATE_HOLD] = {
    name: "hold",
    enterMethod: "nearGrabbingEnter",
    updateMethod: "nearGrabbing",
    exitMethod: "holdExit"
    updateMethod: "nearGrabbing"
};
CONTROLLER_STATE_MACHINE[STATE_NEAR_TRIGGER] = {
    name: "trigger",
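[Editor's note, not part of the diff] Each entry in CONTROLLER_STATE_MACHINE names handler methods on the controller by string, so removing exitMethod: "holdExit" means nothing special runs when STATE_HOLD is left; the attach-point bookkeeping it used to do moves into the nearGrabbing update (see the later hunks in this file, including the deleted holdExit at the end). A minimal sketch of how such a name-keyed table is typically dispatched; dispatchStateChange is a hypothetical helper, not part of this script:

    // Sketch only: illustrates the enterMethod/exitMethod lookup, not the script's real setState.
    function dispatchStateChange(controller, oldState, newState) {
        var leaving = CONTROLLER_STATE_MACHINE[oldState];
        var entering = CONTROLLER_STATE_MACHINE[newState];
        if (leaving && leaving.exitMethod) {
            controller[leaving.exitMethod]();   // STATE_HOLD no longer defines one after this change
        }
        if (entering && entering.enterMethod) {
            controller[entering.enterMethod](); // e.g. "nearGrabbingEnter"
        }
    }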
@@ -228,7 +227,7 @@ function colorPow(color, power) {
    return {
        red: Math.pow(color.red / 255.0, power) * 255,
        green: Math.pow(color.green / 255.0, power) * 255,
        blue: Math.pow(color.blue / 255.0, power) * 255,
        blue: Math.pow(color.blue / 255.0, power) * 255
    };
}
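[Editor's note, not part of the diff] The change above only drops a trailing comma, but for context: colorPow normalizes each channel to 0..1, raises it to the given power, and rescales to 0..255, so a small exponent such as the 0.06 used for the search sphere below pushes mid-range colors toward white. A quick worked example using the function exactly as shown (results rounded):

    // Sketch only: approximate results of the colorPow above.
    var brightened = colorPow({ red: 64, green: 128, blue: 255 }, 0.06);
    // red:   Math.pow(64 / 255, 0.06) * 255  ~= 235
    // green: Math.pow(128 / 255, 0.06) * 255 ~= 245
    // blue:  Math.pow(255 / 255, 0.06) * 255  = 255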
@@ -271,14 +270,12 @@ function propsArePhysical(props) {
    return isPhysical;
}

// currently disabled.
var USE_ATTACH_POINT_SETTINGS = false;
var USE_ATTACH_POINT_SETTINGS = true;

var ATTACH_POINT_SETTINGS = "io.highfidelity.attachPoints";
function getAttachPointSettings() {
    try {
        var str = Settings.getValue(ATTACH_POINT_SETTINGS);
        print("getAttachPointSettings = " + str);
        if (str === "false") {
            return {};
        } else {
@@ -291,7 +288,6 @@ function getAttachPointSettings() {
}
function setAttachPointSettings(attachPointSettings) {
    var str = JSON.stringify(attachPointSettings);
    print("setAttachPointSettings = " + str);
    Settings.setValue(ATTACH_POINT_SETTINGS, str);
}
function getAttachPointForHotspotFromSettings(hotspot, hand) {
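[Editor's note, not part of the diff] With USE_ATTACH_POINT_SETTINGS now true and the debug prints removed, equip attach points round-trip through Interface's Settings as one JSON string under the "io.highfidelity.attachPoints" key. A hedged sketch of that round trip, using only the helpers shown above (the entry key and shape are illustrative, not the script's real layout):

    // Sketch only.
    var attachPoints = getAttachPointSettings();        // parsed object, or {} if nothing is stored yet
    attachPoints["exampleHotspot"] = { hand: "right" }; // illustrative entry, not the actual schema
    setAttachPointSettings(attachPoints);               // JSON.stringify + Settings.setValue as above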
@@ -765,9 +761,8 @@ function MyController(hand) {
        }
    };

    var SEARCH_SPHERE_ALPHA = 0.5;
    this.searchSphereOn = function (location, size, color) {

        var rotation = Quat.lookAt(location, Camera.getPosition(), Vec3.UP);
        var brightColor = colorPow(color, 0.06);
        if (this.searchSphere === null) {
@@ -790,7 +785,7 @@ function MyController(hand) {
            position: location,
            rotation: rotation,
            innerColor: brightColor,
            outerColor: color,
            outerColor: color,
            innerAlpha: 1.0,
            outerAlpha: 0.0,
            outerRadius: size * 1.2,
@@ -1961,12 +1956,12 @@ function MyController(hand) {
        this.currentObjectRotation = grabbedProperties.rotation;
        this.currentVelocity = ZERO_VEC;
        this.currentAngularVelocity = ZERO_VEC;

        this.prevDropDetected = false;
    };

    this.nearGrabbing = function (deltaTime, timestamp) {

        var dropDetected = this.dropGestureProcess(deltaTime);

        if (this.state == STATE_NEAR_GRABBING && this.triggerSmoothedReleased()) {
            this.callEntityMethodOnGrabbed("releaseGrab");
            this.setState(STATE_OFF, "trigger released");
@@ -1975,6 +1970,16 @@ function MyController(hand) {

        if (this.state == STATE_HOLD) {

            var dropDetected = this.dropGestureProcess(deltaTime);

            if (this.triggerSmoothedReleased()) {
                this.waitForTriggerRelease = false;
            }

            if (dropDetected && this.prevDropDetected != dropDetected) {
                this.waitForTriggerRelease = true;
            }

            // highlight the grabbed hotspot when the dropGesture is detected.
            if (dropDetected) {
                entityPropertiesCache.addEntity(this.grabbedHotspot.entityID);
@@ -1982,17 +1987,24 @@ function MyController(hand) {
                equipHotspotBuddy.highlightHotspot(this.grabbedHotspot);
            }

            if (dropDetected && this.triggerSmoothedGrab()) {
            if (dropDetected && !this.waitForTriggerRelease && this.triggerSmoothedGrab()) {
                this.callEntityMethodOnGrabbed("releaseEquip");
                this.setState(STATE_OFF, "drop gesture detected");
                return;
            }

            if (this.thumbPressed()) {
                this.callEntityMethodOnGrabbed("releaseEquip");
                this.setState(STATE_OFF, "drop via thumb press");
                // store the offset attach points into preferences.
                if (USE_ATTACH_POINT_SETTINGS && this.grabbedHotspot && this.grabbedEntity) {
                    var props = Entities.getEntityProperties(this.grabbedEntity, ["localPosition", "localRotation"]);
                    if (props && props.localPosition && props.localRotation) {
                        storeAttachPointForHotspotInSettings(this.grabbedHotspot, this.hand, props.localPosition, props.localRotation);
                    }
                }

                var grabbedEntity = this.grabbedEntity;
                this.release();
                this.grabbedEntity = grabbedEntity;
                this.setState(STATE_NEAR_GRABBING, "drop gesture detected");
                return;
            }
            this.prevDropDetected = dropDetected;
        }

        this.heartBeat(this.grabbedEntity);
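[Editor's note, not part of the diff] The new prevDropDetected / waitForTriggerRelease flags appear to make the drop gesture require a fresh trigger squeeze: the latch is set on the gesture's rising edge, cleared only once the trigger is released, and only then can a squeeze complete the drop, which now also stores the attach point and re-enters STATE_NEAR_GRABBING instead of going to STATE_OFF. A compact, hedged restatement of that edge logic, separate from the script:

    // Sketch only: mirrors the latch added above, not a drop-in replacement for it.
    function DropLatch() {
        this.prevDropDetected = false;
        this.waitForTriggerRelease = false;
    }
    DropLatch.prototype.update = function (dropDetected, triggerReleased, triggerGrabbed) {
        if (triggerReleased) {
            this.waitForTriggerRelease = false;   // re-arm once the trigger is let go
        }
        if (dropDetected && !this.prevDropDetected) {
            this.waitForTriggerRelease = true;    // rising edge: demand a fresh squeeze
        }
        var shouldDrop = dropDetected && !this.waitForTriggerRelease && triggerGrabbed;
        this.prevDropDetected = dropDetected;
        return shouldDrop;
    };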
@@ -2088,22 +2100,6 @@ function MyController(hand) {
        }
    };

    this.holdExit = function () {
        // store the offset attach points into preferences.
        if (USE_ATTACH_POINT_SETTINGS && this.grabbedHotspot && this.grabbedEntity) {
            entityPropertiesCache.addEntity(this.grabbedEntity);
            var props = entityPropertiesCache.getProps(this.grabbedEntity);
            var entityXform = new Xform(props.rotation, props.position);
            var avatarXform = new Xform(MyAvatar.orientation, MyAvatar.position);
            var handRot = (this.hand === RIGHT_HAND) ? MyAvatar.getRightPalmRotation() : MyAvatar.getLeftPalmRotation();
            var avatarHandPos = (this.hand === RIGHT_HAND) ? MyAvatar.rightHandPosition : MyAvatar.leftHandPosition;
            var palmXform = new Xform(handRot, avatarXform.xformPoint(avatarHandPos));
            var offsetXform = Xform.mul(palmXform.inv(), entityXform);

            storeAttachPointForHotspotInSettings(this.grabbedHotspot, this.hand, offsetXform.pos, offsetXform.rot);
        }
    };

    this.nearTriggerEnter = function () {

        this.clearEquipHaptics();