Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-08-12 19:44:11 +02:00)

Merge branch 'master' of https://github.com/highfidelity/hifi into loginOnLaunch

commit 6bcf6eaae7
136 changed files with 1295 additions and 948 deletions
@@ -96,7 +96,6 @@ Agent::Agent(ReceivedMessage& message) :
    DependencyManager::set<recording::Recorder>();
    DependencyManager::set<recording::ClipCache>();
    DependencyManager::set<ScriptCache>();
    DependencyManager::set<RecordingScriptingInterface>();
    DependencyManager::set<UsersScriptingInterface>();
@@ -177,6 +176,8 @@ void Agent::run() {
    // Create ScriptEngines on threaded-assignment thread then move to main thread.
    DependencyManager::set<ScriptEngines>(ScriptEngine::AGENT_SCRIPT)->moveToThread(qApp->thread());

    DependencyManager::set<ScriptCache>();

    // make sure we request our script once the agent connects to the domain
    auto nodeList = DependencyManager::get<NodeList>();
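The `moveToThread(qApp->thread())` call above is the standard Qt hand-off: an object constructed on a worker thread can be given main-thread affinity so its queued slots and timers run there. A minimal, self-contained sketch of the idiom (plain Qt, illustrative names only, not hifi classes):

```cpp
#include <QCoreApplication>
#include <QObject>
#include <QThread>

// Called on the threaded-assignment (worker) thread.
void createOnWorkerMoveToMain() {
    auto* engines = new QObject();            // stands in for the ScriptEngines singleton
    Q_ASSERT(engines->thread() == QThread::currentThread());
    // Hand the object to the application (main) thread; from now on its queued
    // signal/slot deliveries and timers execute there.
    engines->moveToThread(qApp->thread());
}
```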
@@ -360,154 +361,178 @@ void Agent::scriptRequestFinished() {
}

void Agent::executeScript() {
    _scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);
    // the following block is scoped so that any shared pointers we take here
    // are cleared before we call setFinished at the end of the function
    {
        _scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);

        // setup an Avatar for the script to use
        auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();

        scriptedAvatar->setID(getSessionUUID());

        connect(_scriptEngine.data(), SIGNAL(update(float)),
                scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
        scriptedAvatar->setForceFaceTrackerConnected(true);

        // call model URL setters with empty URLs so our avatar, if user, will have the default models
        scriptedAvatar->setSkeletonModelURL(QUrl());

        // force lazy initialization of the head data for the scripted avatar
        // since it is referenced below by computeLoudness and getAudioLoudness
        scriptedAvatar->getHeadOrientation();

        // give this AvatarData object to the script engine
        _scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());

        // give scripts access to the Users object
        _scriptEngine->registerGlobalObject("Users", DependencyManager::get<UsersScriptingInterface>().data());

        auto player = DependencyManager::get<recording::Deck>();
        connect(player.data(), &recording::Deck::playbackStateChanged, [=] {
        connect(player.data(), &recording::Deck::playbackStateChanged, [&player, &scriptedAvatar] {
            if (player->isPlaying()) {
                auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
                if (recordingInterface->getPlayFromCurrentLocation()) {
                    scriptedAvatar->setRecordingBasis();
                }

                // these procedural movements are included in the recordings
                scriptedAvatar->setHasProceduralEyeFaceMovement(false);
                scriptedAvatar->setHasProceduralBlinkFaceMovement(false);
                scriptedAvatar->setHasAudioEnabledFaceMovement(false);
            } else {
                scriptedAvatar->clearRecordingBasis();

                // restore procedural blendshape movement
                scriptedAvatar->setHasProceduralEyeFaceMovement(true);
                scriptedAvatar->setHasProceduralBlinkFaceMovement(true);
                scriptedAvatar->setHasAudioEnabledFaceMovement(true);
            }
        });

        using namespace recording;
        static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
        Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [scriptedAvatar](Frame::ConstPointer frame) {

            auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
            bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();

            // FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
            if (!useFrameSkeleton) {
                static std::once_flag warning;
                std::call_once(warning, [] {
                    qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
                });
            }

            AvatarData::fromFrame(frame->data, *scriptedAvatar);
        });

        static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
        Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
            static quint16 audioSequenceNumber{ 0 };

            QByteArray audio(frame->data);

            if (_isNoiseGateEnabled) {
                int16_t* samples = reinterpret_cast<int16_t*>(audio.data());
                int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
                _audioGate.render(samples, samples, numSamples);
            }

            computeLoudness(&audio, scriptedAvatar);

            // state machine to detect gate opening and closing
            bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
            bool openedInLastBlock = !_audioGateOpen && audioGateOpen;  // the gate just opened
            bool closedInLastBlock = _audioGateOpen && !audioGateOpen;  // the gate just closed
            _audioGateOpen = audioGateOpen;
            Q_UNUSED(openedInLastBlock);

            // the codec must be flushed to silence before sending silent packets,
            // so delay the transition to silent packets by one packet after becoming silent.
            auto packetType = PacketType::MicrophoneAudioNoEcho;
            if (!audioGateOpen && !closedInLastBlock) {
                packetType = PacketType::SilentAudioFrame;
            }

            Transform audioTransform;
            auto headOrientation = scriptedAvatar->getHeadOrientation();
            audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
            audioTransform.setRotation(headOrientation);

            QByteArray encodedBuffer;
            if (_encoder) {
                _encoder->encode(audio, encodedBuffer);
            } else {
                encodedBuffer = audio;
            }

            AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
                                                    audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
                                                    packetType, _selectedCodecName);
        });

        auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
        _scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());

        // register ourselves to the script engine
        _scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));

        _scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCacheScriptingInterface>().data());
        _scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCacheScriptingInterface>().data());

        QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
        _scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);

        auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();

        _scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);

        _scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
                                            LocationScriptingInterface::locationSetter);

        auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
        _scriptEngine->registerGlobalObject("Recording", recordingInterface.data());

        entityScriptingInterface->init();

        _entityViewer.init();

        entityScriptingInterface->setEntityTree(_entityViewer.getTree());

        DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());

        _avatarAudioTimer.start();
        QMetaObject::invokeMethod(&_avatarAudioTimer, "start");

        // Agents should run at 45hz
        static const int AVATAR_DATA_HZ = 45;
        static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
        QTimer* avatarDataTimer = new QTimer(this);
        connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
        avatarDataTimer->setSingleShot(false);
        avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
        avatarDataTimer->setTimerType(Qt::PreciseTimer);
        avatarDataTimer->start();

        _scriptEngine->run();

        Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
        Frame::clearFrameHandler(AVATAR_FRAME_TYPE);

        if (recordingInterface->isPlaying()) {
            recordingInterface->stopPlaying();
        }

        if (recordingInterface->isRecording()) {
            recordingInterface->stopRecording();
        }

        avatarDataTimer->stop();
        _avatarAudioTimer.stop();
    }

    DependencyManager::destroy<RecordingScriptingInterface>();
    setFinished(true);
}
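One detail in the audio frame handler above is easy to miss: when the gate closes, the handler still sends one more regular audio packet so the codec can be flushed to silence, and only then switches to `SilentAudioFrame`. A standalone sketch of just that state machine (plain C++, illustrative types, not the hifi `PacketType` enum):

```cpp
#include <cstdio>

enum class PacketKind { Audio, Silent };

struct GateTracker {
    bool open = false;

    // Returns the packet type to send for a block with the given loudness.
    PacketKind next(float loudness) {
        bool nowOpen = (loudness != 0.0f);
        bool justClosed = open && !nowOpen;   // the gate closed on this block
        open = nowOpen;
        // Send one more regular packet right after closing so the codec can
        // flush to silence; only then switch to silent packets.
        return (nowOpen || justClosed) ? PacketKind::Audio : PacketKind::Silent;
    }
};

int main() {
    GateTracker gate;
    float blocks[] = { 0.8f, 0.5f, 0.0f, 0.0f, 0.0f };
    for (float loudness : blocks) {
        std::printf("%s\n", gate.next(loudness) == PacketKind::Audio ? "audio" : "silent");
    }
    // prints: audio, audio, audio (flush block), silent, silent
}
```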
@@ -859,17 +884,25 @@ void Agent::aboutToFinish() {
    DependencyManager::destroy<SoundCache>();
    DependencyManager::destroy<AudioScriptingInterface>();

    DependencyManager::destroy<RecordingScriptingInterface>();
    DependencyManager::destroy<recording::Deck>();
    DependencyManager::destroy<recording::Recorder>();
    DependencyManager::destroy<recording::ClipCache>();
    DependencyManager::destroy<ScriptEngine>();

    // drop our shared pointer to the script engine, then ask ScriptEngines to shutdown scripting
    // this ensures that the ScriptEngine goes down before ScriptEngines
    _scriptEngine.clear();

    {
        DependencyManager::get<ScriptEngines>()->shutdownScripting();
    }

    DependencyManager::destroy<ScriptEngines>();

    DependencyManager::destroy<AssignmentDynamicFactory>();

    DependencyManager::destroy<ScriptableAvatar>();

    QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");

    // cleanup codec & encoder
    if (_codec && _encoder) {
        _codec->releaseEncoder(_encoder);
@@ -145,3 +145,15 @@ void ScriptableAvatar::update(float deltatime) {
        _clientTraitsHandler->sendChangedTraitsToMixer();
}

void ScriptableAvatar::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
    _headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
}

void ScriptableAvatar::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
    _headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
}

void ScriptableAvatar::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
    _headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
}
@@ -157,9 +157,16 @@ public:
    virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;

    void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
    bool getHasProceduralBlinkFaceMovement() const override { return _headData->getHasProceduralBlinkFaceMovement(); }
    void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
    bool getHasProceduralEyeFaceMovement() const override { return _headData->getHasProceduralEyeFaceMovement(); }
    void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
    bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }

private slots:
    void update(float deltatime);

private:
    AnimationPointer _animation;
    AnimationDetails _animationDetails;
@@ -9,7 +9,6 @@
#

macro(AUTOSCRIBE_SHADER)
    message(STATUS "Processing shader ${SHADER_FILE}")
    unset(SHADER_INCLUDE_FILES)
    # Grab include files
    foreach(includeFile ${ARGN})
@@ -8,6 +8,5 @@
macro(TARGET_JSON)
    add_dependency_external_projects(json)
    find_package(JSON REQUIRED)
    message("JSON_INCLUDE_DIRS ${JSON_INCLUDE_DIRS}")
    target_include_directories(${TARGET_NAME} PUBLIC ${JSON_INCLUDE_DIRS})
endmacro()
Binary files changed (contents not shown), including new animation files:
    interface/resources/avatar/animations/jog_bwd.fbx
    interface/resources/avatar/animations/jump_standing_launch.fbx
    interface/resources/avatar/animations/run_bwd.fbx
|
@ -585,149 +585,188 @@
|
|||
"states": [
|
||||
{
|
||||
"id": "idle",
|
||||
"interpTarget": 0,
|
||||
"interpDuration": 4,
|
||||
"interpTarget": 20,
|
||||
"interpDuration": 8,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isMovingForward", "state": "idleToWalkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "idleToWalkFwd",
|
||||
"interpTarget": 10,
|
||||
"interpDuration": 4,
|
||||
"interpType": "snapshotPrev",
|
||||
"interpTarget": 12,
|
||||
"interpDuration": 8,
|
||||
"transitions": [
|
||||
{ "var": "idleToWalkFwdOnDone", "state": "walkFwd" },
|
||||
{ "var": "idleToWalkFwdOnDone", "state": "WALKFWD" },
|
||||
{ "var": "isNotMoving", "state": "idle" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "idleSettle",
|
||||
"interpTarget": 10,
|
||||
"interpDuration": 10,
|
||||
"interpTarget": 15,
|
||||
"interpDuration": 8,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{"var": "idleSettleOnDone", "state": "idle" },
|
||||
{"var": "isMovingForward", "state": "idleToWalkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{"var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" }
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "walkFwd",
|
||||
"interpTarget": 16,
|
||||
"interpDuration": 6,
|
||||
"id": "WALKFWD",
|
||||
"interpTarget": 35,
|
||||
"interpDuration": 10,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotMoving", "state": "idleSettle" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "walkBwd",
|
||||
"interpTarget": 8,
|
||||
"interpDuration": 6,
|
||||
"id": "WALKBWD",
|
||||
"interpTarget": 35,
|
||||
"interpDuration": 10,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotMoving", "state": "idleSettle" },
|
||||
{ "var": "isMovingForward", "state": "walkFwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "strafeRight",
|
||||
"interpTarget": 5,
|
||||
"id": "STRAFERIGHT",
|
||||
"interpTarget": 25,
|
||||
"interpDuration": 8,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotMoving", "state": "idleSettle" },
|
||||
{ "var": "isMovingForward", "state": "walkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "strafeLeft",
|
||||
"interpTarget": 5,
|
||||
"id": "STRAFELEFT",
|
||||
"interpTarget": 25,
|
||||
"interpDuration": 8,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotMoving", "state": "idleSettle" },
|
||||
{ "var": "isMovingForward", "state": "walkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "turnRight",
|
||||
"interpTarget": 6,
|
||||
"interpDuration": 8,
|
||||
"transitions": [
|
||||
{ "var": "isNotTurning", "state": "idle" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "turnLeft",
|
||||
"interpTarget": 6,
|
||||
"interpDuration": 8,
|
||||
"transitions": [
|
||||
{ "var": "isNotTurning", "state": "idle" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
|
@ -739,18 +778,18 @@
|
|||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotMoving", "state": "idleSettle" },
|
||||
{ "var": "isMovingForward", "state": "walkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" }
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" }
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@ -760,60 +799,18 @@
|
|||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotMoving", "state": "idleSettle" },
|
||||
{ "var": "isMovingForward", "state": "walkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "turnRight",
|
||||
"interpTarget": 6,
|
||||
"interpDuration": 8,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotTurning", "state": "idle" },
|
||||
{ "var": "isMovingForward", "state": "walkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "turnLeft",
|
||||
"interpTarget": 6,
|
||||
"interpDuration": 8,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotTurning", "state": "idle" },
|
||||
{ "var": "isMovingForward", "state": "walkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" }
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@ -826,79 +823,79 @@
|
|||
},
|
||||
{
|
||||
"id": "takeoffStand",
|
||||
"interpTarget": 0,
|
||||
"interpDuration": 6,
|
||||
"interpTarget": 2,
|
||||
"interpDuration": 2,
|
||||
"transitions": [
|
||||
{ "var": "isNotTakeoff", "state": "inAirStand" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "takeoffRun",
|
||||
"interpTarget": 0,
|
||||
"interpDuration": 6,
|
||||
"id": "TAKEOFFRUN",
|
||||
"interpTarget": 2,
|
||||
"interpDuration": 2,
|
||||
"transitions": [
|
||||
{ "var": "isNotTakeoff", "state": "inAirRun" }
|
||||
{ "var": "isNotTakeoff", "state": "INAIRRUN" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "inAirStand",
|
||||
"interpTarget": 0,
|
||||
"interpDuration": 6,
|
||||
"interpTarget": 3,
|
||||
"interpDuration": 3,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotInAir", "state": "landStandImpact" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "inAirRun",
|
||||
"interpTarget": 0,
|
||||
"interpDuration": 6,
|
||||
"id": "INAIRRUN",
|
||||
"interpTarget": 3,
|
||||
"interpDuration": 3,
|
||||
"interpType": "snapshotPrev",
|
||||
"transitions": [
|
||||
{ "var": "isNotInAir", "state": "landRun" }
|
||||
{ "var": "isNotInAir", "state": "WALKFWD" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "landStandImpact",
|
||||
"interpTarget": 6,
|
||||
"interpDuration": 4,
|
||||
"interpTarget": 1,
|
||||
"interpDuration": 1,
|
||||
"transitions": [
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "landStandImpactOnDone", "state": "landStand" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "landStand",
|
||||
"interpTarget": 0,
|
||||
"interpTarget": 1,
|
||||
"interpDuration": 1,
|
||||
"transitions": [
|
||||
{ "var": "isMovingForward", "state": "idleToWalkFwd" },
|
||||
{ "var": "isMovingBackward", "state": "walkBwd" },
|
||||
{ "var": "isMovingRight", "state": "strafeRight" },
|
||||
{ "var": "isMovingLeft", "state": "strafeLeft" },
|
||||
{ "var": "isMovingForward", "state": "WALKFWD" },
|
||||
{ "var": "isMovingBackward", "state": "WALKBWD" },
|
||||
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
|
||||
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
|
||||
{ "var": "isTurningRight", "state": "turnRight" },
|
||||
{ "var": "isTurningLeft", "state": "turnLeft" },
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "isInAirStand", "state": "inAirStand" },
|
||||
{ "var": "isInAirRun", "state": "inAirRun" },
|
||||
{ "var": "isInAirRun", "state": "INAIRRUN" },
|
||||
{ "var": "landStandOnDone", "state": "idle" },
|
||||
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
|
||||
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "landRun",
|
||||
"interpTarget": 1,
|
||||
"interpDuration": 7,
|
||||
"id": "LANDRUN",
|
||||
"interpTarget": 2,
|
||||
"interpDuration": 2,
|
||||
"transitions": [
|
||||
{ "var": "isFlying", "state": "fly" },
|
||||
{ "var": "isTakeoffStand", "state": "takeoffStand" },
|
||||
{ "var": "isTakeoffRun", "state": "takeoffRun" },
|
||||
{ "var": "landRunOnDone", "state": "walkFwd" }
|
||||
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
|
||||
{ "var": "landRunOnDone", "state": "WALKFWD" }
|
||||
]
|
||||
}
|
||||
]
|
||||
|
@ -913,7 +910,7 @@
|
|||
{
|
||||
"id": "idleStand",
|
||||
"interpTarget": 6,
|
||||
"interpDuration": 6,
|
||||
"interpDuration": 10,
|
||||
"transitions": [
|
||||
{ "var": "isTalking", "state": "idleTalk" }
|
||||
]
|
||||
|
@ -921,7 +918,7 @@
|
|||
{
|
||||
"id": "idleTalk",
|
||||
"interpTarget": 6,
|
||||
"interpDuration": 6,
|
||||
"interpDuration": 10,
|
||||
"transitions": [
|
||||
{ "var": "notIsTalking", "state": "idleStand" }
|
||||
]
|
||||
|
@ -956,12 +953,12 @@
|
|||
]
|
||||
},
|
||||
{
|
||||
"id": "walkFwd",
|
||||
"id": "WALKFWD",
|
||||
"type": "blendLinearMove",
|
||||
"data": {
|
||||
"alpha": 0.0,
|
||||
"desiredSpeed": 1.4,
|
||||
"characteristicSpeeds": [0.5, 1.5, 2.5, 3.2, 4.5],
|
||||
"characteristicSpeeds": [0.5, 1.8, 2.3, 3.2, 4.5],
|
||||
"alphaVar": "moveForwardAlpha",
|
||||
"desiredSpeedVar": "moveForwardSpeed"
|
||||
},
|
||||
|
@ -984,7 +981,7 @@
|
|||
"data": {
|
||||
"url": "qrc:///avatar/animations/walk_fwd.fbx",
|
||||
"startFrame": 0.0,
|
||||
"endFrame": 35.0,
|
||||
"endFrame": 30.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": true
|
||||
},
|
||||
|
@ -1046,25 +1043,25 @@
|
|||
"data": {
|
||||
"url": "qrc:///avatar/animations/settle_to_idle.fbx",
|
||||
"startFrame": 1.0,
|
||||
"endFrame": 48.0,
|
||||
"endFrame": 59.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "walkBwd",
|
||||
"id": "WALKBWD",
|
||||
"type": "blendLinearMove",
|
||||
"data": {
|
||||
"alpha": 0.0,
|
||||
"desiredSpeed": 1.4,
|
||||
"characteristicSpeeds": [0.6, 1.7],
|
||||
"characteristicSpeeds": [0.6, 1.6, 2.3, 3.1],
|
||||
"alphaVar": "moveBackwardAlpha",
|
||||
"desiredSpeedVar": "moveBackwardSpeed"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"id": "walkBwdShort",
|
||||
"id": "walkBwdShort_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/walk_short_bwd.fbx",
|
||||
|
@ -1076,7 +1073,7 @@
|
|||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "walkBwdNormal",
|
||||
"id": "walkBwdFast_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/walk_bwd_fast.fbx",
|
||||
|
@ -1086,6 +1083,30 @@
|
|||
"loopFlag": true
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "jogBwd_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jog_bwd.fbx",
|
||||
"startFrame": 0.0,
|
||||
"endFrame": 24.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": true
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "runBwd_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/run_bwd.fbx",
|
||||
"startFrame": 0.0,
|
||||
"endFrame": 16.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": true
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
|
@ -1115,18 +1136,18 @@
|
|||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "strafeLeft",
|
||||
"id": "STRAFELEFT",
|
||||
"type": "blendLinearMove",
|
||||
"data": {
|
||||
"alpha": 0.0,
|
||||
"desiredSpeed": 1.4,
|
||||
"characteristicSpeeds": [0, 0.5, 1.5, 2.6, 3.0],
|
||||
"characteristicSpeeds": [0.1, 0.5, 1.0, 2.6, 3.0],
|
||||
"alphaVar": "moveLateralAlpha",
|
||||
"desiredSpeedVar": "moveLateralSpeed"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"id": "strafeLeftShort_c",
|
||||
"id": "strafeLeftShortStep_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/side_step_short_left.fbx",
|
||||
|
@ -1138,7 +1159,7 @@
|
|||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "strafeLeft_c",
|
||||
"id": "strafeLeftStep_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/side_step_left.fbx",
|
||||
|
@ -1150,19 +1171,19 @@
|
|||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "strafeLeftAnim_c",
|
||||
"id": "strafeLeftWalk_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/walk_left.fbx",
|
||||
"startFrame": 0.0,
|
||||
"endFrame": 33.0,
|
||||
"endFrame": 35.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": true
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "strafeLeftFast_c",
|
||||
"id": "strafeLeftWalkFast_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/walk_left_fast.fbx",
|
||||
|
@ -1188,17 +1209,17 @@
|
|||
]
|
||||
},
|
||||
{
|
||||
"id": "strafeRight",
|
||||
"id": "STRAFERIGHT",
|
||||
"type": "blendLinearMove",
|
||||
"data": {
|
||||
"alpha": 0.0,
|
||||
"desiredSpeed": 1.4,
|
||||
"characteristicSpeeds": [0, 0.5, 1.5, 2.6, 3.0],
|
||||
"characteristicSpeeds": [0.1, 0.5, 1.0, 2.6, 3.0],
|
||||
"alphaVar": "moveLateralAlpha",
|
||||
"desiredSpeedVar": "moveLateralSpeed"
|
||||
},
|
||||
"children": [ {
|
||||
"id": "stepRightShort_c",
|
||||
"id": "strafeRightShortStep_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/side_step_short_left.fbx",
|
||||
|
@ -1211,7 +1232,7 @@
|
|||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "stepRight_c",
|
||||
"id": "strafeRightStep_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/side_step_left.fbx",
|
||||
|
@ -1224,12 +1245,12 @@
|
|||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "strafeRight_c",
|
||||
"id": "strafeRightWalk_c",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/walk_left.fbx",
|
||||
"startFrame": 0.0,
|
||||
"endFrame": 33.0,
|
||||
"endFrame": 35.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": true,
|
||||
"mirrorFlag": true
|
||||
|
@ -1381,22 +1402,22 @@
|
|||
"id": "takeoffStand",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jump_standing_takeoff.fbx",
|
||||
"startFrame": 17.0,
|
||||
"endFrame": 25.0,
|
||||
"url": "qrc:///avatar/animations/jump_standing_launch.fbx",
|
||||
"startFrame": 2.0,
|
||||
"endFrame": 16.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "takeoffRun",
|
||||
"id": "TAKEOFFRUN",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jump_takeoff.fbx",
|
||||
"startFrame": 1.0,
|
||||
"endFrame": 2.5,
|
||||
"timeScale": 0.01,
|
||||
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
|
||||
"startFrame": 4.0,
|
||||
"endFrame": 15.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
"children": []
|
||||
|
@ -1416,7 +1437,7 @@
|
|||
"url": "qrc:///avatar/animations/jump_standing_apex.fbx",
|
||||
"startFrame": 0.0,
|
||||
"endFrame": 0.0,
|
||||
"timeScale": 0.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
"children": []
|
||||
|
@ -1448,7 +1469,7 @@
|
|||
]
|
||||
},
|
||||
{
|
||||
"id": "inAirRun",
|
||||
"id": "INAIRRUN",
|
||||
"type": "blendLinear",
|
||||
"data": {
|
||||
"alpha": 0.0,
|
||||
|
@ -1459,10 +1480,10 @@
|
|||
"id": "inAirRunPreApex",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jump_in_air.fbx",
|
||||
"startFrame": 0.0,
|
||||
"endFrame": 0.0,
|
||||
"timeScale": 0.0,
|
||||
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
|
||||
"startFrame": 16.0,
|
||||
"endFrame": 16.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
"children": []
|
||||
|
@ -1471,9 +1492,9 @@
|
|||
"id": "inAirRunApex",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jump_in_air.fbx",
|
||||
"startFrame": 6.0,
|
||||
"endFrame": 6.0,
|
||||
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
|
||||
"startFrame": 22.0,
|
||||
"endFrame": 22.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
|
@ -1483,9 +1504,9 @@
|
|||
"id": "inAirRunPostApex",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jump_in_air.fbx",
|
||||
"startFrame": 11.0,
|
||||
"endFrame": 11.0,
|
||||
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
|
||||
"startFrame": 33.0,
|
||||
"endFrame": 33.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
|
@ -1497,7 +1518,7 @@
|
|||
"id": "landStandImpact",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jump_standing_land.fbx",
|
||||
"url": "qrc:///avatar/animations/jump_standing_land_settle.fbx",
|
||||
"startFrame": 1.0,
|
||||
"endFrame": 6.0,
|
||||
"timeScale": 1.0,
|
||||
|
@ -1509,22 +1530,22 @@
|
|||
"id": "landStand",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jump_standing_land.fbx",
|
||||
"url": "qrc:///avatar/animations/jump_standing_land_settle.fbx",
|
||||
"startFrame": 6.0,
|
||||
"endFrame": 28.0,
|
||||
"endFrame": 68.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"id": "landRun",
|
||||
"id": "LANDRUN",
|
||||
"type": "clip",
|
||||
"data": {
|
||||
"url": "qrc:///avatar/animations/jump_land.fbx",
|
||||
"startFrame": 1.0,
|
||||
"endFrame": 6.0,
|
||||
"timeScale": 0.65,
|
||||
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
|
||||
"startFrame": 29.0,
|
||||
"endFrame": 40.0,
|
||||
"timeScale": 1.0,
|
||||
"loopFlag": false
|
||||
},
|
||||
"children": []
|
||||
@@ -26,7 +26,7 @@ Rectangle {
    HifiConstants { id: hifi; }

    property var eventBridge;
    property string title: "Audio Settings - " + AudioScriptingInterface.context;
    property string title: "Audio Settings"
    signal sendToScript(var message);

    color: hifi.colors.baseGray;
@ -18,6 +18,7 @@ import "../../windows"
|
|||
Rectangle {
|
||||
id: root
|
||||
objectName: "DCConectionTiming"
|
||||
property string title: "Domain Connection Timing"
|
||||
|
||||
signal sendToScript(var message);
|
||||
property bool isHMD: false
|
||||
|
@ -33,7 +34,7 @@ Rectangle {
|
|||
Row {
|
||||
id: header
|
||||
anchors.top: parent.top
|
||||
anchors.topMargin: hifi.dimensions.tabletMenuHeader
|
||||
anchors.topMargin: hifi.dimensions.contentMargin.y
|
||||
anchors.leftMargin: 5
|
||||
anchors.rightMargin: 5
|
||||
anchors.left: parent.left
|
||||
|
|
|
@ -18,6 +18,7 @@ import "../../windows"
|
|||
Rectangle {
|
||||
id: root
|
||||
objectName: "EntityStatistics"
|
||||
property string title: "Entity Statistics"
|
||||
|
||||
signal sendToScript(var message);
|
||||
property bool isHMD: false
|
||||
|
@ -40,6 +41,7 @@ Rectangle {
|
|||
id: scrollView
|
||||
width: parent.width
|
||||
anchors.top: parent.top
|
||||
anchors.topMargin: hifi.dimensions.contentMargin.y
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: hifi.dimensions.tabletMenuHeader
|
||||
contentWidth: column.implicitWidth
|
||||
|
@ -48,10 +50,15 @@ Rectangle {
|
|||
|
||||
Column {
|
||||
id: column
|
||||
anchors.margins: 10
|
||||
anchors.top: parent.top
|
||||
anchors.left: parent.left
|
||||
anchors.right: parent.right
|
||||
y: hifi.dimensions.tabletMenuHeader //-bgNavBar
|
||||
anchors {
|
||||
topMargin: 0
|
||||
leftMargin: 10
|
||||
rightMargin: 10
|
||||
bottomMargin: 0
|
||||
}
|
||||
spacing: 20
|
||||
|
||||
TabletEntityStatisticsItem {
|
||||
|
|
|
@ -24,6 +24,8 @@ Item {
|
|||
height: parent.height
|
||||
width: parent.width
|
||||
|
||||
property string title: "Controls"
|
||||
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
TabBar {
|
||||
|
|
|
@ -105,7 +105,6 @@ StackView {
|
|||
propagateComposedEvents: true
|
||||
onPressed: {
|
||||
parent.forceActiveFocus();
|
||||
addressBarDialog.keyboardEnabled = false;
|
||||
mouse.accepted = false;
|
||||
}
|
||||
}
|
||||
|
@ -223,7 +222,6 @@ StackView {
|
|||
updateLocationText(text.length > 0);
|
||||
}
|
||||
onAccepted: {
|
||||
addressBarDialog.keyboardEnabled = false;
|
||||
toggleOrGo();
|
||||
}
|
||||
|
||||
|
@ -378,7 +376,7 @@ StackView {
|
|||
|
||||
HifiControls.Keyboard {
|
||||
id: keyboard
|
||||
raised: parent.keyboardEnabled
|
||||
raised: parent.keyboardEnabled && parent.keyboardRaised
|
||||
numeric: parent.punctuationMode
|
||||
anchors {
|
||||
bottom: parent.bottom
|
||||
|
|
|
@ -23,6 +23,8 @@ FocusScope {
|
|||
property string subMenu: ""
|
||||
signal sendToScript(var message);
|
||||
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
Rectangle {
|
||||
id: bgNavBar
|
||||
height: 90
|
||||
|
@ -45,24 +47,22 @@ FocusScope {
|
|||
anchors.topMargin: 0
|
||||
anchors.top: parent.top
|
||||
|
||||
Image {
|
||||
HiFiGlyphs {
|
||||
id: menuRootIcon
|
||||
width: 40
|
||||
height: 40
|
||||
source: "../../../icons/tablet-icons/menu-i.svg"
|
||||
text: breadcrumbText.text !== "Menu" ? hifi.glyphs.backward : ""
|
||||
size: 72
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 15
|
||||
width: breadcrumbText.text === "Menu" ? 32 : 50
|
||||
visible: breadcrumbText.text !== "Menu"
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
hoverEnabled: true
|
||||
onEntered: iconColorOverlay.color = "#1fc6a6";
|
||||
onExited: iconColorOverlay.color = "#34a2c7";
|
||||
// navigate back to root level menu
|
||||
onClicked: {
|
||||
buildMenu();
|
||||
breadcrumbText.text = "Menu";
|
||||
menuPopperUpper.closeLastMenu();
|
||||
tabletRoot.playButtonClickSound();
|
||||
}
|
||||
}
|
||||
|
@ -79,23 +79,10 @@ FocusScope {
|
|||
id: breadcrumbText
|
||||
text: "Menu"
|
||||
size: 26
|
||||
color: "#34a2c7"
|
||||
color: "#e3e3e3"
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
anchors.left: menuRootIcon.right
|
||||
anchors.leftMargin: 15
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
hoverEnabled: true
|
||||
onEntered: breadcrumbText.color = "#1fc6a6";
|
||||
onExited: breadcrumbText.color = "#34a2c7";
|
||||
// navigate back to parent level menu if there is one
|
||||
onClicked: {
|
||||
if (breadcrumbText.text !== "Menu") {
|
||||
menuPopperUpper.closeLastMenu();
|
||||
}
|
||||
tabletRoot.playButtonClickSound();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -103,7 +90,6 @@ FocusScope {
|
|||
menuPopperUpper.closeLastMenu();
|
||||
}
|
||||
|
||||
|
||||
function setRootMenu(rootMenu, subMenu) {
|
||||
tabletMenu.subMenu = subMenu;
|
||||
tabletMenu.rootMenu = rootMenu;
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
//
|
||||
// MessageDialog.qml
|
||||
// TabletMenuStack.qml
|
||||
//
|
||||
// Created by Dante Ruiz on 13 Feb 2017
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
|
@ -66,7 +66,7 @@ Item {
|
|||
|
||||
function popSource() {
|
||||
console.log("trying to pop page");
|
||||
d.pop();
|
||||
closeLastMenu();
|
||||
}
|
||||
|
||||
function toModel(items, newMenu) {
|
||||
|
|
|
@ -41,7 +41,11 @@ Item {
|
|||
section.saveAll();
|
||||
}
|
||||
|
||||
closeDialog();
|
||||
if (HMD.active) {
|
||||
tablet.popFromStack();
|
||||
} else {
|
||||
closeDialog();
|
||||
}
|
||||
}
|
||||
|
||||
function restoreAll() {
|
||||
|
@ -50,7 +54,11 @@ Item {
|
|||
section.restoreAll();
|
||||
}
|
||||
|
||||
closeDialog();
|
||||
if (HMD.active) {
|
||||
tablet.popFromStack();
|
||||
} else {
|
||||
closeDialog();
|
||||
}
|
||||
}
|
||||
|
||||
function closeDialog() {
|
||||
|
|
|
@ -2684,6 +2684,10 @@ Application::~Application() {
|
|||
void Application::initializeGL() {
|
||||
qCDebug(interfaceapp) << "Created Display Window.";
|
||||
|
||||
#ifdef DISABLE_QML
|
||||
setAttribute(Qt::AA_DontCheckOpenGLContextThreadAffinity);
|
||||
#endif
|
||||
|
||||
// initialize glut for shape drawing; Qt apparently initializes it on OS X
|
||||
if (_isGLInitialized) {
|
||||
return;
|
||||
|
@ -6892,6 +6896,9 @@ bool Application::askToLoadScript(const QString& scriptFilenameOrURL) {
|
|||
shortName = shortName.mid(startIndex, endIndex - startIndex);
|
||||
}
|
||||
|
||||
#ifdef DISABLE_QML
|
||||
DependencyManager::get<ScriptEngines>()->loadScript(scriptFilenameOrURL);
|
||||
#else
|
||||
QString message = "Would you like to run this script:\n" + shortName;
|
||||
ModalDialogListener* dlg = OffscreenUi::asyncQuestion(getWindow(), "Run Script", message,
|
||||
QMessageBox::Yes | QMessageBox::No);
|
||||
|
@ -6906,7 +6913,7 @@ bool Application::askToLoadScript(const QString& scriptFilenameOrURL) {
|
|||
}
|
||||
QObject::disconnect(dlg, &ModalDialogListener::response, this, nullptr);
|
||||
});
|
||||
|
||||
#endif
|
||||
return true;
|
||||
}
|
||||
|
||||
|
|
|
@ -255,7 +255,7 @@ Menu::Menu() {
|
|||
connect(action, &QAction::triggered, [] {
|
||||
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
tablet->loadQMLSource("hifi/tablet/ControllerSettings.qml");
|
||||
tablet->pushOntoStack("hifi/tablet/ControllerSettings.qml");
|
||||
|
||||
if (!hmd->getShouldShowTablet()) {
|
||||
hmd->toggleShouldShowTablet();
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
#include <gpu/Batch.h>
|
||||
|
||||
#include <NodeList.h>
|
||||
#include <recording/Deck.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <GeometryUtil.h>
|
||||
#include <trackers/FaceTracker.h>
|
||||
|
|
@@ -11,7 +11,7 @@ layout(location=0) in vec2 varTexCoord0;
layout(location=0) out vec4 outFragColor;

void main(void) {
    ivec2 texCoord = ivec2(floor(varTexCoord0 * textureData.textureSize));
    ivec2 texCoord = ivec2(floor(varTexCoord0 * vec2(textureData.textureSize)));
    texCoord.x /= 2;
    int row = int(floor(gl_FragCoord.y));
    if (row % 2 > 0) {
@ -1609,6 +1609,7 @@ PolyVoxEntityRenderer::PolyVoxEntityRenderer(const EntityItemPointer& entity) :
_vertexFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
_vertexFormat->setAttribute(gpu::Stream::NORMAL, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 12);
});
_params = std::make_shared<gpu::Buffer>(sizeof(glm::vec4), nullptr);
}

ShapeKey PolyVoxEntityRenderer::getShapeKey() {

@ -1671,9 +1672,12 @@ void PolyVoxEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& s

void PolyVoxEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) {
_lastVoxelToWorldMatrix = entity->voxelToWorldMatrix();
_lastVoxelVolumeSize = entity->getVoxelVolumeSize();
_params->setSubData(0, vec4(_lastVoxelVolumeSize, 0.0));
graphics::MeshPointer newMesh;
entity->withReadLock([&] {
newMesh = entity->_mesh;

});

if (newMesh && newMesh->getIndexBuffer()._buffer) {

@ -1686,6 +1690,7 @@ void PolyVoxEntityRenderer::doRender(RenderArgs* args) {
return;
}

PerformanceTimer perfTimer("RenderablePolyVoxEntityItem::render");
gpu::Batch& batch = *args->_batch;

@ -1695,6 +1700,7 @@ void PolyVoxEntityRenderer::doRender(RenderArgs* args) {
batch.setInputBuffer(gpu::Stream::POSITION, _mesh->getVertexBuffer()._buffer, 0,
sizeof(PolyVox::PositionMaterialNormal));

// TODO -- should we be setting this?
// batch.setInputBuffer(gpu::Stream::NORMAL, mesh->getVertexBuffer()._buffer,
// 12,

@ -1710,7 +1716,7 @@ void PolyVoxEntityRenderer::doRender(RenderArgs* args) {
}
}

batch._glUniform3f(entities_renderer::slot::uniform::PolyvoxVoxelSize, _lastVoxelVolumeSize.x, _lastVoxelVolumeSize.y, _lastVoxelVolumeSize.z);
batch.setUniformBuffer(0, _params);
batch.drawIndexed(gpu::TRIANGLES, (gpu::uint32)_mesh->getNumIndices(), 0);
}

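The PolyVox hunks above and the matching shader changes below swap the loose _glUniform3f voxel-size uniform for a small uniform buffer. A minimal sketch of the host-side pattern, built only from the gpu::Buffer and gpu::Batch calls that appear in this diff; the wrapper class and member names are illustrative. The vec3 is padded out to a vec4 because std140 gives vec3 members vec4 alignment.

// Illustrative sketch, not the renderer's actual class.
class VoxelParamsExample {
public:
    VoxelParamsExample() {
        // one vec4 of storage, matching "struct PolyvoxParams { vec4 voxelVolumeSize; }" in the shader hunks
        _params = std::make_shared<gpu::Buffer>(sizeof(glm::vec4), nullptr);
    }
    void update(const glm::vec3& voxelVolumeSize) {
        // pad the vec3 to a vec4 for std140 layout
        _params->setSubData(0, glm::vec4(voxelVolumeSize, 0.0));
    }
    void bind(gpu::Batch& batch) {
        // binding 0 matches "layout(binding=0) uniform polyvoxParamsBuffer" in the shaders below
        batch.setUniformBuffer(0, _params);
    }
private:
    gpu::BufferPointer _params;
};

Compared with per-draw glUniform calls, the buffer only needs re-uploading when the volume size actually changes, which is in line with the later hunks that drop GPU_UNIFORM_COLOR and GPU_UNIFORM_TEXCOORD_RECT from the shared gpu shader constants.
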
@ -187,6 +187,7 @@ private:
#endif

graphics::MeshPointer _mesh;
gpu::BufferPointer _params;
std::array<NetworkTexturePointer, 3> _xyzTextures;
glm::vec3 _lastVoxelVolumeSize;
glm::mat4 _lastVoxelToWorldMatrix;

@ -15,7 +15,6 @@
#define ENTITIES_SHADER_CONSTANTS_H

// Polyvox
#define ENTITIES_UNIFORM_POLYVOX_VOXEL_SIZE 0
#define ENTITIES_TEXTURE_POLYVOX_XMAP 0
#define ENTITIES_TEXTURE_POLYVOX_YMAP 1
#define ENTITIES_TEXTURE_POLYVOX_ZMAP 2

@ -26,17 +25,6 @@

namespace entities_renderer { namespace slot {

namespace uniform {
enum Uniform {
PolyvoxVoxelSize = ENTITIES_UNIFORM_POLYVOX_VOXEL_SIZE,
};
}

namespace buffer {
enum Buffer {
};
} // namespace buffer

namespace texture {
enum Texture {
PolyvoxXMap = ENTITIES_TEXTURE_POLYVOX_XMAP,

@ -45,7 +45,7 @@ void main(void) {
int frontCondition = 1 -int(gl_FrontFacing) * 2;
vec3 color = varColor.rgb;
packDeferredFragmentTranslucent(
interpolatedNormal * frontCondition,
interpolatedNormal * float(frontCondition),
texel.a * varColor.a,
polyline.color * texel.rgb + fadeEmissive,
vec3(0.01, 0.01, 0.01),

@ -23,15 +23,22 @@ layout(location=RENDER_UTILS_ATTR_POSITION_WS) in vec4 _worldPosition;
layout(binding=ENTITIES_TEXTURE_POLYVOX_XMAP) uniform sampler2D xMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_YMAP) uniform sampler2D yMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_ZMAP) uniform sampler2D zMap;
layout(location=ENTITIES_UNIFORM_POLYVOX_VOXEL_SIZE) uniform vec3 voxelVolumeSize;

struct PolyvoxParams {
vec4 voxelVolumeSize;
};

layout(binding=0) uniform polyvoxParamsBuffer {
PolyvoxParams params;
};

void main(void) {
vec3 worldNormal = cross(dFdy(_worldPosition.xyz), dFdx(_worldPosition.xyz));
worldNormal = normalize(worldNormal);

float inPositionX = (_worldPosition.x - 0.5) / voxelVolumeSize.x;
float inPositionY = (_worldPosition.y - 0.5) / voxelVolumeSize.y;
float inPositionZ = (_worldPosition.z - 0.5) / voxelVolumeSize.z;
float inPositionX = (_worldPosition.x - 0.5) / params.voxelVolumeSize.x;
float inPositionY = (_worldPosition.y - 0.5) / params.voxelVolumeSize.y;
float inPositionZ = (_worldPosition.z - 0.5) / params.voxelVolumeSize.z;

vec4 xyDiffuse = texture(xMap, vec2(-inPositionX, -inPositionY));
vec4 xzDiffuse = texture(yMap, vec2(-inPositionX, inPositionZ));

@ -27,7 +27,13 @@ layout(binding=ENTITIES_TEXTURE_POLYVOX_XMAP) uniform sampler2D xMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_YMAP) uniform sampler2D yMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_ZMAP) uniform sampler2D zMap;

layout(location=ENTITIES_UNIFORM_POLYVOX_VOXEL_SIZE) uniform vec3 voxelVolumeSize;
struct PolyvoxParams {
vec4 voxelVolumeSize;
};

layout(binding=0) uniform polyvoxParamsBuffer {
PolyvoxParams params;
};

// Declare after all samplers to prevent sampler location mix up with voxel shading (sampler locations are hardcoded in RenderablePolyVoxEntityItem)
<$declareFadeFragment()$>

@ -42,9 +48,9 @@ void main(void) {
vec3 worldNormal = cross(dFdy(_worldPosition.xyz), dFdx(_worldPosition.xyz));
worldNormal = normalize(worldNormal);

float inPositionX = (_worldPosition.x - 0.5) / voxelVolumeSize.x;
float inPositionY = (_worldPosition.y - 0.5) / voxelVolumeSize.y;
float inPositionZ = (_worldPosition.z - 0.5) / voxelVolumeSize.z;
float inPositionX = (_worldPosition.x - 0.5) / params.voxelVolumeSize.x;
float inPositionY = (_worldPosition.y - 0.5) / params.voxelVolumeSize.y;
float inPositionZ = (_worldPosition.z - 0.5) / params.voxelVolumeSize.z;

vec4 xyDiffuse = texture(xMap, vec2(-inPositionX, -inPositionY));
vec4 xzDiffuse = texture(yMap, vec2(-inPositionX, inPositionZ));

@ -585,13 +585,8 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {

FBXMesh& fbxMesh = extractedMesh;
graphics::MeshPointer mesh(new graphics::Mesh());

// Grab the vertices in a buffer
auto vb = std::make_shared<gpu::Buffer>();
vb->setData(extractedMesh.vertices.size() * sizeof(glm::vec3),
(const gpu::Byte*) extractedMesh.vertices.data());
gpu::BufferView vbv(vb, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ));
mesh->setVertexBuffer(vbv);
bool hasBlendShapes = !fbxMesh.blendshapes.empty();
int numVerts = extractedMesh.vertices.size();

if (!fbxMesh.normals.empty() && fbxMesh.tangents.empty()) {
// Fill with a dummy value to force tangents to be present if there are normals

@ -607,43 +602,61 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
|
|||
}
|
||||
}
|
||||
|
||||
// evaluate all attribute channels sizes
|
||||
const int normalsSize = fbxMesh.normals.size() * sizeof(NormalType);
|
||||
const int tangentsSize = fbxMesh.tangents.size() * sizeof(NormalType);
|
||||
// evaluate all attribute elements and data sizes
|
||||
|
||||
// Position is a vec3
|
||||
const auto positionElement = gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ);
|
||||
const int positionsSize = numVerts * positionElement.getSize();
|
||||
|
||||
// Normal and tangent are always there together packed in normalized xyz32bits word (times 2)
|
||||
const auto normalElement = FBX_NORMAL_ELEMENT;
|
||||
const int normalsSize = fbxMesh.normals.size() * normalElement.getSize();
|
||||
const int tangentsSize = fbxMesh.tangents.size() * normalElement.getSize();
|
||||
// If there are normals then there should be tangents
|
||||
|
||||
assert(normalsSize <= tangentsSize);
|
||||
if (tangentsSize > normalsSize) {
|
||||
qWarning() << "Unexpected tangents in " << url;
|
||||
}
|
||||
const auto normalsAndTangentsSize = normalsSize + tangentsSize;
|
||||
const int normalsAndTangentsStride = 2 * sizeof(NormalType);
|
||||
const int colorsSize = fbxMesh.colors.size() * sizeof(ColorType);
|
||||
const int normalsAndTangentsStride = 2 * normalElement.getSize();
|
||||
|
||||
// Color attrib
|
||||
const auto colorElement = FBX_COLOR_ELEMENT;
|
||||
const int colorsSize = fbxMesh.colors.size() * colorElement.getSize();
|
||||
|
||||
// Texture coordinates are stored in 2 half floats
|
||||
const int texCoordsSize = fbxMesh.texCoords.size() * sizeof(vec2h);
|
||||
const int texCoords1Size = fbxMesh.texCoords1.size() * sizeof(vec2h);
|
||||
const auto texCoordsElement = gpu::Element(gpu::VEC2, gpu::HALF, gpu::UV);
|
||||
const int texCoordsSize = fbxMesh.texCoords.size() * texCoordsElement.getSize();
|
||||
const int texCoords1Size = fbxMesh.texCoords1.size() * texCoordsElement.getSize();
|
||||
|
||||
int clusterIndicesSize = fbxMesh.clusterIndices.size() * sizeof(uint8_t);
|
||||
if (fbxMesh.clusters.size() > UINT8_MAX) {
|
||||
// we need 16 bits instead of just 8 for clusterIndices
|
||||
clusterIndicesSize *= 2;
|
||||
}
|
||||
// Support for 4 skinning clusters:
|
||||
// 4 Indices are uint8 ideally, uint16 if more than 256.
|
||||
const auto clusterIndiceElement = (fbxMesh.clusters.size() < UINT8_MAX ? gpu::Element(gpu::VEC4, gpu::UINT8, gpu::XYZW) : gpu::Element(gpu::VEC4, gpu::UINT16, gpu::XYZW));
|
||||
// 4 Weights are normalized 16bits
|
||||
const auto clusterWeightElement = gpu::Element(gpu::VEC4, gpu::NUINT16, gpu::XYZW);
|
||||
|
||||
const int clusterWeightsSize = fbxMesh.clusterWeights.size() * sizeof(uint16_t);
|
||||
// Cluster indices and weights must be the same sizes
|
||||
const int NUM_CLUSTERS_PER_VERT = 4;
|
||||
const int numVertClusters = (fbxMesh.clusterIndices.size() == fbxMesh.clusterWeights.size() ? fbxMesh.clusterIndices.size() / NUM_CLUSTERS_PER_VERT : 0);
|
||||
const int clusterIndicesSize = numVertClusters * clusterIndiceElement.getSize();
|
||||
const int clusterWeightsSize = numVertClusters * clusterWeightElement.getSize();
|
||||
|
||||
// Normals and tangents are interleaved
|
||||
const int normalsOffset = 0;
|
||||
const int tangentsOffset = normalsOffset + sizeof(NormalType);
|
||||
const int colorsOffset = normalsOffset + normalsSize + tangentsSize;
|
||||
// Decide on where to put what seequencially in a big buffer:
|
||||
const int positionsOffset = 0;
|
||||
const int normalsAndTangentsOffset = positionsOffset + positionsSize;
|
||||
const int colorsOffset = normalsAndTangentsOffset + normalsAndTangentsSize;
|
||||
const int texCoordsOffset = colorsOffset + colorsSize;
|
||||
const int texCoords1Offset = texCoordsOffset + texCoordsSize;
|
||||
const int clusterIndicesOffset = texCoords1Offset + texCoords1Size;
|
||||
const int clusterWeightsOffset = clusterIndicesOffset + clusterIndicesSize;
|
||||
const int totalAttributeSize = clusterWeightsOffset + clusterWeightsSize;
|
||||
const int totalVertsSize = clusterWeightsOffset + clusterWeightsSize;
|
||||
|
||||
// Copy all attribute data in a single attribute buffer
|
||||
auto attribBuffer = std::make_shared<gpu::Buffer>();
|
||||
attribBuffer->resize(totalAttributeSize);
|
||||
// Copy all vertex data in a single buffer
|
||||
auto vertBuffer = std::make_shared<gpu::Buffer>();
|
||||
vertBuffer->resize(totalVertsSize);
|
||||
|
||||
// First positions
|
||||
vertBuffer->setSubData(positionsOffset, positionsSize, (const gpu::Byte*) extractedMesh.vertices.data());
|
||||
|
||||
// Interleave normals and tangents
|
||||
if (normalsSize > 0) {
|
||||
|
@ -651,8 +664,8 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
|
|||
|
||||
normalsAndTangents.reserve(fbxMesh.normals.size() + fbxMesh.tangents.size());
|
||||
for (auto normalIt = fbxMesh.normals.constBegin(), tangentIt = fbxMesh.tangents.constBegin();
|
||||
normalIt != fbxMesh.normals.constEnd();
|
||||
++normalIt, ++tangentIt) {
|
||||
normalIt != fbxMesh.normals.constEnd();
|
||||
++normalIt, ++tangentIt) {
|
||||
#if FBX_PACK_NORMALS
|
||||
const auto normal = normalizeDirForPacking(*normalIt);
|
||||
const auto tangent = normalizeDirForPacking(*tangentIt);
|
||||
|
@ -665,9 +678,10 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
|
|||
normalsAndTangents.push_back(packedNormal);
|
||||
normalsAndTangents.push_back(packedTangent);
|
||||
}
|
||||
attribBuffer->setSubData(normalsOffset, normalsAndTangentsSize, (const gpu::Byte*) normalsAndTangents.data());
|
||||
vertBuffer->setSubData(normalsAndTangentsOffset, normalsAndTangentsSize, (const gpu::Byte*) normalsAndTangents.data());
|
||||
}
|
||||
|
||||
// Pack colors
|
||||
if (colorsSize > 0) {
|
||||
#if FBX_PACK_COLORS
|
||||
std::vector<ColorType> colors;
|
||||
|
@ -676,12 +690,13 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
|
|||
for (const auto& color : fbxMesh.colors) {
|
||||
colors.push_back(glm::packUnorm4x8(glm::vec4(color, 1.0f)));
|
||||
}
|
||||
attribBuffer->setSubData(colorsOffset, colorsSize, (const gpu::Byte*) colors.data());
|
||||
vertBuffer->setSubData(colorsOffset, colorsSize, (const gpu::Byte*) colors.data());
|
||||
#else
|
||||
attribBuffer->setSubData(colorsOffset, colorsSize, (const gpu::Byte*) fbxMesh.colors.constData());
|
||||
vertBuffer->setSubData(colorsOffset, colorsSize, (const gpu::Byte*) fbxMesh.colors.constData());
|
||||
#endif
|
||||
}
|
||||
|
||||
// Pack Texcoords 0 and 1 (if exists)
|
||||
if (texCoordsSize > 0) {
|
||||
QVector<vec2h> texCoordData;
|
||||
texCoordData.reserve(fbxMesh.texCoords.size());
|
||||
|
@ -692,9 +707,8 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
|
|||
texCoordVec2h.y = glm::detail::toFloat16(texCoordVec2f.y);
|
||||
texCoordData.push_back(texCoordVec2h);
|
||||
}
|
||||
attribBuffer->setSubData(texCoordsOffset, texCoordsSize, (const gpu::Byte*) texCoordData.constData());
|
||||
vertBuffer->setSubData(texCoordsOffset, texCoordsSize, (const gpu::Byte*) texCoordData.constData());
|
||||
}
|
||||
|
||||
if (texCoords1Size > 0) {
|
||||
QVector<vec2h> texCoordData;
|
||||
texCoordData.reserve(fbxMesh.texCoords1.size());
|
||||
|
@ -705,69 +719,170 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
|
|||
texCoordVec2h.y = glm::detail::toFloat16(texCoordVec2f.y);
|
||||
texCoordData.push_back(texCoordVec2h);
|
||||
}
|
||||
attribBuffer->setSubData(texCoords1Offset, texCoords1Size, (const gpu::Byte*) texCoordData.constData());
|
||||
vertBuffer->setSubData(texCoords1Offset, texCoords1Size, (const gpu::Byte*) texCoordData.constData());
|
||||
}
|
||||
|
||||
if (fbxMesh.clusters.size() < UINT8_MAX) {
|
||||
// yay! we can fit the clusterIndices within 8-bits
|
||||
int32_t numIndices = fbxMesh.clusterIndices.size();
|
||||
QVector<uint8_t> clusterIndices;
|
||||
clusterIndices.resize(numIndices);
|
||||
for (int32_t i = 0; i < numIndices; ++i) {
|
||||
assert(fbxMesh.clusterIndices[i] <= UINT8_MAX);
|
||||
clusterIndices[i] = (uint8_t)(fbxMesh.clusterIndices[i]);
|
||||
// Clusters data
|
||||
if (clusterIndicesSize > 0) {
|
||||
if (fbxMesh.clusters.size() < UINT8_MAX) {
|
||||
// yay! we can fit the clusterIndices within 8-bits
|
||||
int32_t numIndices = fbxMesh.clusterIndices.size();
|
||||
QVector<uint8_t> clusterIndices;
|
||||
clusterIndices.resize(numIndices);
|
||||
for (int32_t i = 0; i < numIndices; ++i) {
|
||||
assert(fbxMesh.clusterIndices[i] <= UINT8_MAX);
|
||||
clusterIndices[i] = (uint8_t)(fbxMesh.clusterIndices[i]);
|
||||
}
|
||||
vertBuffer->setSubData(clusterIndicesOffset, clusterIndicesSize, (const gpu::Byte*) clusterIndices.constData());
|
||||
} else {
|
||||
vertBuffer->setSubData(clusterIndicesOffset, clusterIndicesSize, (const gpu::Byte*) fbxMesh.clusterIndices.constData());
|
||||
}
|
||||
attribBuffer->setSubData(clusterIndicesOffset, clusterIndicesSize, (const gpu::Byte*) clusterIndices.constData());
|
||||
} else {
|
||||
attribBuffer->setSubData(clusterIndicesOffset, clusterIndicesSize, (const gpu::Byte*) fbxMesh.clusterIndices.constData());
|
||||
}
|
||||
attribBuffer->setSubData(clusterWeightsOffset, clusterWeightsSize, (const gpu::Byte*) fbxMesh.clusterWeights.constData());
|
||||
if (clusterWeightsSize > 0) {
|
||||
vertBuffer->setSubData(clusterWeightsOffset, clusterWeightsSize, (const gpu::Byte*) fbxMesh.clusterWeights.constData());
|
||||
}
|
||||
|
||||
if (normalsSize) {
|
||||
mesh->addAttribute(gpu::Stream::NORMAL,
|
||||
graphics::BufferView(attribBuffer, normalsOffset, normalsAndTangentsSize,
|
||||
normalsAndTangentsStride, FBX_NORMAL_ELEMENT));
|
||||
mesh->addAttribute(gpu::Stream::TANGENT,
|
||||
graphics::BufferView(attribBuffer, tangentsOffset, normalsAndTangentsSize,
|
||||
normalsAndTangentsStride, FBX_NORMAL_ELEMENT));
|
||||
|
||||
// Now we decide on how to interleave the attributes and provide the vertices among bufers:
|
||||
// Aka the Vertex format and the vertexBufferStream
|
||||
auto vertexFormat = std::make_shared<gpu::Stream::Format>();
|
||||
auto vertexBufferStream = std::make_shared<gpu::BufferStream>();
|
||||
|
||||
// Decision time:
|
||||
// if blendshapes then keep position and normals/tangents as separated channel buffers from interleaved attributes
|
||||
// else everything is interleaved in one buffer
|
||||
|
||||
// Default case is no blend shapes
|
||||
gpu::BufferPointer attribBuffer;
|
||||
int totalAttribBufferSize = totalVertsSize;
|
||||
gpu::uint8 posChannel = 0;
|
||||
gpu::uint8 tangentChannel = posChannel;
|
||||
gpu::uint8 attribChannel = posChannel;
|
||||
bool interleavePositions = true;
|
||||
bool interleaveNormalsTangents = true;
|
||||
|
||||
// TODO: We are using the same vertex format layout for all meshes because this is more efficient
|
||||
// This work is going into rc73 release which is meant to be used for the SPot500 event and we are picking the format
|
||||
// that works best for blendshaped and skinned meshes aka the avatars.
|
||||
// We will improve this technique in a hot fix to 73.
|
||||
hasBlendShapes = true;
|
||||
|
||||
// If has blend shapes allocate and assign buffers for pos and tangents now
|
||||
if (hasBlendShapes) {
|
||||
auto posBuffer = std::make_shared<gpu::Buffer>();
|
||||
posBuffer->setData(positionsSize, (const gpu::Byte*) vertBuffer->getData() + positionsOffset);
|
||||
vertexBufferStream->addBuffer(posBuffer, 0, positionElement.getSize());
|
||||
|
||||
auto normalsAndTangentsBuffer = std::make_shared<gpu::Buffer>();
|
||||
normalsAndTangentsBuffer->setData(normalsAndTangentsSize, (const gpu::Byte*) vertBuffer->getData() + normalsAndTangentsOffset);
|
||||
vertexBufferStream->addBuffer(normalsAndTangentsBuffer, 0, normalsAndTangentsStride);
|
||||
|
||||
// update channels and attribBuffer size accordingly
|
||||
interleavePositions = false;
|
||||
interleaveNormalsTangents = false;
|
||||
|
||||
tangentChannel = 1;
|
||||
attribChannel = 2;
|
||||
|
||||
totalAttribBufferSize = totalVertsSize - positionsSize - normalsAndTangentsSize;
|
||||
}
|
||||
|
||||
// Define the vertex format, compute the offset for each attributes as we append them to the vertex format
|
||||
gpu::Offset bufOffset = 0;
|
||||
if (positionsSize) {
|
||||
vertexFormat->setAttribute(gpu::Stream::POSITION, posChannel, positionElement, bufOffset);
|
||||
bufOffset += positionElement.getSize();
|
||||
if (!interleavePositions) {
|
||||
bufOffset = 0;
|
||||
}
|
||||
}
|
||||
if (normalsSize) {
|
||||
vertexFormat->setAttribute(gpu::Stream::NORMAL, tangentChannel, normalElement, bufOffset);
|
||||
bufOffset += normalElement.getSize();
|
||||
vertexFormat->setAttribute(gpu::Stream::TANGENT, tangentChannel, normalElement, bufOffset);
|
||||
bufOffset += normalElement.getSize();
|
||||
if (!interleaveNormalsTangents) {
|
||||
bufOffset = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Pack normal and Tangent with the rest of atributes if no blend shapes
|
||||
if (colorsSize) {
|
||||
mesh->addAttribute(gpu::Stream::COLOR,
|
||||
graphics::BufferView(attribBuffer, colorsOffset, colorsSize, FBX_COLOR_ELEMENT));
|
||||
vertexFormat->setAttribute(gpu::Stream::COLOR, attribChannel, colorElement, bufOffset);
|
||||
bufOffset += colorElement.getSize();
|
||||
}
|
||||
if (texCoordsSize) {
|
||||
mesh->addAttribute(gpu::Stream::TEXCOORD,
|
||||
graphics::BufferView( attribBuffer, texCoordsOffset, texCoordsSize,
|
||||
gpu::Element(gpu::VEC2, gpu::HALF, gpu::UV)));
|
||||
vertexFormat->setAttribute(gpu::Stream::TEXCOORD, attribChannel, texCoordsElement, bufOffset);
|
||||
bufOffset += texCoordsElement.getSize();
|
||||
}
|
||||
if (texCoords1Size) {
|
||||
mesh->addAttribute( gpu::Stream::TEXCOORD1,
|
||||
graphics::BufferView(attribBuffer, texCoords1Offset, texCoords1Size,
|
||||
gpu::Element(gpu::VEC2, gpu::HALF, gpu::UV)));
|
||||
vertexFormat->setAttribute(gpu::Stream::TEXCOORD1, attribChannel, texCoordsElement, bufOffset);
|
||||
bufOffset += texCoordsElement.getSize();
|
||||
} else if (texCoordsSize) {
|
||||
mesh->addAttribute(gpu::Stream::TEXCOORD1,
|
||||
graphics::BufferView(attribBuffer, texCoordsOffset, texCoordsSize,
|
||||
gpu::Element(gpu::VEC2, gpu::HALF, gpu::UV)));
|
||||
vertexFormat->setAttribute(gpu::Stream::TEXCOORD1, attribChannel, texCoordsElement, bufOffset - texCoordsElement.getSize());
|
||||
}
|
||||
|
||||
if (clusterIndicesSize) {
|
||||
if (fbxMesh.clusters.size() < UINT8_MAX) {
|
||||
mesh->addAttribute(gpu::Stream::SKIN_CLUSTER_INDEX,
|
||||
graphics::BufferView(attribBuffer, clusterIndicesOffset, clusterIndicesSize,
|
||||
gpu::Element(gpu::VEC4, gpu::UINT8, gpu::XYZW)));
|
||||
} else {
|
||||
mesh->addAttribute(gpu::Stream::SKIN_CLUSTER_INDEX,
|
||||
graphics::BufferView(attribBuffer, clusterIndicesOffset, clusterIndicesSize,
|
||||
gpu::Element(gpu::VEC4, gpu::UINT16, gpu::XYZW)));
|
||||
}
|
||||
vertexFormat->setAttribute(gpu::Stream::SKIN_CLUSTER_INDEX, attribChannel, clusterIndiceElement, bufOffset);
|
||||
bufOffset += clusterIndiceElement.getSize();
|
||||
}
|
||||
if (clusterWeightsSize) {
|
||||
mesh->addAttribute(gpu::Stream::SKIN_CLUSTER_WEIGHT,
|
||||
graphics::BufferView(attribBuffer, clusterWeightsOffset, clusterWeightsSize,
|
||||
gpu::Element(gpu::VEC4, gpu::NUINT16, gpu::XYZW)));
|
||||
vertexFormat->setAttribute(gpu::Stream::SKIN_CLUSTER_WEIGHT, attribChannel, clusterWeightElement, bufOffset);
|
||||
bufOffset += clusterWeightElement.getSize();
|
||||
}
|
||||
|
||||
// Finally, allocate and fill the attribBuffer interleaving the attributes as needed:
|
||||
{
|
||||
auto vPositionOffset = 0;
|
||||
auto vPositionSize = (interleavePositions ? positionsSize / numVerts : 0);
|
||||
|
||||
auto vNormalsAndTangentsOffset = vPositionOffset + vPositionSize;
|
||||
auto vNormalsAndTangentsSize = (interleaveNormalsTangents ? normalsAndTangentsSize / numVerts : 0);
|
||||
|
||||
auto vColorOffset = vNormalsAndTangentsOffset + vNormalsAndTangentsSize;
|
||||
auto vColorSize = colorsSize / numVerts;
|
||||
|
||||
auto vTexcoord0Offset = vColorOffset + vColorSize;
|
||||
auto vTexcoord0Size = texCoordsSize / numVerts;
|
||||
|
||||
auto vTexcoord1Offset = vTexcoord0Offset + vTexcoord0Size;
|
||||
auto vTexcoord1Size = texCoords1Size / numVerts;
|
||||
|
||||
auto vClusterIndiceOffset = vTexcoord1Offset + vTexcoord1Size;
|
||||
auto vClusterIndiceSize = clusterIndicesSize / numVerts;
|
||||
|
||||
auto vClusterWeightOffset = vClusterIndiceOffset + vClusterIndiceSize;
|
||||
auto vClusterWeightSize = clusterWeightsSize / numVerts;
|
||||
|
||||
auto vStride = vClusterWeightOffset + vClusterWeightSize;
|
||||
|
||||
std::vector<gpu::Byte> dest;
|
||||
dest.resize(totalAttribBufferSize);
|
||||
auto vDest = dest.data();
|
||||
|
||||
auto source = vertBuffer->getData();
|
||||
|
||||
for (int i = 0; i < numVerts; i++) {
|
||||
|
||||
if (vPositionSize) memcpy(vDest + vPositionOffset, source + positionsOffset + i * vPositionSize, vPositionSize);
|
||||
if (vNormalsAndTangentsSize) memcpy(vDest + vNormalsAndTangentsOffset, source + normalsAndTangentsOffset + i * vNormalsAndTangentsSize, vNormalsAndTangentsSize);
|
||||
if (vColorSize) memcpy(vDest + vColorOffset, source + colorsOffset + i * vColorSize, vColorSize);
|
||||
if (vTexcoord0Size) memcpy(vDest + vTexcoord0Offset, source + texCoordsOffset + i * vTexcoord0Size, vTexcoord0Size);
|
||||
if (vTexcoord1Size) memcpy(vDest + vTexcoord1Offset, source + texCoords1Offset + i * vTexcoord1Size, vTexcoord1Size);
|
||||
if (vClusterIndiceSize) memcpy(vDest + vClusterIndiceOffset, source + clusterIndicesOffset + i * vClusterIndiceSize, vClusterIndiceSize);
|
||||
if (vClusterWeightSize) memcpy(vDest + vClusterWeightOffset, source + clusterWeightsOffset + i * vClusterWeightSize, vClusterWeightSize);
|
||||
|
||||
vDest += vStride;
|
||||
}
|
||||
|
||||
auto attribBuffer = std::make_shared<gpu::Buffer>();
|
||||
attribBuffer->setData(totalAttribBufferSize, dest.data());
|
||||
vertexBufferStream->addBuffer(attribBuffer, 0, vStride);
|
||||
}
|
||||
|
||||
// Mesh vertex format and vertex stream is ready
|
||||
mesh->setVertexFormatAndStream(vertexFormat, vertexBufferStream);
|
||||
|
||||
// Index and Part Buffers
|
||||
unsigned int totalIndices = 0;
|
||||
foreach(const FBXMeshPart& part, extractedMesh.parts) {
|
||||
totalIndices += (part.quadTrianglesIndices.size() + part.triangleIndices.size());
|
||||
|
|
|
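The buildModelMesh rework in the hunks above lays every attribute out inside one per-vertex stride and copies them vertex by vertex into a single interleaved buffer. Below is a stripped-down, self-contained sketch of that layout-and-copy step; it uses plain byte buffers instead of gpu::Buffer, and the attribute set is purely illustrative.

#include <cstdint>
#include <cstring>
#include <vector>

// One entry per vertex attribute: a tightly packed source array and its per-vertex size.
struct Attribute {
    const uint8_t* packedData;   // numVerts * size bytes, tightly packed
    size_t size;                 // bytes per vertex for this attribute
    size_t offset = 0;           // filled in by interleave(): offset inside one vertex
};

// Assign sequential offsets, then copy each attribute into its slot, vertex by vertex,
// mirroring the vDest / vStride loop in the diff above.
std::vector<uint8_t> interleave(std::vector<Attribute>& attributes, size_t numVerts) {
    size_t stride = 0;
    for (auto& attr : attributes) {
        attr.offset = stride;
        stride += attr.size;
    }
    std::vector<uint8_t> dest(stride * numVerts);
    for (size_t v = 0; v < numVerts; ++v) {
        uint8_t* vDest = dest.data() + v * stride;
        for (const auto& attr : attributes) {
            std::memcpy(vDest + attr.offset, attr.packedData + v * attr.size, attr.size);
        }
    }
    return dest;
}

The real code additionally keeps positions and normals/tangents in their own channels when blend shapes are present, which is why the diff tracks the interleavePositions / interleaveNormalsTangents flags and separate buffer channels.
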
@ -239,6 +239,7 @@ public:
virtual GLuint getFramebufferID(const FramebufferPointer& framebuffer) = 0;
virtual GLuint getTextureID(const TexturePointer& texture) final;
virtual GLuint getBufferID(const Buffer& buffer) = 0;
virtual GLuint getBufferIDUnsynced(const Buffer& buffer) = 0;
virtual GLuint getQueryID(const QueryPointer& query) = 0;

virtual GLFramebuffer* syncGPUObject(const Framebuffer& framebuffer) = 0;

@ -11,6 +11,7 @@
#include "GLBackend.h"
#include "GLShared.h"
#include "GLInputFormat.h"
#include "GLBuffer.h"

using namespace gpu;
using namespace gpu::gl;

@ -43,13 +44,7 @@ void GLBackend::do_setInputBuffer(const Batch& batch, size_t paramOffset) {
bool isModified = false;
if (_input._buffers[channel] != buffer) {
_input._buffers[channel] = buffer;

GLuint vbo = 0;
if (buffer) {
vbo = getBufferID((*buffer));
}
_input._bufferVBOs[channel] = vbo;

_input._bufferVBOs[channel] = getBufferIDUnsynced((*buffer));
isModified = true;
}

@ -128,7 +123,7 @@ void GLBackend::do_setIndexBuffer(const Batch& batch, size_t paramOffset) {
if (indexBuffer != _input._indexBuffer) {
_input._indexBuffer = indexBuffer;
if (indexBuffer) {
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, getBufferID(*indexBuffer));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, getBufferIDUnsynced(*indexBuffer));
} else {
// FIXME do we really need this? Is there ever a draw call where we care that the element buffer is null?
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

@ -145,7 +140,7 @@ void GLBackend::do_setIndirectBuffer(const Batch& batch, size_t paramOffset) {
if (buffer != _input._indirectBuffer) {
_input._indirectBuffer = buffer;
if (buffer) {
glBindBuffer(GL_DRAW_INDIRECT_BUFFER, getBufferID(*buffer));
glBindBuffer(GL_DRAW_INDIRECT_BUFFER, getBufferIDUnsynced(*buffer));
} else {
// FIXME do we really need this? Is there ever a draw call where we care that the element buffer is null?
glBindBuffer(GL_DRAW_INDIRECT_BUFFER, 0);

@ -261,9 +256,17 @@ void GLBackend::updateInput() {
auto offset = _input._bufferOffsets.data();
auto stride = _input._bufferStrides.data();

// Profile the count of buffers to update and use it to short cut the for loop
int numInvalids = (int) _input._invalidBuffers.count();
_stats._ISNumInputBufferChanges += numInvalids;

for (GLuint buffer = 0; buffer < _input._buffers.size(); buffer++, vbo++, offset++, stride++) {
if (_input._invalidBuffers.test(buffer)) {
glBindVertexBuffer(buffer, (*vbo), (*offset), (GLsizei)(*stride));
numInvalids--;
if (numInvalids <= 0) {
break;
}
}
}

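The updateInput hunk above counts the dirty input buffers once and bails out of the rebind loop as soon as that many have been handled, instead of always walking every channel. A small standalone illustration of the same short-circuit, with std::bitset standing in for _input._invalidBuffers and a printf standing in for glBindVertexBuffer:

#include <bitset>
#include <cstdio>

constexpr size_t MAX_INPUT_BUFFERS = 16;

void bindInvalidBuffers(std::bitset<MAX_INPUT_BUFFERS>& invalidBuffers) {
    // Count once up front so the loop can stop after the last dirty slot.
    int numInvalids = static_cast<int>(invalidBuffers.count());
    for (size_t channel = 0; channel < MAX_INPUT_BUFFERS; ++channel) {
        if (invalidBuffers.test(channel)) {
            std::printf("rebinding vertex buffer channel %zu\n", channel); // glBindVertexBuffer(...) in the real code
            if (--numInvalids <= 0) {
                break; // every dirty buffer handled; skip the remaining channels
            }
        }
    }
    invalidBuffers.reset();
}
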
@ -85,6 +85,8 @@ void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
auto& cameraCorrectionBuffer = _transform._viewCorrectionEnabled ?
_pipeline._cameraCorrectionBuffer._buffer :
_pipeline._cameraCorrectionBufferIdentity._buffer;
// Because we don't sync Buffers in the bindUniformBuffer, let s force this buffer synced
getBufferID(*cameraCorrectionBuffer);
bindUniformBuffer(gpu::slot::buffer::CameraCorrection, cameraCorrectionBuffer, 0, sizeof(CameraCorrection));
}
(void)CHECK_GL_ERROR();

@ -170,11 +172,10 @@ void GLBackend::bindUniformBuffer(uint32_t slot, const BufferPointer& buffer, GL
return;
}

// Sync BufferObject
auto* object = syncGPUObject(*bufferState.buffer);
if (object) {
glBindBufferRange(GL_UNIFORM_BUFFER, slot, object->_buffer, bufferState.offset, bufferState.size);

// Grab the true gl Buffer object
auto glBO = getBufferIDUnsynced(*buffer);
if (glBO) {
glBindBufferRange(GL_UNIFORM_BUFFER, slot, glBO, bufferState.offset, bufferState.size);
_uniform._buffers[slot] = bufferState;
(void)CHECK_GL_ERROR();
} else {

@ -49,6 +49,16 @@ public:
}
}

template <typename GLBufferType>
static GLuint getIdUnsynced(GLBackend& backend, const Buffer& buffer) {
GLBufferType* object = Backend::getGPUObject<GLBufferType>(buffer);
if (object) {
return object->_buffer;
} else {
return 0;
}
}

const GLuint& _buffer { _id };
const GLuint _size;
const Stamp _stamp;

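getIdUnsynced above hands back whatever GL name the buffer object already has, without first flushing pending CPU-side writes the way the sync path does; that is why do_setPipeline earlier in this diff calls getBufferID on the camera-correction buffer once, so later binds can safely take the cheaper unsynced route. A rough sketch of that split, with hypothetical stand-in types rather than the backend's real classes:

// Hypothetical stand-ins; the real backend keeps this state in GLBuffer / GLBackend.
struct GLObjectStandIn {
    unsigned int glName = 0;   // 0 until the GL object has been created
    bool dirty = true;         // CPU-side data not yet uploaded
};

// getBufferID-style accessor: make sure the GL object exists and is up to date.
unsigned int syncedId(GLObjectStandIn& object) {
    if (object.dirty) {
        // glGenBuffers / glBufferSubData would happen here in the real backend
        object.glName = object.glName ? object.glName : 1;
        object.dirty = false;
    }
    return object.glName;
}

// getBufferIDUnsynced-style accessor: just report the current name, no upload.
unsigned int unsyncedId(const GLObjectStandIn& object) {
    return object.glName;
}
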
@ -134,6 +134,7 @@ protected:
|
|||
GLFramebuffer* syncGPUObject(const Framebuffer& framebuffer) override;
|
||||
|
||||
GLuint getBufferID(const Buffer& buffer) override;
|
||||
GLuint getBufferIDUnsynced(const Buffer& buffer) override;
|
||||
GLuint getResourceBufferID(const Buffer& buffer);
|
||||
GLBuffer* syncGPUObject(const Buffer& buffer) override;
|
||||
|
||||
|
|
|
@ -83,6 +83,10 @@ GLuint GL41Backend::getBufferID(const Buffer& buffer) {
|
|||
return GL41Buffer::getId<GL41Buffer>(*this, buffer);
|
||||
}
|
||||
|
||||
GLuint GL41Backend::getBufferIDUnsynced(const Buffer& buffer) {
|
||||
return GL41Buffer::getIdUnsynced<GL41Buffer>(*this, buffer);
|
||||
}
|
||||
|
||||
GLuint GL41Backend::getResourceBufferID(const Buffer& buffer) {
|
||||
auto* object = GL41Buffer::sync<GL41Buffer>(*this, buffer);
|
||||
if (object) {
|
||||
|
|
|
@ -78,8 +78,9 @@ void GL41Backend::updateInput() {
|
|||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
_stats._ISNumInputBufferChanges++;
|
||||
|
||||
int numInvalids = (int)_input._invalidBuffers.count();
|
||||
_stats._ISNumInputBufferChanges += numInvalids;
|
||||
|
||||
GLuint boundVBO = 0;
|
||||
for (auto& channelIt : inputChannels) {
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
|
||||
|
|
|
@ -235,6 +235,7 @@ protected:
|
|||
GLFramebuffer* syncGPUObject(const Framebuffer& framebuffer) override;
|
||||
|
||||
GLuint getBufferID(const Buffer& buffer) override;
|
||||
GLuint getBufferIDUnsynced(const Buffer& buffer) override;
|
||||
GLBuffer* syncGPUObject(const Buffer& buffer) override;
|
||||
|
||||
GLTexture* syncGPUObject(const TexturePointer& texture) override;
|
||||
|
|
|
@ -51,6 +51,10 @@ GLuint GL45Backend::getBufferID(const Buffer& buffer) {
|
|||
return GL45Buffer::getId<GL45Buffer>(*this, buffer);
|
||||
}
|
||||
|
||||
GLuint GL45Backend::getBufferIDUnsynced(const Buffer& buffer) {
|
||||
return GL45Buffer::getIdUnsynced<GL45Buffer>(*this, buffer);
|
||||
}
|
||||
|
||||
GLBuffer* GL45Backend::syncGPUObject(const Buffer& buffer) {
|
||||
return GL45Buffer::sync<GL45Buffer>(*this, buffer);
|
||||
}
|
||||
|
|
|
@ -132,9 +132,18 @@ void GL45Backend::updateInput() {
|
|||
auto offset = _input._bufferOffsets.data();
|
||||
auto stride = _input._bufferStrides.data();
|
||||
|
||||
for (GLuint buffer = 0; buffer < _input._buffers.size(); buffer++, vbo++, offset++, stride++) {
|
||||
// Profile the count of buffers to update and use it to short cut the for loop
|
||||
int numInvalids = (int) _input._invalidBuffers.count();
|
||||
_stats._ISNumInputBufferChanges += numInvalids;
|
||||
|
||||
auto numBuffers = _input._buffers.size();
|
||||
for (GLuint buffer = 0; buffer < numBuffers; buffer++, vbo++, offset++, stride++) {
|
||||
if (_input._invalidBuffers.test(buffer)) {
|
||||
glBindVertexBuffer(buffer, (*vbo), (*offset), (GLsizei)(*stride));
|
||||
numInvalids--;
|
||||
if (numInvalids <= 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -130,6 +130,7 @@ protected:
|
|||
GLFramebuffer* syncGPUObject(const Framebuffer& framebuffer) override;
|
||||
|
||||
GLuint getBufferID(const Buffer& buffer) override;
|
||||
GLuint getBufferIDUnsynced(const Buffer& buffer) override;
|
||||
GLuint getResourceBufferID(const Buffer& buffer);
|
||||
GLBuffer* syncGPUObject(const Buffer& buffer) override;
|
||||
|
||||
|
|
|
@ -64,6 +64,10 @@ GLuint GLESBackend::getBufferID(const Buffer& buffer) {
|
|||
return GLESBuffer::getId<GLESBuffer>(*this, buffer);
|
||||
}
|
||||
|
||||
GLuint GLESBackend::getBufferIDUnsynced(const Buffer& buffer) {
|
||||
return GLESBuffer::getIdUnsynced<GLESBuffer>(*this, buffer);
|
||||
}
|
||||
|
||||
GLBuffer* GLESBackend::syncGPUObject(const Buffer& buffer) {
|
||||
return GLESBuffer::sync<GLESBuffer>(*this, buffer);
|
||||
}
|
||||
|
|
|
@ -417,10 +417,7 @@ public:
|
|||
}
|
||||
|
||||
const Data& get(uint32 offset) const {
|
||||
if (offset >= _items.size()) {
|
||||
static const Data EMPTY;
|
||||
return EMPTY;
|
||||
}
|
||||
assert((offset < _items.size()));
|
||||
return (_items.data() + offset)->_data;
|
||||
}
|
||||
|
||||
|
|
|
@ -31,29 +31,29 @@ vec4 color_sRGBAToLinear(vec4 srgba) {
}

vec3 color_LinearToYCoCg(vec3 rgb) {
// Y = R/4 + G/2 + B/4
// Co = R/2 - B/2
// Cg = -R/4 + G/2 - B/4
return vec3(
rgb.x/4.0 + rgb.y/2.0 + rgb.z/4.0,
rgb.x/2.0 - rgb.z/2.0,
-rgb.x/4.0 + rgb.y/2.0 - rgb.z/4.0
);
// Y = R/4 + G/2 + B/4
// Co = R/2 - B/2
// Cg = -R/4 + G/2 - B/4
return vec3(
rgb.x/4.0 + rgb.y/2.0 + rgb.z/4.0,
rgb.x/2.0 - rgb.z/2.0,
-rgb.x/4.0 + rgb.y/2.0 - rgb.z/4.0
);
}

vec3 color_YCoCgToUnclampedLinear(vec3 ycocg) {
// R = Y + Co - Cg
// G = Y + Cg
// B = Y - Co - Cg
return vec3(
ycocg.x + ycocg.y - ycocg.z,
ycocg.x + ycocg.z,
ycocg.x - ycocg.y - ycocg.z
);
// R = Y + Co - Cg
// G = Y + Cg
// B = Y - Co - Cg
return vec3(
ycocg.x + ycocg.y - ycocg.z,
ycocg.x + ycocg.z,
ycocg.x - ycocg.y - ycocg.z
);
}

vec3 color_YCoCgToLinear(vec3 ycocg) {
return clamp(color_YCoCgToUnclampedLinear(ycocg), vec3(0.0), vec3(1.0));
return clamp(color_YCoCgToUnclampedLinear(ycocg), vec3(0.0), vec3(1.0));
}

<@func declareColorWheel()@>

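As a quick sanity check on the YCoCg helpers above (the hunk only re-indents them), the forward and inverse transforms are exact inverses of each other. This tiny standalone program, not part of the repository, verifies the round trip for pure red:

#include <cassert>

struct Vec3 { float x, y, z; };

// Same coefficients as color_LinearToYCoCg / color_YCoCgToUnclampedLinear above.
Vec3 linearToYCoCg(Vec3 rgb) {
    return { rgb.x / 4.0f + rgb.y / 2.0f + rgb.z / 4.0f,
             rgb.x / 2.0f - rgb.z / 2.0f,
            -rgb.x / 4.0f + rgb.y / 2.0f - rgb.z / 4.0f };
}

Vec3 yCoCgToLinear(Vec3 c) {
    return { c.x + c.y - c.z, c.x + c.z, c.x - c.y - c.z };
}

int main() {
    Vec3 red { 1.0f, 0.0f, 0.0f };
    Vec3 ycocg = linearToYCoCg(red);   // (0.25, 0.5, -0.25)
    Vec3 back = yCoCgToLinear(ycocg);  // back to (1, 0, 0)
    assert(back.x == 1.0f && back.y == 0.0f && back.z == 0.0f);
    return 0;
}
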
@ -13,12 +13,16 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include gpu/ShaderConstants.h@>
|
||||
struct DrawColorParams {
|
||||
vec4 color;
|
||||
};
|
||||
|
||||
layout(location=GPU_UNIFORM_COLOR) uniform vec4 color;
|
||||
layout(binding=0) uniform drawColorParamsBuffer {
|
||||
DrawColorParams params;
|
||||
};
|
||||
|
||||
layout(location=0) out vec4 outFragColor;
|
||||
|
||||
void main(void) {
|
||||
outFragColor = color;
|
||||
outFragColor = params.color;
|
||||
}
|
||||
|
|
|
@ -13,14 +13,19 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include gpu/ShaderConstants.h@>
|
||||
|
||||
layout(binding=0) uniform sampler2D colorMap;
|
||||
layout(location=GPU_UNIFORM_COLOR) uniform vec4 color;
|
||||
|
||||
struct DrawColorParams {
|
||||
vec4 color;
|
||||
};
|
||||
|
||||
layout(binding=0) uniform drawColorParams {
|
||||
DrawColorParams params;
|
||||
};
|
||||
|
||||
layout(location=0) in vec2 varTexCoord0;
|
||||
layout(location=0) out vec4 outFragColor;
|
||||
|
||||
void main(void) {
|
||||
outFragColor = texture(colorMap, varTexCoord0) * color;
|
||||
outFragColor = texture(colorMap, varTexCoord0) * params.color;
|
||||
}
|
||||
|
|
|
@ -21,7 +21,13 @@
|
|||
|
||||
<$declareStandardTransform()$>
|
||||
|
||||
layout(location=GPU_UNIFORM_TEXCOORD_RECT) uniform vec4 texcoordRect;
|
||||
struct TexCoordRectParams {
|
||||
vec4 texcoordRect;
|
||||
};
|
||||
|
||||
layout(binding=0) uniform texcoordRectBuffer {
|
||||
TexCoordRectParams params;
|
||||
};
|
||||
|
||||
layout(location=0) out vec2 varTexCoord0;
|
||||
|
||||
|
@ -39,5 +45,5 @@ void main(void) {
|
|||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToClipPos(cam, obj, pos, gl_Position)$>
|
||||
|
||||
varTexCoord0 = ((pos.xy + 1.0) * 0.5) * texcoordRect.zw + texcoordRect.xy;
|
||||
varTexCoord0 = ((pos.xy + 1.0) * 0.5) * params.texcoordRect.zw + params.texcoordRect.xy;
|
||||
}
|
||||
|
|
|
@ -284,14 +284,14 @@ float hifi_noise(in vec2 x) {
|
|||
// https://www.shadertoy.com/view/MdX3Rr
|
||||
// https://en.wikipedia.org/wiki/Fractional_Brownian_motion
|
||||
float hifi_fbm(in vec2 p) {
|
||||
const mat2 m2 = mat2(0.8, -0.6, 0.6, 0.8);
|
||||
float f = 0.0;
|
||||
f += 0.5000 * hifi_noise(p); p = m2 * p * 2.02;
|
||||
f += 0.2500 * hifi_noise(p); p = m2 * p * 2.03;
|
||||
f += 0.1250 * hifi_noise(p); p = m2 * p * 2.01;
|
||||
f += 0.0625 * hifi_noise(p);
|
||||
const mat2 m2 = mat2(0.8, -0.6, 0.6, 0.8);
|
||||
float f = 0.0;
|
||||
f += 0.5000 * hifi_noise(p); p = m2 * p * 2.02;
|
||||
f += 0.2500 * hifi_noise(p); p = m2 * p * 2.03;
|
||||
f += 0.1250 * hifi_noise(p); p = m2 * p * 2.01;
|
||||
f += 0.0625 * hifi_noise(p);
|
||||
|
||||
return f / 0.9375;
|
||||
return f / 0.9375;
|
||||
}
|
||||
|
||||
<@endif@>
|
|
@ -17,7 +17,7 @@ float paintStripe(float value, float offset, float scale, float edge) {
|
|||
float width = fwidth(value);
|
||||
float normalizedWidth = width * scale;
|
||||
|
||||
float x0 = (value + offset) * scale - normalizedWidth / 2;
|
||||
float x0 = (value + offset) * scale - normalizedWidth / 2.0;
|
||||
float x1 = x0 + normalizedWidth;
|
||||
|
||||
float balance = 1.0 - edge;
|
||||
|
|
|
@ -40,8 +40,6 @@
|
|||
// OSX seems to have an issue using 14 as an attribute location for passing from the vertex to the fragment shader
|
||||
#define GPU_ATTR_V2F_STEREO_SIDE 8
|
||||
|
||||
#define GPU_UNIFORM_COLOR 101
|
||||
#define GPU_UNIFORM_TEXCOORD_RECT 102
|
||||
#define GPU_UNIFORM_EXTRA0 110
|
||||
#define GPU_UNIFORM_EXTRA1 111
|
||||
#define GPU_UNIFORM_EXTRA2 112
|
||||
|
@ -98,8 +96,6 @@ enum Attribute {
|
|||
|
||||
namespace uniform {
|
||||
enum Uniform {
|
||||
Color = GPU_UNIFORM_COLOR,
|
||||
TexCoordRect = GPU_UNIFORM_TEXCOORD_RECT,
|
||||
Extra0 = GPU_UNIFORM_EXTRA0,
|
||||
Extra1 = GPU_UNIFORM_EXTRA1,
|
||||
Extra2 = GPU_UNIFORM_EXTRA2,
|
||||
|
|
|
@ -152,6 +152,8 @@ public:
|
|||
|
||||
BufferStream makeRangedStream(uint32 offset, uint32 count = -1) const;
|
||||
|
||||
BufferStream& operator = (const BufferStream& src) = default;
|
||||
|
||||
protected:
|
||||
Buffers _buffers;
|
||||
Offsets _offsets;
|
||||
|
|
|
@ -2,11 +2,11 @@
|
|||
#ifdef __cplusplus
|
||||
# define _MAT4 Mat4
|
||||
# define _VEC4 Vec4
|
||||
# define _MUTABLE mutable
|
||||
# define _MUTABLE mutable
|
||||
#else
|
||||
# define _MAT4 mat4
|
||||
# define _VEC4 vec4
|
||||
# define _MUTABLE
|
||||
# define _MUTABLE
|
||||
#endif
|
||||
|
||||
struct _TransformCamera {
|
||||
|
|
|
@ -32,6 +32,15 @@ Mesh::Mesh(const Mesh& mesh) :
Mesh::~Mesh() {
}

void Mesh::setVertexFormatAndStream(const gpu::Stream::FormatPointer& vf, const gpu::BufferStreamPointer& vbs) {
_vertexFormat = vf;
_vertexStream = (*vbs);

auto attrib = _vertexFormat->getAttribute(gpu::Stream::POSITION);
_vertexBuffer = BufferView(vbs->getBuffers()[attrib._channel], vbs->getOffsets()[attrib._channel], vbs->getBuffers()[attrib._channel]->getSize(),
(gpu::uint16) vbs->getStrides()[attrib._channel], attrib._element);
}

void Mesh::setVertexBuffer(const BufferView& buffer) {
_vertexBuffer = buffer;
evalVertexFormat();

@ -107,11 +116,10 @@ Box Mesh::evalPartBound(int partNum) const {
index += part._startIndex;
auto endIndex = index;
endIndex += part._numIndices;
auto vertices = &_vertexBuffer.get<Vec3>(part._baseVertex);
for (;index != endIndex; index++) {
// skip primitive restart indices
if ((*index) != PRIMITIVE_RESTART_INDEX) {
box += vertices[(*index)];
box += _vertexBuffer.get<Vec3>(part._baseVertex + (*index));
}
}
}

@ -128,11 +136,10 @@ Box Mesh::evalPartsBound(int partStart, int partEnd) const {
Box partBound;
auto index = _indexBuffer.cbegin<uint>() + (*part)._startIndex;
auto endIndex = index + (*part)._numIndices;
auto vertices = &_vertexBuffer.get<Vec3>((*part)._baseVertex);
for (;index != endIndex; index++) {
// skip primitive restart indices
if ((*index) != (uint) PRIMITIVE_RESTART_INDEX) {
partBound += vertices[(*index)];
partBound += _vertexBuffer.get<Vec3>((*part)._baseVertex + (*index));
}
}

@ -59,6 +59,9 @@ public:
void removeAttribute(Slot slot);
const BufferView getAttributeBuffer(int attrib) const;

// Force vertex stream and Vertex format
void setVertexFormatAndStream(const gpu::Stream::FormatPointer& vf, const gpu::BufferStreamPointer& vbs);

// Stream format
const gpu::Stream::FormatPointer getVertexFormat() const { return _vertexFormat; }

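The evalPartBound / evalPartsBound edits above stop walking a raw Vec3 pointer and instead go back through the BufferView for every index. The likely motivation: with the new setVertexFormatAndStream path the position view carries the stride of a whole interleaved vertex, so stepping a packed Vec3* by the element index would read the wrong bytes. A minimal illustration with a hypothetical interleaved vertex layout:

#include <cstdint>

struct Vec3 { float x, y, z; };
// Hypothetical interleaved vertex: the next position is no longer sizeof(Vec3) away.
struct InterleavedVertex { Vec3 position; Vec3 normal; };

// Breaks once vertices are interleaved: steps by sizeof(Vec3) and lands inside the normals.
Vec3 positionPacked(const Vec3* positions, size_t index) {
    return positions[index];
}

// Stride-aware lookup, in the spirit of fetching through the BufferView per index.
Vec3 positionStrided(const uint8_t* base, size_t stride, size_t index) {
    return *reinterpret_cast<const Vec3*>(base + index * stride);
}
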
@ -14,7 +14,6 @@
|
|||
#ifndef PROCEDURAL_SHADER_CONSTANTS_H
|
||||
#define PROCEDURAL_SHADER_CONSTANTS_H
|
||||
|
||||
// Polyvox
|
||||
#define PROCEDURAL_UNIFORM_TIME 200
|
||||
#define PROCEDURAL_UNIFORM_DATE 201
|
||||
#define PROCEDURAL_UNIFORM_FRAME_COUNT 202
|
||||
|
|
|
@ -29,7 +29,6 @@
|
|||
|
||||
|
||||
namespace ru {
|
||||
using render_utils::slot::uniform::Uniform;
|
||||
using render_utils::slot::texture::Texture;
|
||||
using render_utils::slot::buffer::Buffer;
|
||||
}
|
||||
|
@ -39,13 +38,7 @@ namespace gr {
|
|||
using graphics::slot::buffer::Buffer;
|
||||
}
|
||||
|
||||
#define ANTIALIASING_USE_TAA 1
|
||||
|
||||
#if !ANTIALIASING_USE_TAA
|
||||
#include "fxaa_vert.h"
|
||||
#include "fxaa_frag.h"
|
||||
#include "fxaa_blend_frag.h"
|
||||
|
||||
|
||||
Antialiasing::Antialiasing() {
|
||||
_geometryId = DependencyManager::get<GeometryCache>()->allocateID();
|
||||
|
@ -58,30 +51,9 @@ Antialiasing::~Antialiasing() {
|
|||
}
|
||||
}
|
||||
|
||||
const gpu::PipelinePointer& Antialiasing::getAntialiasingPipeline(RenderArgs* args) {
|
||||
int width = args->_viewport.z;
|
||||
int height = args->_viewport.w;
|
||||
|
||||
if (_antialiasingBuffer && _antialiasingBuffer->getSize() != uvec2(width, height)) {
|
||||
_antialiasingBuffer.reset();
|
||||
}
|
||||
|
||||
if (!_antialiasingBuffer) {
|
||||
// Link the antialiasing FBO to texture
|
||||
_antialiasingBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("antialiasing"));
|
||||
auto format = gpu::Element::COLOR_SRGBA_32;
|
||||
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
|
||||
_antialiasingTexture = gpu::Texture::createRenderBuffer(format, width, height, gpu::Texture::SINGLE_MIP, defaultSampler);
|
||||
_antialiasingBuffer->setRenderBuffer(0, _antialiasingTexture);
|
||||
}
|
||||
|
||||
const gpu::PipelinePointer& Antialiasing::getAntialiasingPipeline() {
|
||||
if (!_antialiasingPipeline) {
|
||||
auto vs = fxaa_vert::getShader();
|
||||
auto ps = fxaa_frag::getShader();
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
|
||||
_texcoordOffsetLoc = program->getUniforms().findLocation("texcoordOffset");
|
||||
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::render_utils::program::fxaa);
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
|
||||
state->setDepthTest(false, false, gpu::LESS_EQUAL);
|
||||
|
@ -96,9 +68,7 @@ const gpu::PipelinePointer& Antialiasing::getAntialiasingPipeline(RenderArgs* ar
|
|||
|
||||
const gpu::PipelinePointer& Antialiasing::getBlendPipeline() {
|
||||
if (!_blendPipeline) {
|
||||
auto vs = fxaa_vert::getShader();
|
||||
auto ps = fxaa_blend_frag::getShader();
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
|
||||
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::render_utils::program::fxaa_blend);
|
||||
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
|
||||
state->setDepthTest(false, false, gpu::LESS_EQUAL);
|
||||
PrepareStencil::testNoAA(*state);
|
||||
|
@ -119,13 +89,30 @@ void Antialiasing::run(const render::RenderContextPointer& renderContext, const
|
|||
batch.enableStereo(false);
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
|
||||
// FIXME: NEED to simplify that code to avoid all the GeometryCahce call, this is purely pixel manipulation
|
||||
float fbWidth = renderContext->args->_viewport.z;
|
||||
float fbHeight = renderContext->args->_viewport.w;
|
||||
// float sMin = args->_viewport.x / fbWidth;
|
||||
// float sWidth = args->_viewport.z / fbWidth;
|
||||
// float tMin = args->_viewport.y / fbHeight;
|
||||
// float tHeight = args->_viewport.w / fbHeight;
|
||||
if (!_paramsBuffer) {
|
||||
_paramsBuffer = std::make_shared<gpu::Buffer>(sizeof(glm::vec4), nullptr);
|
||||
}
|
||||
|
||||
{
|
||||
int width = args->_viewport.z;
|
||||
int height = args->_viewport.w;
|
||||
if (_antialiasingBuffer && _antialiasingBuffer->getSize() != uvec2(width, height)) {
|
||||
_antialiasingBuffer.reset();
|
||||
}
|
||||
|
||||
if (!_antialiasingBuffer) {
|
||||
// Link the antialiasing FBO to texture
|
||||
_antialiasingBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("antialiasing"));
|
||||
auto format = gpu::Element::COLOR_SRGBA_32;
|
||||
auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
|
||||
_antialiasingTexture = gpu::Texture::createRenderBuffer(format, width, height, gpu::Texture::SINGLE_MIP, defaultSampler);
|
||||
_antialiasingBuffer->setRenderBuffer(0, _antialiasingTexture);
|
||||
glm::vec2 fbExtent { args->_viewport.z, args->_viewport.w };
|
||||
glm::vec2 inverseFbExtent = 1.0f / fbExtent;
|
||||
_paramsBuffer->setSubData(0, glm::vec4(inverseFbExtent, 0.0, 0.0));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
glm::mat4 projMat;
|
||||
Transform viewMat;
|
||||
|
@ -136,40 +123,18 @@ void Antialiasing::run(const render::RenderContextPointer& renderContext, const
|
|||
batch.setModelTransform(Transform());
|
||||
|
||||
// FXAA step
|
||||
auto pipeline = getAntialiasingPipeline(renderContext->args);
|
||||
auto pipeline = getAntialiasingPipeline();
|
||||
batch.setResourceTexture(0, sourceBuffer->getRenderBuffer(0));
|
||||
batch.setFramebuffer(_antialiasingBuffer);
|
||||
batch.setPipeline(pipeline);
|
||||
|
||||
// initialize the view-space unpacking uniforms using frustum data
|
||||
float left, right, bottom, top, nearVal, farVal;
|
||||
glm::vec4 nearClipPlane, farClipPlane;
|
||||
|
||||
args->getViewFrustum().computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
|
||||
|
||||
// float depthScale = (farVal - nearVal) / farVal;
|
||||
// float nearScale = -1.0f / nearVal;
|
||||
// float depthTexCoordScaleS = (right - left) * nearScale / sWidth;
|
||||
// float depthTexCoordScaleT = (top - bottom) * nearScale / tHeight;
|
||||
// float depthTexCoordOffsetS = left * nearScale - sMin * depthTexCoordScaleS;
|
||||
// float depthTexCoordOffsetT = bottom * nearScale - tMin * depthTexCoordScaleT;
|
||||
|
||||
batch._glUniform2f(_texcoordOffsetLoc, 1.0f / fbWidth, 1.0f / fbHeight);
|
||||
|
||||
glm::vec4 color(0.0f, 0.0f, 0.0f, 1.0f);
|
||||
glm::vec2 bottomLeft(-1.0f, -1.0f);
|
||||
glm::vec2 topRight(1.0f, 1.0f);
|
||||
glm::vec2 texCoordTopLeft(0.0f, 0.0f);
|
||||
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color, _geometryId);
|
||||
|
||||
batch.setUniformBuffer(0, _paramsBuffer);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
// Blend step
|
||||
batch.setResourceTexture(0, _antialiasingTexture);
|
||||
batch.setFramebuffer(sourceBuffer);
|
||||
batch.setPipeline(getBlendPipeline());
|
||||
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color, _geometryId);
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
});
|
||||
}
|
||||
#else
|
||||
|
@ -314,7 +279,11 @@ void Antialiasing::run(const render::RenderContextPointer& renderContext, const
|
|||
// Must match the bindg point in the fxaa_blend.slf shader
|
||||
batch.setResourceFramebufferSwapChainTexture(0, _antialiasingBuffers, 1);
|
||||
// Disable sharpen if FXAA
|
||||
batch._glUniform1f(ru::Uniform::TaaSharpenIntensity, _sharpen * _params.get().regionInfo.z);
|
||||
if (!_blendParamsBuffer) {
|
||||
_blendParamsBuffer = std::make_shared<gpu::Buffer>(sizeof(glm::vec4), nullptr);
|
||||
}
|
||||
_blendParamsBuffer->setSubData(0, _sharpen * _params.get().regionInfo.z);
|
||||
batch.setUniformBuffer(0, _blendParamsBuffer);
|
||||
}
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
batch.advance(_antialiasingBuffers);
|
||||
|
|
|
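The Antialiasing hunks above move both the FXAA texel-offset uniform and the TAA sharpen intensity into small uniform buffers, the same pattern as the PolyVox change earlier in this diff. A condensed sketch of the FXAA side, assembled only from calls that appear in the hunks; the surrounding structure is illustrative. The buffer carries the inverse framebuffer extent, i.e. the size of one texel, which an FXAA pass needs in order to sample its neighbourhood.

// Sketch assembled from the diff above: lazily allocate, refresh on resize, bind per frame.
if (!_paramsBuffer) {
    _paramsBuffer = std::make_shared<gpu::Buffer>(sizeof(glm::vec4), nullptr);
}
glm::vec2 fbExtent { args->_viewport.z, args->_viewport.w };
glm::vec2 inverseFbExtent = 1.0f / fbExtent;               // one texel, in UV units
_paramsBuffer->setSubData(0, glm::vec4(inverseFbExtent, 0.0, 0.0));

// ... later, inside the batch lambda:
batch.setUniformBuffer(0, _paramsBuffer);                   // shader-side binding 0 is assumed
batch.draw(gpu::TRIANGLE_STRIP, 4);

This also removes the last per-draw GeometryCache quad in the FXAA path; a full-screen triangle strip plus the params buffer replaces the old _glUniform2f texcoord offset and renderQuad call.
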
@ -134,6 +134,10 @@ signals:
|
|||
#define SET_BIT(bitfield, bitIndex, value) bitfield = ((bitfield) & ~(1 << (bitIndex))) | ((value) << (bitIndex))
|
||||
#define GET_BIT(bitfield, bitIndex) ((bitfield) & (1 << (bitIndex)))
|
||||
|
||||
#define ANTIALIASING_USE_TAA 1
|
||||
|
||||
#if ANTIALIASING_USE_TAA
|
||||
|
||||
struct TAAParams {
|
||||
float nope{ 0.0f };
|
||||
float blend{ 0.15f };
|
||||
|
@ -186,7 +190,7 @@ private:
|
|||
|
||||
gpu::FramebufferSwapChainPointer _antialiasingBuffers;
|
||||
gpu::TexturePointer _antialiasingTextures[2];
|
||||
|
||||
gpu::BufferPointer _blendParamsBuffer;
|
||||
gpu::PipelinePointer _antialiasingPipeline;
|
||||
gpu::PipelinePointer _blendPipeline;
|
||||
gpu::PipelinePointer _debugBlendPipeline;
|
||||
|
@ -197,7 +201,7 @@ private:
|
|||
};
|
||||
|
||||
|
||||
/*
|
||||
#else
|
||||
class AntiAliasingConfig : public render::Job::Config {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(bool enabled MEMBER enabled)
|
||||
|
@ -219,18 +223,15 @@ public:
|
|||
const gpu::PipelinePointer& getBlendPipeline();
|
||||
|
||||
private:
|
||||
|
||||
// Uniforms for AA
|
||||
gpu::int32 _texcoordOffsetLoc;
|
||||
|
||||
gpu::FramebufferPointer _antialiasingBuffer;
|
||||
|
||||
gpu::TexturePointer _antialiasingTexture;
|
||||
gpu::BufferPointer _paramsBuffer;
|
||||
|
||||
gpu::PipelinePointer _antialiasingPipeline;
|
||||
gpu::PipelinePointer _blendPipeline;
|
||||
int _geometryId { 0 };
|
||||
};
|
||||
*/
|
||||
#endif
|
||||
|
||||
#endif // hifi_AntialiasingEffect_h
|
||||
|
|
|
@ -184,6 +184,7 @@ void BloomDraw::run(const render::RenderContextPointer& renderContext, const Inp
|
|||
}
|
||||
|
||||
DebugBloom::DebugBloom() {
|
||||
_params = std::make_shared<gpu::Buffer>(sizeof(glm::vec4), nullptr);
|
||||
}
|
||||
|
||||
void DebugBloom::configure(const Config& config) {
|
||||
|
@ -227,7 +228,8 @@ void DebugBloom::run(const render::RenderContextPointer& renderContext, const In
|
|||
|
||||
Transform modelTransform;
|
||||
if (_mode == DebugBloomConfig::MODE_ALL_LEVELS) {
|
||||
batch._glUniform4f(gpu::slot::uniform::TexCoordRect, 0.0f, 0.0f, 1.f, 1.f);
|
||||
_params->setSubData(0, vec4(0.0f, 0.0f, 1.f, 1.f));
|
||||
batch.setUniformBuffer(0, _params);
|
||||
|
||||
modelTransform = gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, args->_viewport / 2);
|
||||
modelTransform.postTranslate(glm::vec3(-1.0f, 1.0f, 0.0f));
|
||||
|
@ -255,7 +257,8 @@ void DebugBloom::run(const render::RenderContextPointer& renderContext, const In
|
|||
|
||||
viewport.z /= 2;
|
||||
|
||||
batch._glUniform4f(gpu::slot::uniform::TexCoordRect, 0.5f, 0.0f, 0.5f, 1.f);
|
||||
_params->setSubData(0, vec4(0.5f, 0.0f, 0.5f, 1.f));
|
||||
batch.setUniformBuffer(0, _params);
|
||||
|
||||
modelTransform = gpu::Framebuffer::evalSubregionTexcoordTransform(framebufferSize, viewport);
|
||||
modelTransform.postTranslate(glm::vec3(-1.0f, 0.0f, 0.0f));
|
||||
|
|
|
@ -121,6 +121,7 @@ public:
|
|||
|
||||
private:
|
||||
gpu::PipelinePointer _pipeline;
|
||||
gpu::BufferPointer _params;
|
||||
DebugBloomConfig::Mode _mode;
|
||||
};
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
struct Parameters
|
||||
{
|
||||
BT_VEC2 _deltaUV;
|
||||
float _threshold;
|
||||
float _threshold;
|
||||
int _sampleCount;
|
||||
};
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ void main(void) {
|
|||
for (int x=0 ; x<parameters._sampleCount ; x++) {
|
||||
vec4 color = texture(colorMap, uv);
|
||||
float luminance = (color.r+color.g+color.b) / 3.0;
|
||||
float mask = clamp((luminance-parameters._threshold)*0.25, 0, 1);
|
||||
float mask = clamp((luminance-parameters._threshold)*0.25, 0.0, 1.0);
|
||||
|
||||
color *= mask;
|
||||
maskedColor += color;
|
||||
|
@ -39,6 +39,6 @@ void main(void) {
|
|||
|
||||
startUv.y += parameters._deltaUV.y;
|
||||
}
|
||||
maskedColor /= parameters._sampleCount * parameters._sampleCount;
|
||||
maskedColor /= float(parameters._sampleCount * parameters._sampleCount);
|
||||
outFragColor = vec4(maskedColor.rgb, 1.0);
|
||||
}
|
||||
|
|
|
@ -137,7 +137,7 @@ vec4 unpackDeferredPosition(float depthValue, vec2 texcoord) {
|
|||
// This method to unpack position is fastesst
|
||||
vec4 unpackDeferredPositionFromZdb(vec2 texcoord) {
|
||||
float Zdb = texture(depthMap, texcoord).x;
|
||||
return unpackDeferredPosition(Zdb, texcoord);
|
||||
return unpackDeferredPosition(Zdb, texcoord);
|
||||
}
|
||||
|
||||
vec4 unpackDeferredPositionFromZeye(vec2 texcoord) {
|
||||
|
|
|
@ -32,7 +32,6 @@
|
|||
namespace ru {
|
||||
using render_utils::slot::texture::Texture;
|
||||
using render_utils::slot::buffer::Buffer;
|
||||
using render_utils::slot::uniform::Uniform;
|
||||
}
|
||||
|
||||
namespace gr {
|
||||
|
|
|
@ -38,8 +38,8 @@ struct DeferredFrameTransform {
|
|||
mat4 _projectionMono;
|
||||
mat4 _viewInverse;
|
||||
mat4 _view;
|
||||
mat4 _projectionUnJittered[2];
|
||||
mat4 _invProjectionUnJittered[2];
|
||||
mat4 _projectionUnJittered[2];
|
||||
mat4 _invProjectionUnJittered[2];
|
||||
};
|
||||
|
||||
layout(binding=RENDER_UTILS_BUFFER_DEFERRED_FRAME_TRANSFORM) uniform deferredFrameTransformBuffer {
|
||||
|
@ -68,10 +68,10 @@ mat4 getProjectionMono() {
|
|||
return frameTransform._projectionMono;
|
||||
}
|
||||
mat4 getUnjitteredProjection(int side) {
|
||||
return frameTransform._projectionUnJittered[side];
|
||||
return frameTransform._projectionUnJittered[side];
|
||||
}
|
||||
mat4 getUnjitteredInvProjection(int side) {
|
||||
return frameTransform._invProjectionUnJittered[side];
|
||||
return frameTransform._invProjectionUnJittered[side];
|
||||
}
|
||||
|
||||
// positive near distance of the projection
|
||||
|
@ -158,7 +158,7 @@ vec3 evalUnjitteredEyePositionFromZdb(int side, float Zdb, vec2 texcoord) {
|
|||
}
|
||||
|
||||
vec3 evalEyePositionFromZeye(int side, float Zeye, vec2 texcoord) {
|
||||
float Zdb = evalZdbFromZeye(Zeye);
|
||||
float Zdb = evalZdbFromZeye(Zeye);
|
||||
return evalEyePositionFromZdb(side, Zdb, texcoord);
|
||||
}
|
||||
|
||||
|
|
|
@ -13,12 +13,12 @@
|
|||
|
||||
struct FadeParameters
|
||||
{
|
||||
VEC4 _noiseInvSizeAndLevel;
|
||||
VEC4 _innerEdgeColor;
|
||||
VEC4 _outerEdgeColor;
|
||||
VEC2 _edgeWidthInvWidth;
|
||||
FLOAT32 _baseLevel;
|
||||
INT32 _isInverted;
|
||||
VEC4 _noiseInvSizeAndLevel;
|
||||
VEC4 _innerEdgeColor;
|
||||
VEC4 _outerEdgeColor;
|
||||
VEC2 _edgeWidthInvWidth;
|
||||
FLOAT32 _baseLevel;
|
||||
INT32 _isInverted;
|
||||
};
|
||||
|
||||
// <@if 1@>
|
||||
|
|
|
@ -2104,9 +2104,7 @@ void GeometryCache::useSimpleDrawPipeline(gpu::Batch& batch, bool noBlend) {
|
|||
auto stateNoBlend = std::make_shared<gpu::State>();
|
||||
PrepareStencil::testMaskDrawShape(*stateNoBlend);
|
||||
|
||||
auto noBlendPS = gpu::Shader::createVertex(shader::gpu::fragment::DrawTextureOpaque);
|
||||
auto programNoBlend = gpu::Shader::createProgram(shader::render_utils::program::standardDrawTextureNoBlend);
|
||||
|
||||
_standardDrawPipelineNoBlend = gpu::Pipeline::create(programNoBlend, stateNoBlend);
|
||||
});
|
||||
|
||||
|
|
|

@@ -22,8 +22,8 @@ layout(binding=RENDER_UTILS_BUFFER_HIGHLIGHT_PARAMS) uniform highlightParamsBuff
layout(binding=RENDER_UTILS_TEXTURE_HIGHLIGHT_SCENE_DEPTH) uniform sampler2D sceneDepthMap;
layout(binding=RENDER_UTILS_TEXTURE_HIGHLIGHT_DEPTH) uniform sampler2D highlightedDepthMap;

-in vec2 varTexCoord0;
-out vec4 outFragColor;
+layout(location=0) in vec2 varTexCoord0;
+layout(location=0) out vec4 outFragColor;

const float FAR_Z = 1.0;
const float LINEAR_DEPTH_BIAS = 5e-3;
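
Another recurring edit, visible above, gives every stage interface variable an explicit layout(location=...) qualifier so that vertex outputs and fragment inputs are matched by location rather than by name, which is what SPIR-V style pipelines and separable programs rely on. A minimal matched pair, independent of the repository's includes:

    // Vertex stage (illustrative)
    #version 450
    layout(location = 0) in vec3 inPosition;
    layout(location = 0) out vec4 varColor;      // must line up with the fragment 'in' at location 0...
    layout(location = 1) out vec2 varTexCoord0;  // ...and at location 1
    void main(void) {
        varColor = vec4(1.0);
        varTexCoord0 = inPosition.xy * 0.5 + 0.5;
        gl_Position = vec4(inPosition, 1.0);
    }

    // Fragment stage (illustrative)
    #version 450
    layout(location = 0) in vec4 varColor;
    layout(location = 1) in vec2 varTexCoord0;
    layout(location = 0) out vec4 outFragColor;  // color attachment 0
    void main(void) {
        outFragColor = varColor * vec4(varTexCoord0, 0.0, 1.0);
    }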

@@ -55,10 +55,10 @@ void main(void) {
            discard;
<@endif@>
        } else {
-           vec2 halfTexel = getInvWidthHeight() / 2;
+           vec2 halfTexel = getInvWidthHeight() / 2.0;
            vec2 texCoord0 = varTexCoord0+halfTexel;
            float weight = 0.0;
-           vec2 deltaUv = params._size / params._blurKernelSize;
+           vec2 deltaUv = params._size / float(params._blurKernelSize);
            vec2 lineStartUv = texCoord0 - params._size / 2.0;
            vec2 uv;
            int x;

@@ -87,7 +87,7 @@ void main(void) {
        }
    }

-   if (intensity > 0) {
+   if (intensity > 0.0) {
        //  sumOutlineDepth /= intensity;
    } else {
        sumOutlineDepth = FAR_Z;

@@ -28,7 +28,6 @@ using namespace render;
namespace ru {
    using render_utils::slot::texture::Texture;
    using render_utils::slot::buffer::Buffer;
-   using render_utils::slot::uniform::Uniform;
}

namespace gr {

@@ -45,7 +45,7 @@ struct ShadowSampleOffsets {
};

ShadowSampleOffsets evalShadowFilterOffsets(vec4 position) {
-   float shadowScale = getShadowScale();
+   float shadowScale = getShadowScale();
    ShadowSampleOffsets offsets;

#if SHADOW_SCREEN_SPACE_DITHER

@@ -67,10 +67,10 @@ ShadowSampleOffsets evalShadowFilterOffsets(vec4 position) {
    ivec2 offset = coords & ivec2(1,1);
    offset.y = (offset.x+offset.y) & 1;

-   offsets.points[0] = shadowScale * vec3(offset + PCFkernel[0], 0.0);
-   offsets.points[1] = shadowScale * vec3(offset + PCFkernel[1], 0.0);
-   offsets.points[2] = shadowScale * vec3(offset + PCFkernel[2], 0.0);
-   offsets.points[3] = shadowScale * vec3(offset + PCFkernel[3], 0.0);
+   offsets.points[0] = shadowScale * vec3(vec2(offset) + PCFkernel[0], 0.0);
+   offsets.points[1] = shadowScale * vec3(vec2(offset) + PCFkernel[1], 0.0);
+   offsets.points[2] = shadowScale * vec3(vec2(offset) + PCFkernel[2], 0.0);
+   offsets.points[3] = shadowScale * vec3(vec2(offset) + PCFkernel[3], 0.0);

    return offsets;
}
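
The shadow-filter hunk is the same strictness fix applied at vector width: offset is an ivec2, and adding it to the float PCFkernel entries needs an explicit vec2(offset) conversion under GLSL ES style rules. A reduced sketch; the kernel values here are placeholders rather than the repository's:

    #version 310 es
    precision highp float;
    layout(location = 0) out vec4 outFragColor;
    const vec2 kernel[4] = vec2[4](vec2(-0.5, -0.5), vec2(0.5, -0.5), vec2(-0.5, 0.5), vec2(0.5, 0.5));
    void main(void) {
        ivec2 coords = ivec2(gl_FragCoord.xy);
        ivec2 offset = coords & ivec2(1, 1);   // integer checkerboard offset, as in the hunk above
        // vec2(offset) makes the int -> float conversion explicit before mixing with the kernel.
        vec2 tap = vec2(offset) + kernel[0];
        outFragColor = vec4(tap, 0.0, 1.0);
    }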

@@ -104,7 +104,7 @@ float evalShadowAttenuation(vec3 worldLightDir, vec4 worldPosition, float viewDe
    vec3 cascadeMix;
    bvec4 isPixelOnCascade;
    int cascadeIndex;
-   float oneMinusNdotL = 1.0 - clamp(dot(worldLightDir, worldNormal), 0, 1);
+   float oneMinusNdotL = 1.0 - clamp(dot(worldLightDir, worldNormal), 0.0, 1.0);

    for (cascadeIndex=0 ; cascadeIndex<getShadowCascadeCount() ; cascadeIndex++) {
        cascadeShadowCoords[cascadeIndex] = evalShadowTexcoord(cascadeIndex, worldPosition);

@@ -14,7 +14,7 @@
<@include Shadows_shared.slh@>

layout(std140, binding=RENDER_UTILS_BUFFER_SHADOW_PARAMS) uniform shadowTransformBuffer {
-   ShadowParameters shadow;
+   ShadowParameters shadow;
};

int getShadowCascadeCount() {

@@ -30,26 +30,26 @@ float evalShadowFalloff(float depth) {
}

mat4 getShadowReprojection(int cascadeIndex) {
-   return shadow.cascades[cascadeIndex].reprojection;
+   return shadow.cascades[cascadeIndex].reprojection;
}

float getShadowScale() {
-   return shadow.invMapSize;
+   return shadow.invMapSize;
}

float getShadowFixedBias(int cascadeIndex) {
-   return shadow.cascades[cascadeIndex].fixedBias;
+   return shadow.cascades[cascadeIndex].fixedBias;
}

float getShadowSlopeBias(int cascadeIndex) {
-   return shadow.cascades[cascadeIndex].slopeBias;
+   return shadow.cascades[cascadeIndex].slopeBias;
}


// Compute the texture coordinates from world coordinates
vec4 evalShadowTexcoord(int cascadeIndex, vec4 position) {
-   vec4 shadowCoord = getShadowReprojection(cascadeIndex) * position;
-   return vec4(shadowCoord.xyz, 1.0);
+   vec4 shadowCoord = getShadowReprojection(cascadeIndex) * position;
+   return vec4(shadowCoord.xyz, 1.0);
}

bool isShadowCascadeProjectedOnPixel(vec4 cascadeTexCoords) {

@@ -8,8 +8,8 @@
#define SHADOW_CASCADE_MAX_COUNT 4

struct ShadowTransform {
-   MAT4 reprojection;
-   float fixedBias;
+   MAT4 reprojection;
+   float fixedBias;
    float slopeBias;
    float _padding1;
    float _padding2;

@@ -70,9 +70,8 @@ void TextRenderer3D::draw(gpu::Batch& batch, float x, float y, const QString& st
                          const glm::vec2& bounds, bool layered) {
    // The font does all the OpenGL work
    if (_font) {
        // Cache color so that the pointer stays valid.
        _color = color;
-       _font->drawString(batch, x, y, str, &_color, _effectType, bounds, layered);
+       _font->drawString(batch, _drawInfo, str, _color, _effectType, { x, y }, bounds, layered);
    }
}

@@ -15,12 +15,14 @@
#include <memory>
#include <glm/glm.hpp>
#include <QColor>
#include <gpu/Forward.h>

namespace gpu {
class Batch;
}
class Font;

+#include "text/Font.h"
#include "text/EffectType.h"
#include "text/FontFamilies.h"

@@ -51,7 +53,7 @@ private:

    // text color
    glm::vec4 _color;

+   Font::DrawInfo _drawInfo;
    std::shared_ptr<Font> _font;
};

@@ -7,7 +7,7 @@

struct DebugParameters
{
-   INT32 _shadowCascadeIndex;
+   INT32 _shadowCascadeIndex;
};

// <@if 1@>

@@ -18,7 +18,8 @@

<$declareStandardTransform()$>

-uniform vec4 sphereParam;
+// FIXME make into a uniform buffer or push constant if this shader ever comes into use
+vec4 sphereParam = vec4(0.0);

layout(location=RENDER_UTILS_ATTR_TEXCOORD01) out vec4 _texCoord01;
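
Here a loose "uniform vec4 sphereParam;" is replaced by a compile-time default plus a FIXME pointing at a uniform buffer or push constant as the proper home if the shader is ever revived. A hedged sketch of what that FIXME suggests; the block and struct names are invented for illustration and the shader body is a stand-in, not the repository's code:

    #version 450
    struct SphereParams {
        vec4 sphereParam;   // xyz = center, w = radius is one plausible convention (assumption)
    };
    layout(std140, binding = 0) uniform sphereParamsBuffer {
        SphereParams sphereParams;
    };
    layout(location = 0) in vec3 inPosition;
    void main(void) {
        // Scale and offset the unit geometry by parameters read from the buffer
        // instead of the hard-coded vec4(0.0) default used above.
        gl_Position = vec4(inPosition * sphereParams.sphereParam.w + sphereParams.sphereParam.xyz, 1.0);
    }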

@@ -41,7 +42,7 @@ void main(void) {
        }
#endif
#endif
-       _texCoord01.xy = vec4(projected.xy, 0.0, 1.0) * gl_Position.w;
+       _texCoord01 = vec4(projected.xy, 0.0, 1.0) * gl_Position.w;
    } else {
        const float depth = -1.0; //Draw at near plane
        const vec4 UNIT_QUAD[4] = vec4[4](

@@ -60,7 +61,7 @@ void main(void) {
#endif
#endif

-       _texCoord01.xy = vec4((pos.xy + 1.0) * 0.5, 0.0, 1.0);
+       _texCoord01 = vec4((pos.xy + 1.0) * 0.5, 0.0, 1.0);

#ifdef GPU_TRANSFORM_IS_STEREO
#ifdef GPU_TRANSFORM_STEREO_SPLIT_SCREEN

@@ -13,9 +13,9 @@
<@include DeferredBufferWrite.slh@>
<@include gpu/Paint.slh@>

-in vec4 varColor;
-in vec3 varTexcoord;
-in vec3 varEyePos;
+layout(location=0) in vec4 varColor;
+layout(location=1) in vec3 varTexcoord;
+layout(location=2) in vec3 varEyePos;

void main(void) {
    if (varColor.w > 0.0) {

@@ -19,9 +19,9 @@
<$declareWorkloadProxies()$>


-out vec4 varColor;
-out vec3 varTexcoord;
-out vec3 varEyePos;
+layout(location=0) out vec4 varColor;
+layout(location=1) out vec3 varTexcoord;
+layout(location=2) out vec3 varEyePos;

void main(void) {
    const vec4 UNIT_SPRITE[3] = vec4[3](

@@ -14,9 +14,9 @@
<@include DeferredBufferWrite.slh@>
<@include gpu/Paint.slh@>

-in vec4 varColor;
-in vec3 varTexcoord;
-in vec3 varEyePos;
+layout(location=0) in vec4 varColor;
+layout(location=1) in vec3 varTexcoord;
+layout(location=2) in vec3 varEyePos;

void main(void) {
    if (varColor.w > 0.0) {

@@ -18,9 +18,9 @@
<@include WorkloadResource.slh@>
<$declareWorkloadViews()$>

-out vec4 varColor;
-out vec3 varTexcoord;
-out vec3 varEyePos;
+layout(location=0) out vec4 varColor;
+layout(location=1) out vec3 varTexcoord;
+layout(location=2) out vec3 varEyePos;

const int NUM_VERTICES_PER_SEGMENT = 2;
const int NUM_SEGMENT_PER_VIEW_REGION = 65;

@@ -79,7 +79,7 @@ void main(void) {
    <$transformModelToEyeDir(cam, obj, originSpaceTan, tanEye)$>

    lateralDir = normalize(cross(vec3(0.0, 0.0, 1.0), normalize(tanEye)));
-   posEye.xyz += (0.005 * abs(posEye.z) * (regionID + 1)) * (-1.0 + 2.0 * float(segmentVertexID)) * lateralDir;
+   posEye.xyz += (0.005 * abs(posEye.z) * float(regionID + 1)) * (-1.0 + 2.0 * float(segmentVertexID)) * lateralDir;
    varEyePos = posEye.xyz;

    <$transformEyeToClipPos(cam, posEye, gl_Position)$>

@@ -24,7 +24,9 @@ precision mediump int;

layout(binding=0) uniform sampler2D colorTexture;
//uniform sampler2D historyTexture;
-layout(location=0) uniform vec2 texcoordOffset;
+
+// FIXME make into a uniform buffer or push constant if this shader ever comes into use
+vec2 texcoordOffset = vec2(0.0);

layout(location=0) in vec2 varTexCoord0;
layout(location=0) out vec4 outFragColor;

@@ -66,7 +68,7 @@ void main() {
        outFragColor.w = 1.0;

    }*/
-   if (gl_FragCoord.x > 800) {
+   if (gl_FragCoord.x > 800.0) {
/*      // filter width limit for dependent "two-tap" texture samples
        float FXAA_SPAN_MAX = 8.0;

@@ -18,7 +18,14 @@ layout(location=0) in vec2 varTexCoord0;
layout(location=0) out vec4 outFragColor;

layout(binding=0) uniform sampler2D colorTexture;
-layout(location=GPU_UNIFORM_EXTRA0) uniform float sharpenIntensity;
+
+struct FxaaBlendParams {
+    vec4 sharpenIntensity;
+};
+
+layout(binding=0) uniform fxaaBlendParamsBuffer {
+    FxaaBlendParams params;
+};

void main(void) {
    vec4 pixels[9];

@@ -37,7 +44,7 @@ void main(void) {

    sharpenedPixel = pixels[4]*6.8 - (pixels[1]+pixels[3]+pixels[5]+pixels[7]) - (pixels[0]+pixels[2]+pixels[6]+pixels[8])*0.7;

-   vec4 minColor = max(vec4(0), pixels[4]-vec4(0.5));
-   vec4 maxColor = pixels[4]+vec4(0.5);
-   outFragColor = clamp(pixels[4] + sharpenedPixel * sharpenIntensity, minColor, maxColor);
+   vec4 minColor = max(vec4(0), pixels[4]-vec4(0.5));
+   vec4 maxColor = pixels[4]+vec4(0.5);
+   outFragColor = clamp(pixels[4] + sharpenedPixel * params.sharpenIntensity.x, minColor, maxColor);
}
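
The fxaa_blend change goes the same way: the GPU_UNIFORM_EXTRA0 float uniform becomes a member of a bound uniform block, stored as a vec4 and read back as params.sharpenIntensity.x. Under std140 a struct holding a single float is padded out to 16 bytes anyway, so widening the scalar to a vec4 costs nothing and keeps any CPU-side mirror struct trivial to declare. A compilable sketch using the names from the hunk above; the std140 qualifier and the simplified main body are assumptions, not the repository's code:

    #version 450
    struct FxaaBlendParams {
        vec4 sharpenIntensity;   // only .x is consumed; yzw act as padding
    };
    layout(std140, binding = 0) uniform fxaaBlendParamsBuffer {
        FxaaBlendParams params;
    };
    layout(binding = 0) uniform sampler2D colorTexture;
    layout(location = 0) in vec2 varTexCoord0;
    layout(location = 0) out vec4 outFragColor;
    void main(void) {
        vec4 color = texture(colorTexture, varTexCoord0);
        // Stand-in for the real sharpen math: just scale by the configured intensity.
        outFragColor = color * params.sharpenIntensity.x;
    }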

@@ -22,9 +22,9 @@ void main(void) {
    float alpha = 1.0 - abs(distanceFromCenter);

    // Convert from a linear alpha curve to a sharp peaked one
-   alpha = _color.a * pow(alpha, 10.0);
-
-   // Drop everything where the curve falls off to nearly nothing
+   alpha = _color.a * pow(alpha, 10.0);
+
+   // Drop everything where the curve falls off to nearly nothing
    if (alpha <= 0.05) {
        discard;
    }

@@ -42,7 +42,7 @@ void main(void) {

    ivec3 cluster = clusterGrid_getCluster(clusterIndex);
    int numLights = cluster.x + cluster.y;
-   float numLightsScale = clamp(numLights * 0.05, 0.01, 1.0);
+   float numLightsScale = clamp(float(numLights) * 0.05, 0.01, 1.0);

    int clusterOffset = cluster.z;

@@ -90,6 +90,6 @@ void main(void) {
        numLightTouching++;
    }

-   _fragColor = vec4(colorRamp(1.0 - (numLightTouching / 12.0f)), (numLightTouching > 0 ? 0.5 + 0.5 * numLightsScale : 0.0));
+   _fragColor = vec4(colorRamp(1.0 - (float(numLightTouching) / 12.0f)), (numLightTouching > 0 ? 0.5 + 0.5 * numLightsScale : 0.0));
}

@@ -56,7 +56,7 @@ void main(void) {
    ivec3 cluster = clusterGrid_getCluster(gpu_InstanceID());
    int numLights = cluster.x + cluster.y;

-   float numLightsScale = clamp(numLights * 0.1, 0.0, 1.0);
+   float numLightsScale = clamp(float(numLights) * 0.1, 0.0, 1.0);

    ivec3 clusterPos = frustumGrid_indexToCluster(gpu_InstanceID());

@@ -39,7 +39,7 @@ void main(void) {

    ivec3 cluster = clusterGrid_getCluster(frustumGrid_clusterToIndex(clusterPos));
    int numLights = cluster.x + cluster.y;
-   float numLightsScale = clamp(numLights * 0.05, 0.01, 1.0);
+   float numLightsScale = clamp(float(numLights) * 0.05, 0.01, 1.0);


    ivec3 dims = frustumGrid.dims.xyz;

@@ -87,7 +87,7 @@ void main(void) {
        1.0,
        occlusionTex,
        fragPositionES,
-       fragPositionWS,
+       fragPositionWS,
        albedo,
        fresnel,
        metallic,

@@ -97,7 +97,7 @@ void main(void) {
        1.0,
        occlusionTex,
        fragPositionES,
-       fragPositionWS,
+       fragPositionWS,
        albedo,
        fresnel,
        metallic,

Some files were not shown because too many files have changed in this diff.