Merged with master

This commit is contained in:
Olivier Prat 2018-09-11 10:19:40 +02:00
commit f12cd2e55a
194 changed files with 2149 additions and 1156 deletions

View file

@ -96,7 +96,6 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set<recording::Recorder>();
DependencyManager::set<recording::ClipCache>();
DependencyManager::set<ScriptCache>();
DependencyManager::set<RecordingScriptingInterface>();
DependencyManager::set<UsersScriptingInterface>();
@ -177,6 +176,8 @@ void Agent::run() {
// Create ScriptEngines on threaded-assignment thread then move to main thread.
DependencyManager::set<ScriptEngines>(ScriptEngine::AGENT_SCRIPT)->moveToThread(qApp->thread());
DependencyManager::set<ScriptCache>();
// make sure we request our script once the agent connects to the domain
auto nodeList = DependencyManager::get<NodeList>();
@ -360,154 +361,178 @@ void Agent::scriptRequestFinished() {
}
void Agent::executeScript() {
// the following block is scoped so that any shared pointers we take here
// are cleared before we call setFinished at the end of the function
{
_scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);
// setup an Avatar for the script to use
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
scriptedAvatar->setID(getSessionUUID());
connect(_scriptEngine.data(), SIGNAL(update(float)),
scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
scriptedAvatar->setForceFaceTrackerConnected(true);
// call model URL setters with empty URLs so our avatar, if user, will have the default models
scriptedAvatar->setSkeletonModelURL(QUrl());
// force lazy initialization of the head data for the scripted avatar
// since it is referenced below by computeLoudness and getAudioLoudness
scriptedAvatar->getHeadOrientation();
// give this AvatarData object to the script engine
_scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());
// give scripts access to the Users object
_scriptEngine->registerGlobalObject("Users", DependencyManager::get<UsersScriptingInterface>().data());
auto player = DependencyManager::get<recording::Deck>();
connect(player.data(), &recording::Deck::playbackStateChanged, [=] {
if (player->isPlaying()) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
if (recordingInterface->getPlayFromCurrentLocation()) {
scriptedAvatar->setRecordingBasis();
auto player = DependencyManager::get<recording::Deck>();
connect(player.data(), &recording::Deck::playbackStateChanged, [&player, &scriptedAvatar] {
if (player->isPlaying()) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
if (recordingInterface->getPlayFromCurrentLocation()) {
scriptedAvatar->setRecordingBasis();
}
// these procedural movements are included in the recordings
scriptedAvatar->setHasProceduralEyeFaceMovement(false);
scriptedAvatar->setHasProceduralBlinkFaceMovement(false);
scriptedAvatar->setHasAudioEnabledFaceMovement(false);
} else {
scriptedAvatar->clearRecordingBasis();
// restore procedural blendshape movement
scriptedAvatar->setHasProceduralEyeFaceMovement(true);
scriptedAvatar->setHasProceduralBlinkFaceMovement(true);
scriptedAvatar->setHasAudioEnabledFaceMovement(true);
}
} else {
scriptedAvatar->clearRecordingBasis();
}
});
});
using namespace recording;
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [scriptedAvatar](Frame::ConstPointer frame) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();
// FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
if (!useFrameSkeleton) {
static std::once_flag warning;
std::call_once(warning, [] {
qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
});
}
AvatarData::fromFrame(frame->data, *scriptedAvatar);
});
using namespace recording;
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
static quint16 audioSequenceNumber{ 0 };
QByteArray audio(frame->data);
if (_isNoiseGateEnabled) {
int16_t* samples = reinterpret_cast<int16_t*>(audio.data());
int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
_audioGate.render(samples, samples, numSamples);
}
computeLoudness(&audio, scriptedAvatar);
// state machine to detect gate opening and closing
bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
_audioGateOpen = audioGateOpen;
Q_UNUSED(openedInLastBlock);
// the codec must be flushed to silence before sending silent packets,
// so delay the transition to silent packets by one packet after becoming silent.
auto packetType = PacketType::MicrophoneAudioNoEcho;
if (!audioGateOpen && !closedInLastBlock) {
packetType = PacketType::SilentAudioFrame;
}
Transform audioTransform;
auto headOrientation = scriptedAvatar->getHeadOrientation();
audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
audioTransform.setRotation(headOrientation);
QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);
} else {
encodedBuffer = audio;
}
AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
packetType, _selectedCodecName);
});
auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());
// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));
_scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCacheScriptingInterface>().data());
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCacheScriptingInterface>().data());
QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
_scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);
_scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
LocationScriptingInterface::locationSetter);
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();
_scriptEngine->registerGlobalObject("Recording", recordingInterface.data());
// FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
if (!useFrameSkeleton) {
static std::once_flag warning;
std::call_once(warning, [] {
qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
});
entityScriptingInterface->init();
_entityViewer.init();
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
_avatarAudioTimer.start();
// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
QTimer* avatarDataTimer = new QTimer(this);
connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
avatarDataTimer->setSingleShot(false);
avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
avatarDataTimer->setTimerType(Qt::PreciseTimer);
avatarDataTimer->start();
_scriptEngine->run();
Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);
if (recordingInterface->isPlaying()) {
recordingInterface->stopPlaying();
}
AvatarData::fromFrame(frame->data, *scriptedAvatar);
});
using namespace recording;
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
static quint16 audioSequenceNumber{ 0 };
QByteArray audio(frame->data);
if (_isNoiseGateEnabled) {
int16_t* samples = reinterpret_cast<int16_t*>(audio.data());
int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
_audioGate.render(samples, samples, numSamples);
if (recordingInterface->isRecording()) {
recordingInterface->stopRecording();
}
computeLoudness(&audio, scriptedAvatar);
avatarDataTimer->stop();
_avatarAudioTimer.stop();
}
// state machine to detect gate opening and closing
bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
_audioGateOpen = audioGateOpen;
Q_UNUSED(openedInLastBlock);
// the codec must be flushed to silence before sending silent packets,
// so delay the transition to silent packets by one packet after becoming silent.
auto packetType = PacketType::MicrophoneAudioNoEcho;
if (!audioGateOpen && !closedInLastBlock) {
packetType = PacketType::SilentAudioFrame;
}
Transform audioTransform;
auto headOrientation = scriptedAvatar->getHeadOrientation();
audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
audioTransform.setRotation(headOrientation);
QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);
} else {
encodedBuffer = audio;
}
AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
packetType, _selectedCodecName);
});
auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());
// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));
_scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCacheScriptingInterface>().data());
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCacheScriptingInterface>().data());
QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
_scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);
_scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
LocationScriptingInterface::locationSetter);
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
_scriptEngine->registerGlobalObject("Recording", recordingInterface.data());
entityScriptingInterface->init();
_entityViewer.init();
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
QTimer* avatarDataTimer = new QTimer(this);
connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
avatarDataTimer->setSingleShot(false);
avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
avatarDataTimer->setTimerType(Qt::PreciseTimer);
avatarDataTimer->start();
_scriptEngine->run();
Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);
DependencyManager::destroy<RecordingScriptingInterface>();
setFinished(true);
}
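
The comment in the audio frame handler above notes that the codec must be flushed to silence before silent packets are sent, so the switch to SilentAudioFrame is delayed by one block after the gate closes. A minimal standalone sketch of that handoff, using hypothetical names rather than the shipping Agent code:

    enum class AgentAudioPacket { MicrophoneAudioNoEcho, SilentAudioFrame };

    struct AudioGateTracker {
        bool open { false };

        AgentAudioPacket nextPacketType(float loudness) {
            bool nowOpen = (loudness != 0.0f);
            bool closedThisBlock = open && !nowOpen;   // the gate just closed
            open = nowOpen;
            // keep sending one real (flushable) audio packet after the gate closes,
            // then fall back to silent frames until it opens again
            return (nowOpen || closedThisBlock) ? AgentAudioPacket::MicrophoneAudioNoEcho
                                                : AgentAudioPacket::SilentAudioFrame;
        }
    };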
@ -859,17 +884,25 @@ void Agent::aboutToFinish() {
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<AudioScriptingInterface>();
DependencyManager::destroy<RecordingScriptingInterface>();
DependencyManager::destroy<recording::Deck>();
DependencyManager::destroy<recording::Recorder>();
DependencyManager::destroy<recording::ClipCache>();
DependencyManager::destroy<ScriptEngine>();
// drop our shared pointer to the script engine, then ask ScriptEngines to shutdown scripting
// this ensures that the ScriptEngine goes down before ScriptEngines
_scriptEngine.clear();
{
DependencyManager::get<ScriptEngines>()->shutdownScripting();
}
DependencyManager::destroy<ScriptEngines>();
DependencyManager::destroy<AssignmentDynamicFactory>();
DependencyManager::destroy<ScriptableAvatar>();
QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
// cleanup codec & encoder
if (_codec && _encoder) {
_codec->releaseEncoder(_encoder);
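
The aboutToFinish() hunk above spells out the teardown order: the Agent drops its QSharedPointer to the ScriptEngine before asking ScriptEngines to shut down scripting, so the engine goes away before its manager. A toy illustration of that ordering concern, with hypothetical types standing in for the real classes:

    #include <QSharedPointer>

    struct ToyEngine {
        ~ToyEngine() { /* must run before the manager is torn down */ }
    };

    struct ToyEngineManager {
        void shutdownScripting() { /* stops whatever engines it still tracks */ }
    };

    void tearDown(QSharedPointer<ToyEngine>& enginePtr, ToyEngineManager& manager) {
        enginePtr.clear();            // release our reference first so the engine can be destroyed
        manager.shutdownScripting();  // then wind down the rest of the scripting machinery
    }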

View file

@ -654,6 +654,15 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
if (addToIgnore) {
senderNode->addIgnoredNode(ignoredUUID);
if (ignoredNode) {
// send a reliable kill packet so the ignored node removes the sending avatar
auto killPacket = NLPacket::create(PacketType::KillAvatar,
NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
killPacket->write(senderNode->getUUID().toRfc4122());
killPacket->writePrimitive(KillAvatarReason::AvatarDisconnected);
nodeList->sendPacket(std::move(killPacket), *ignoredNode);
}
} else {
senderNode->removeIgnoredNode(ignoredUUID);
}

View file

@ -145,3 +145,15 @@ void ScriptableAvatar::update(float deltatime) {
_clientTraitsHandler->sendChangedTraitsToMixer();
}
void ScriptableAvatar::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
_headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
}
void ScriptableAvatar::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
_headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
}
void ScriptableAvatar::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
_headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
}

View file

@ -157,9 +157,16 @@ public:
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;
void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
bool getHasProceduralBlinkFaceMovement() const override { return _headData->getHasProceduralBlinkFaceMovement(); }
void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
bool getHasProceduralEyeFaceMovement() const override { return _headData->getHasProceduralEyeFaceMovement(); }
void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }
private slots:
void update(float deltatime);
private:
AnimationPointer _animation;
AnimationDetails _animationDetails;

View file

@ -9,7 +9,6 @@
#
macro(AUTOSCRIBE_SHADER)
message(STATUS "Processing shader ${SHADER_FILE}")
unset(SHADER_INCLUDE_FILES)
# Grab include files
foreach(includeFile ${ARGN})

View file

@ -8,6 +8,5 @@
macro(TARGET_JSON)
add_dependency_external_projects(json)
find_package(JSON REQUIRED)
message("JSON_INCLUDE_DIRS ${JSON_INCLUDE_DIRS}")
target_include_directories(${TARGET_NAME} PUBLIC ${JSON_INCLUDE_DIRS})
endmacro()

Binary file not shown.

Binary file not shown.

View file

@ -585,149 +585,188 @@
"states": [
{
"id": "idle",
"interpTarget": 0,
"interpDuration": 4,
"interpTarget": 20,
"interpDuration": 8,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isMovingForward", "state": "idleToWalkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "idleToWalkFwd",
"interpTarget": 10,
"interpDuration": 4,
"interpType": "snapshotPrev",
"interpTarget": 12,
"interpDuration": 8,
"transitions": [
{ "var": "idleToWalkFwdOnDone", "state": "walkFwd" },
{ "var": "idleToWalkFwdOnDone", "state": "WALKFWD" },
{ "var": "isNotMoving", "state": "idle" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "idleSettle",
"interpTarget": 10,
"interpDuration": 10,
"interpTarget": 15,
"interpDuration": 8,
"interpType": "snapshotPrev",
"transitions": [
{"var": "idleSettleOnDone", "state": "idle" },
{"var": "isMovingForward", "state": "idleToWalkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{"var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" }
{ "var": "isInAirRun", "state": "INAIRRUN" }
]
},
{
"id": "walkFwd",
"interpTarget": 16,
"interpDuration": 6,
"id": "WALKFWD",
"interpTarget": 35,
"interpDuration": 10,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotMoving", "state": "idleSettle" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "walkBwd",
"interpTarget": 8,
"interpDuration": 6,
"id": "WALKBWD",
"interpTarget": 35,
"interpDuration": 10,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotMoving", "state": "idleSettle" },
{ "var": "isMovingForward", "state": "walkFwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "strafeRight",
"interpTarget": 5,
"id": "STRAFERIGHT",
"interpTarget": 25,
"interpDuration": 8,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotMoving", "state": "idleSettle" },
{ "var": "isMovingForward", "state": "walkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "strafeLeft",
"interpTarget": 5,
"id": "STRAFELEFT",
"interpTarget": 25,
"interpDuration": 8,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotMoving", "state": "idleSettle" },
{ "var": "isMovingForward", "state": "walkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "turnRight",
"interpTarget": 6,
"interpDuration": 8,
"transitions": [
{ "var": "isNotTurning", "state": "idle" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "turnLeft",
"interpTarget": 6,
"interpDuration": 8,
"transitions": [
{ "var": "isNotTurning", "state": "idle" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
@ -739,18 +778,18 @@
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotMoving", "state": "idleSettle" },
{ "var": "isMovingForward", "state": "walkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" }
{ "var": "isInAirRun", "state": "INAIRRUN" }
]
},
{
@ -760,60 +799,18 @@
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotMoving", "state": "idleSettle" },
{ "var": "isMovingForward", "state": "walkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" }
]
},
{
"id": "turnRight",
"interpTarget": 6,
"interpDuration": 8,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotTurning", "state": "idle" },
{ "var": "isMovingForward", "state": "walkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "turnLeft",
"interpTarget": 6,
"interpDuration": 8,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotTurning", "state": "idle" },
{ "var": "isMovingForward", "state": "walkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
{ "var": "isInAirRun", "state": "INAIRRUN" }
]
},
{
@ -826,79 +823,79 @@
},
{
"id": "takeoffStand",
"interpTarget": 0,
"interpDuration": 6,
"interpTarget": 2,
"interpDuration": 2,
"transitions": [
{ "var": "isNotTakeoff", "state": "inAirStand" }
]
},
{
"id": "takeoffRun",
"interpTarget": 0,
"interpDuration": 6,
"id": "TAKEOFFRUN",
"interpTarget": 2,
"interpDuration": 2,
"transitions": [
{ "var": "isNotTakeoff", "state": "inAirRun" }
{ "var": "isNotTakeoff", "state": "INAIRRUN" }
]
},
{
"id": "inAirStand",
"interpTarget": 0,
"interpDuration": 6,
"interpTarget": 3,
"interpDuration": 3,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotInAir", "state": "landStandImpact" }
]
},
{
"id": "inAirRun",
"interpTarget": 0,
"interpDuration": 6,
"id": "INAIRRUN",
"interpTarget": 3,
"interpDuration": 3,
"interpType": "snapshotPrev",
"transitions": [
{ "var": "isNotInAir", "state": "landRun" }
{ "var": "isNotInAir", "state": "WALKFWD" }
]
},
{
"id": "landStandImpact",
"interpTarget": 6,
"interpDuration": 4,
"interpTarget": 1,
"interpDuration": 1,
"transitions": [
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "landStandImpactOnDone", "state": "landStand" }
]
},
{
"id": "landStand",
"interpTarget": 0,
"interpTarget": 1,
"interpDuration": 1,
"transitions": [
{ "var": "isMovingForward", "state": "idleToWalkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isMovingForward", "state": "WALKFWD" },
{ "var": "isMovingBackward", "state": "WALKBWD" },
{ "var": "isMovingRight", "state": "STRAFERIGHT" },
{ "var": "isMovingLeft", "state": "STRAFELEFT" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "isInAirStand", "state": "inAirStand" },
{ "var": "isInAirRun", "state": "inAirRun" },
{ "var": "isInAirRun", "state": "INAIRRUN" },
{ "var": "landStandOnDone", "state": "idle" },
{ "var": "isMovingRightHmd", "state": "strafeRightHmd" },
{ "var": "isMovingLeftHmd", "state": "strafeLeftHmd" }
]
},
{
"id": "landRun",
"interpTarget": 1,
"interpDuration": 7,
"id": "LANDRUN",
"interpTarget": 2,
"interpDuration": 2,
"transitions": [
{ "var": "isFlying", "state": "fly" },
{ "var": "isTakeoffStand", "state": "takeoffStand" },
{ "var": "isTakeoffRun", "state": "takeoffRun" },
{ "var": "landRunOnDone", "state": "walkFwd" }
{ "var": "isTakeoffRun", "state": "TAKEOFFRUN" },
{ "var": "landRunOnDone", "state": "WALKFWD" }
]
}
]
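
Two kinds of fields are being retuned throughout the states above: each state's interpTarget/interpDuration (idle, for example, moves from 0/4 to 20/8) and its ordered transitions list, whose entries are simply redirected to the renamed states (WALKFWD, INAIRRUN, and so on). A loose sketch of how such a state machine is commonly driven, assuming first-match-wins transition checks and a frame-based cross-fade into the destination state (hypothetical helpers, not the engine's AnimStateMachine):

    #include <functional>
    #include <string>
    #include <vector>

    struct StateTransition {
        std::string var;    // e.g. "isMovingForward"
        std::string state;  // e.g. "WALKFWD"
    };

    // returns the first transition whose anim var is currently true, or empty to stay put
    std::string evaluateTransitions(const std::vector<StateTransition>& transitions,
                                    const std::function<bool(const std::string&)>& animVarIsTrue) {
        for (const auto& transition : transitions) {
            if (animVarIsTrue(transition.var)) {
                return transition.state;  // earlier entries take priority
            }
        }
        return std::string();
    }

    // 0.0 right after a state switch, 1.0 once the cross-fade of interpDuration frames is done;
    // interpTarget is the frame of the destination state the blend is aimed at
    float crossFadeAlpha(float interpDuration, float framesSinceSwitch) {
        if (interpDuration <= 0.0f) {
            return 1.0f;  // no blend requested: cut straight to the destination state
        }
        float alpha = framesSinceSwitch / interpDuration;
        return alpha < 1.0f ? alpha : 1.0f;
    }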
@ -913,7 +910,7 @@
{
"id": "idleStand",
"interpTarget": 6,
"interpDuration": 6,
"interpDuration": 10,
"transitions": [
{ "var": "isTalking", "state": "idleTalk" }
]
@ -921,7 +918,7 @@
{
"id": "idleTalk",
"interpTarget": 6,
"interpDuration": 6,
"interpDuration": 10,
"transitions": [
{ "var": "notIsTalking", "state": "idleStand" }
]
@ -956,12 +953,12 @@
]
},
{
"id": "walkFwd",
"id": "WALKFWD",
"type": "blendLinearMove",
"data": {
"alpha": 0.0,
"desiredSpeed": 1.4,
"characteristicSpeeds": [0.5, 1.5, 2.5, 3.2, 4.5],
"characteristicSpeeds": [0.5, 1.8, 2.3, 3.2, 4.5],
"alphaVar": "moveForwardAlpha",
"desiredSpeedVar": "moveForwardSpeed"
},
@ -984,7 +981,7 @@
"data": {
"url": "qrc:///avatar/animations/walk_fwd.fbx",
"startFrame": 0.0,
"endFrame": 35.0,
"endFrame": 30.0,
"timeScale": 1.0,
"loopFlag": true
},
@ -1046,25 +1043,25 @@
"data": {
"url": "qrc:///avatar/animations/settle_to_idle.fbx",
"startFrame": 1.0,
"endFrame": 48.0,
"endFrame": 59.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
},
{
"id": "walkBwd",
"id": "WALKBWD",
"type": "blendLinearMove",
"data": {
"alpha": 0.0,
"desiredSpeed": 1.4,
"characteristicSpeeds": [0.6, 1.7],
"characteristicSpeeds": [0.6, 1.6, 2.3, 3.1],
"alphaVar": "moveBackwardAlpha",
"desiredSpeedVar": "moveBackwardSpeed"
},
"children": [
{
"id": "walkBwdShort",
"id": "walkBwdShort_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/walk_short_bwd.fbx",
@ -1076,7 +1073,7 @@
"children": []
},
{
"id": "walkBwdNormal",
"id": "walkBwdFast_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/walk_bwd_fast.fbx",
@ -1086,6 +1083,30 @@
"loopFlag": true
},
"children": []
},
{
"id": "jogBwd_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jog_bwd.fbx",
"startFrame": 0.0,
"endFrame": 24.0,
"timeScale": 1.0,
"loopFlag": true
},
"children": []
},
{
"id": "runBwd_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/run_bwd.fbx",
"startFrame": 0.0,
"endFrame": 16.0,
"timeScale": 1.0,
"loopFlag": true
},
"children": []
}
]
},
@ -1115,18 +1136,18 @@
"children": []
},
{
"id": "strafeLeft",
"id": "STRAFELEFT",
"type": "blendLinearMove",
"data": {
"alpha": 0.0,
"desiredSpeed": 1.4,
"characteristicSpeeds": [0, 0.5, 1.5, 2.6, 3.0],
"characteristicSpeeds": [0.1, 0.5, 1.0, 2.6, 3.0],
"alphaVar": "moveLateralAlpha",
"desiredSpeedVar": "moveLateralSpeed"
},
"children": [
{
"id": "strafeLeftShort_c",
"id": "strafeLeftShortStep_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/side_step_short_left.fbx",
@ -1138,7 +1159,7 @@
"children": []
},
{
"id": "strafeLeft_c",
"id": "strafeLeftStep_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/side_step_left.fbx",
@ -1150,19 +1171,19 @@
"children": []
},
{
"id": "strafeLeftAnim_c",
"id": "strafeLeftWalk_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/walk_left.fbx",
"startFrame": 0.0,
"endFrame": 33.0,
"endFrame": 35.0,
"timeScale": 1.0,
"loopFlag": true
},
"children": []
},
{
"id": "strafeLeftFast_c",
"id": "strafeLeftWalkFast_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/walk_left_fast.fbx",
@ -1188,17 +1209,17 @@
]
},
{
"id": "strafeRight",
"id": "STRAFERIGHT",
"type": "blendLinearMove",
"data": {
"alpha": 0.0,
"desiredSpeed": 1.4,
"characteristicSpeeds": [0, 0.5, 1.5, 2.6, 3.0],
"characteristicSpeeds": [0.1, 0.5, 1.0, 2.6, 3.0],
"alphaVar": "moveLateralAlpha",
"desiredSpeedVar": "moveLateralSpeed"
},
"children": [ {
"id": "stepRightShort_c",
"id": "strafeRightShortStep_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/side_step_short_left.fbx",
@ -1211,7 +1232,7 @@
"children": []
},
{
"id": "stepRight_c",
"id": "strafeRightStep_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/side_step_left.fbx",
@ -1224,12 +1245,12 @@
"children": []
},
{
"id": "strafeRight_c",
"id": "strafeRightWalk_c",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/walk_left.fbx",
"startFrame": 0.0,
"endFrame": 33.0,
"endFrame": 35.0,
"timeScale": 1.0,
"loopFlag": true,
"mirrorFlag": true
@ -1381,22 +1402,22 @@
"id": "takeoffStand",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jump_standing_takeoff.fbx",
"startFrame": 17.0,
"endFrame": 25.0,
"url": "qrc:///avatar/animations/jump_standing_launch.fbx",
"startFrame": 2.0,
"endFrame": 16.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
},
{
"id": "takeoffRun",
"id": "TAKEOFFRUN",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jump_takeoff.fbx",
"startFrame": 1.0,
"endFrame": 2.5,
"timeScale": 0.01,
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
"startFrame": 4.0,
"endFrame": 15.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
@ -1416,7 +1437,7 @@
"url": "qrc:///avatar/animations/jump_standing_apex.fbx",
"startFrame": 0.0,
"endFrame": 0.0,
"timeScale": 0.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
@ -1448,7 +1469,7 @@
]
},
{
"id": "inAirRun",
"id": "INAIRRUN",
"type": "blendLinear",
"data": {
"alpha": 0.0,
@ -1459,10 +1480,10 @@
"id": "inAirRunPreApex",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jump_in_air.fbx",
"startFrame": 0.0,
"endFrame": 0.0,
"timeScale": 0.0,
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
"startFrame": 16.0,
"endFrame": 16.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
@ -1471,9 +1492,9 @@
"id": "inAirRunApex",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jump_in_air.fbx",
"startFrame": 6.0,
"endFrame": 6.0,
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
"startFrame": 22.0,
"endFrame": 22.0,
"timeScale": 1.0,
"loopFlag": false
},
@ -1483,9 +1504,9 @@
"id": "inAirRunPostApex",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jump_in_air.fbx",
"startFrame": 11.0,
"endFrame": 11.0,
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
"startFrame": 33.0,
"endFrame": 33.0,
"timeScale": 1.0,
"loopFlag": false
},
@ -1497,7 +1518,7 @@
"id": "landStandImpact",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jump_standing_land.fbx",
"url": "qrc:///avatar/animations/jump_standing_land_settle.fbx",
"startFrame": 1.0,
"endFrame": 6.0,
"timeScale": 1.0,
@ -1509,22 +1530,22 @@
"id": "landStand",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jump_standing_land.fbx",
"url": "qrc:///avatar/animations/jump_standing_land_settle.fbx",
"startFrame": 6.0,
"endFrame": 28.0,
"endFrame": 68.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
},
{
"id": "landRun",
"id": "LANDRUN",
"type": "clip",
"data": {
"url": "qrc:///avatar/animations/jump_land.fbx",
"startFrame": 1.0,
"endFrame": 6.0,
"timeScale": 0.65,
"url": "qrc:///avatar/animations/jump_running_launch_land.fbx",
"startFrame": 29.0,
"endFrame": 40.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []

View file

@ -0,0 +1,157 @@
import Hifi 1.0 as Hifi
import QtQuick 2.3
import '.'
Item {
id: animStats
anchors.leftMargin: 300
objectName: "StatsItem"
property int modality: Qt.NonModal
implicitHeight: row.height
implicitWidth: row.width
Component.onCompleted: {
animStats.parentChanged.connect(fill);
fill();
}
Component.onDestruction: {
animStats.parentChanged.disconnect(fill);
}
function fill() {
// This will cause a warning at shutdown, need to find another way to remove
// the warning other than filling the anchors to the parent
anchors.horizontalCenter = parent.horizontalCenter
}
Hifi.AnimStats {
id: root
objectName: "AnimStats"
implicitHeight: row.height
implicitWidth: row.width
anchors.horizontalCenter: parent.horizontalCenter
readonly property string bgColor: "#AA111111"
Row {
id: row
spacing: 8
Rectangle {
width: firstCol.width + 8;
height: firstCol.height + 8;
color: root.bgColor;
Column {
id: firstCol
spacing: 4; x: 4; y: 4;
StatText {
text: "State Machines:---------------------------------------------------------------------------"
}
ListView {
width: firstCol.width
height: root.animStateMachines.length * 15
visible: root.animStateMachines.length > 0;
model: root.animStateMachines
delegate: StatText {
text: {
return modelData;
}
}
}
}
}
Rectangle {
width: secondCol.width + 8
height: secondCol.height + 8
color: root.bgColor;
Column {
id: secondCol
spacing: 4; x: 4; y: 4;
StatText {
text: "Anim Vars:--------------------------------------------------------------------------------"
}
ListView {
width: secondCol.width
height: root.animVars.length * 15
visible: root.animVars.length > 0;
model: root.animVars
delegate: StatText {
text: {
var actualText = modelData.split("|")[1];
if (actualText) {
return actualText;
} else {
return modelData;
}
}
color: {
var grayScale = parseFloat(modelData.split("|")[0]);
return Qt.rgba(1.0, 1.0, 1.0, grayScale);
}
styleColor: {
var grayScale = parseFloat(modelData.split("|")[0]);
return Qt.rgba(0.0, 0.0, 0.0, grayScale);
}
}
}
}
}
Rectangle {
width: thirdCol.width + 8
height: thirdCol.height + 8
color: root.bgColor;
Column {
id: thirdCol
spacing: 4; x: 4; y: 4;
StatText {
text: "Alpha Values:--------------------------------------------------------------------------"
}
ListView {
width: thirdCol.width
height: root.animAlphaValues.length * 15
visible: root.animAlphaValues.length > 0;
model: root.animAlphaValues
delegate: StatText {
text: {
var actualText = modelData.split("|")[1];
if (actualText) {
return actualText;
} else {
return modelData;
}
}
color: {
var grayScale = parseFloat(modelData.split("|")[0]);
return Qt.rgba(1.0, 1.0, 1.0, grayScale);
}
styleColor: {
var grayScale = parseFloat(modelData.split("|")[0]);
return Qt.rgba(0.0, 0.0, 0.0, grayScale);
}
}
}
}
}
}
Connections {
target: root.parent
onWidthChanged: {
root.x = root.parent.width - root.width;
}
}
}
}

View file

@ -146,7 +146,8 @@ Windows.Window {
Qt.WindowCloseButtonHint |
Qt.WindowMaximizeButtonHint |
Qt.WindowMinimizeButtonHint;
if ((flags & Desktop.ALWAYS_ON_TOP) === Desktop.ALWAYS_ON_TOP) {
// only use the always-on-top feature on non-Windows platforms
if (Qt.platform.os !== "windows" && (flags & Desktop.ALWAYS_ON_TOP)) {
nativeWindowFlags |= Qt.WindowStaysOnTopHint;
}
nativeWindow.flags = nativeWindowFlags;

View file

@ -192,21 +192,6 @@ Item {
StatText {
text: "Yaw: " + root.yaw.toFixed(1)
}
StatText {
visible: root.animStackNames.length > 0;
text: "Anim Stack Names:"
}
ListView {
width: geoCol.width
height: root.animStackNames.length * 15
visible: root.animStackNames.length > 0;
model: root.animStackNames
delegate: StatText {
text: modelData.length > 30
? modelData.substring(0, 5) + "..." + modelData.substring(modelData.length - 22)
: modelData
}
}
StatText {
visible: root.expanded;
text: "Avatar Mixer In: " + root.avatarMixerInKbps + " kbps, " +

View file

@ -530,9 +530,7 @@ Item {
maximumValue: 20.0
stepSize: 5
updateValueWhileDragging: true
Component.onCompleted: {
value = Users.getAvatarGain(uuid);
}
value: Users.getAvatarGain(uuid)
onValueChanged: {
updateGainFromQML(uuid, value, false);
}

View file

@ -780,6 +780,12 @@ Rectangle {
headerVisible: true;
sortIndicatorColumn: settings.connectionsSortIndicatorColumn;
sortIndicatorOrder: settings.connectionsSortIndicatorOrder;
onSortIndicatorColumnChanged: {
settings.connectionsSortIndicatorColumn = sortIndicatorColumn;
}
onSortIndicatorOrderChanged: {
settings.connectionsSortIndicatorOrder = sortIndicatorOrder;
}
TableViewColumn {
id: connectionsUserNameHeader;

View file

@ -26,7 +26,7 @@ Rectangle {
HifiConstants { id: hifi; }
property var eventBridge;
property string title: "Audio Settings - " + AudioScriptingInterface.context;
property string title: "Audio Settings"
signal sendToScript(var message);
color: hifi.colors.baseGray;

View file

@ -18,6 +18,7 @@ import "../../windows"
Rectangle {
id: root
objectName: "DCConectionTiming"
property string title: "Domain Connection Timing"
signal sendToScript(var message);
property bool isHMD: false
@ -33,7 +34,7 @@ Rectangle {
Row {
id: header
anchors.top: parent.top
anchors.topMargin: hifi.dimensions.tabletMenuHeader
anchors.topMargin: hifi.dimensions.contentMargin.y
anchors.leftMargin: 5
anchors.rightMargin: 5
anchors.left: parent.left

View file

@ -18,6 +18,7 @@ import "../../windows"
Rectangle {
id: root
objectName: "EntityStatistics"
property string title: "Entity Statistics"
signal sendToScript(var message);
property bool isHMD: false
@ -40,6 +41,7 @@ Rectangle {
id: scrollView
width: parent.width
anchors.top: parent.top
anchors.topMargin: hifi.dimensions.contentMargin.y
anchors.bottom: parent.bottom
anchors.bottomMargin: hifi.dimensions.tabletMenuHeader
contentWidth: column.implicitWidth
@ -48,10 +50,15 @@ Rectangle {
Column {
id: column
anchors.margins: 10
anchors.top: parent.top
anchors.left: parent.left
anchors.right: parent.right
y: hifi.dimensions.tabletMenuHeader //-bgNavBar
anchors {
topMargin: 0
leftMargin: 10
rightMargin: 10
bottomMargin: 0
}
spacing: 20
TabletEntityStatisticsItem {

View file

@ -24,6 +24,8 @@ Item {
height: parent.height
width: parent.width
property string title: "Controls"
HifiConstants { id: hifi }
TabBar {

View file

@ -105,7 +105,6 @@ StackView {
propagateComposedEvents: true
onPressed: {
parent.forceActiveFocus();
addressBarDialog.keyboardEnabled = false;
mouse.accepted = false;
}
}
@ -223,7 +222,6 @@ StackView {
updateLocationText(text.length > 0);
}
onAccepted: {
addressBarDialog.keyboardEnabled = false;
toggleOrGo();
}
@ -378,7 +376,7 @@ StackView {
HifiControls.Keyboard {
id: keyboard
raised: parent.keyboardEnabled
raised: parent.keyboardEnabled && parent.keyboardRaised
numeric: parent.punctuationMode
anchors {
bottom: parent.bottom

View file

@ -23,6 +23,8 @@ FocusScope {
property string subMenu: ""
signal sendToScript(var message);
HifiConstants { id: hifi }
Rectangle {
id: bgNavBar
height: 90
@ -45,24 +47,22 @@ FocusScope {
anchors.topMargin: 0
anchors.top: parent.top
Image {
HiFiGlyphs {
id: menuRootIcon
width: 40
height: 40
source: "../../../icons/tablet-icons/menu-i.svg"
text: breadcrumbText.text !== "Menu" ? hifi.glyphs.backward : ""
size: 72
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
anchors.leftMargin: 15
width: breadcrumbText.text === "Menu" ? 32 : 50
visible: breadcrumbText.text !== "Menu"
MouseArea {
anchors.fill: parent
hoverEnabled: true
onEntered: iconColorOverlay.color = "#1fc6a6";
onExited: iconColorOverlay.color = "#34a2c7";
// navigate back to root level menu
onClicked: {
buildMenu();
breadcrumbText.text = "Menu";
menuPopperUpper.closeLastMenu();
tabletRoot.playButtonClickSound();
}
}
@ -79,23 +79,10 @@ FocusScope {
id: breadcrumbText
text: "Menu"
size: 26
color: "#34a2c7"
color: "#e3e3e3"
anchors.verticalCenter: parent.verticalCenter
anchors.left: menuRootIcon.right
anchors.leftMargin: 15
MouseArea {
anchors.fill: parent
hoverEnabled: true
onEntered: breadcrumbText.color = "#1fc6a6";
onExited: breadcrumbText.color = "#34a2c7";
// navigate back to parent level menu if there is one
onClicked: {
if (breadcrumbText.text !== "Menu") {
menuPopperUpper.closeLastMenu();
}
tabletRoot.playButtonClickSound();
}
}
}
}
@ -103,7 +90,6 @@ FocusScope {
menuPopperUpper.closeLastMenu();
}
function setRootMenu(rootMenu, subMenu) {
tabletMenu.subMenu = subMenu;
tabletMenu.rootMenu = rootMenu;

View file

@ -1,5 +1,5 @@
//
// MessageDialog.qml
// TabletMenuStack.qml
//
// Created by Dante Ruiz on 13 Feb 2017
// Copyright 2016 High Fidelity, Inc.
@ -66,7 +66,7 @@ Item {
function popSource() {
console.log("trying to pop page");
d.pop();
closeLastMenu();
}
function toModel(items, newMenu) {

View file

@ -41,7 +41,11 @@ Item {
section.saveAll();
}
closeDialog();
if (HMD.active) {
tablet.popFromStack();
} else {
closeDialog();
}
}
function restoreAll() {
@ -50,7 +54,11 @@ Item {
section.restoreAll();
}
closeDialog();
if (HMD.active) {
tablet.popFromStack();
} else {
closeDialog();
}
}
function closeDialog() {

View file

@ -195,6 +195,7 @@
#include "ui/SnapshotAnimated.h"
#include "ui/StandAloneJSConsole.h"
#include "ui/Stats.h"
#include "ui/AnimStats.h"
#include "ui/UpdateDialog.h"
#include "ui/overlays/Overlays.h"
#include "ui/DomainConnectionModel.h"
@ -2663,6 +2664,10 @@ Application::~Application() {
void Application::initializeGL() {
qCDebug(interfaceapp) << "Created Display Window.";
#ifdef DISABLE_QML
setAttribute(Qt::AA_DontCheckOpenGLContextThreadAffinity);
#endif
// initialize glut for shape drawing; Qt apparently initializes it on OS X
if (_isGLInitialized) {
return;
@ -3081,8 +3086,10 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {
void Application::onDesktopRootItemCreated(QQuickItem* rootItem) {
Stats::show();
AnimStats::show();
auto surfaceContext = DependencyManager::get<OffscreenUi>()->getSurfaceContext();
surfaceContext->setContextProperty("Stats", Stats::getInstance());
surfaceContext->setContextProperty("AnimStats", AnimStats::getInstance());
#if !defined(Q_OS_ANDROID)
auto offscreenUi = DependencyManager::get<OffscreenUi>();
@ -4618,6 +4625,7 @@ void Application::idle() {
checkChangeCursor();
Stats::getInstance()->updateStats();
AnimStats::getInstance()->updateStats();
// Normally we check PipelineWarnings, but since idle will often take more than 10ms we only show these idle timing
// details if we're in ExtraDebugging mode. However, the ::update() and its subcomponents will show their timing
@ -5855,9 +5863,7 @@ void Application::update(float deltaTime) {
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::update()");
#if !defined(Q_OS_ANDROID)
updateLOD(deltaTime);
#endif
// TODO: break these out into distinct perfTimers when they prove interesting
{
@ -6870,6 +6876,9 @@ bool Application::askToLoadScript(const QString& scriptFilenameOrURL) {
shortName = shortName.mid(startIndex, endIndex - startIndex);
}
#ifdef DISABLE_QML
DependencyManager::get<ScriptEngines>()->loadScript(scriptFilenameOrURL);
#else
QString message = "Would you like to run this script:\n" + shortName;
ModalDialogListener* dlg = OffscreenUi::asyncQuestion(getWindow(), "Run Script", message,
QMessageBox::Yes | QMessageBox::No);
@ -6884,7 +6893,7 @@ bool Application::askToLoadScript(const QString& scriptFilenameOrURL) {
}
QObject::disconnect(dlg, &ModalDialogListener::response, this, nullptr);
});
#endif
return true;
}
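
The Application changes above wire AnimStats in the same way Stats already works: show the item when the desktop root item is created, expose the singleton to QML as a context property, and refresh it from idle(). A very loose sketch of that pattern with a hypothetical class (the real type is declared in the ui/AnimStats.h header included above):

    #include <QObject>
    #include <QStringList>

    class ExampleAnimStats : public QObject {
        Q_OBJECT
        Q_PROPERTY(QStringList animStateMachines READ animStateMachines NOTIFY statsChanged)
    public:
        static ExampleAnimStats* getInstance() { static ExampleAnimStats instance; return &instance; }

        QStringList animStateMachines() const { return _animStateMachines; }

        void updateStats() {      // called once per idle tick, like AnimStats::getInstance()->updateStats()
            // ...gather fresh values from the animation system here...
            emit statsChanged();  // QML bindings (e.g. the ListView in AnimStats.qml) refresh
        }

    signals:
        void statsChanged();

    private:
        QStringList _animStateMachines;
    };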

View file

@ -19,8 +19,11 @@
#include <SimpleMovingAverage.h>
#include <render/Args.h>
#ifdef Q_OS_ANDROID
const float LOD_DEFAULT_QUALITY_LEVEL = 0.75f; // default quality level setting is High (lower framerate)
#else
const float LOD_DEFAULT_QUALITY_LEVEL = 0.5f; // default quality level setting is Mid
#endif
const float LOD_MAX_LIKELY_DESKTOP_FPS = 60.0f; // this is essentially the V-sync fps
const float LOD_MAX_LIKELY_HMD_FPS = 90.0f; // this is essentially the V-sync fps
const float LOD_OFFSET_FPS = 5.0f; // offset of FPS to add for computing the target framerate

View file

@ -255,7 +255,7 @@ Menu::Menu() {
connect(action, &QAction::triggered, [] {
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
auto hmd = DependencyManager::get<HMDScriptingInterface>();
tablet->loadQMLSource("hifi/tablet/ControllerSettings.qml");
tablet->pushOntoStack("hifi/tablet/ControllerSettings.qml");
if (!hmd->getShouldShowTablet()) {
hmd->toggleShouldShowTablet();
@ -737,6 +737,7 @@ Menu::Menu() {
// Developer > Stats
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::Stats);
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::AnimStats);
// Settings > Enable Speech Control API
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)

View file

@ -197,6 +197,7 @@ namespace MenuOption {
const QString SMIEyeTracking = "SMI Eye Tracking";
const QString SparseTextureManagement = "Enable Sparse Texture Management";
const QString Stats = "Show Statistics";
const QString AnimStats = "Show Animation Stats";
const QString StopAllScripts = "Stop All Scripts";
const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
const QString ThirdPerson = "Third Person";

View file

@ -516,6 +516,10 @@ void MyAvatar::update(float deltaTime) {
head->relax(deltaTime);
updateFromTrackers(deltaTime);
if (getIsInWalkingState() && glm::length(getControllerPoseInAvatarFrame(controller::Action::HEAD).getVelocity()) < DEFAULT_AVATAR_WALK_SPEED_THRESHOLD) {
setIsInWalkingState(false);
}
// Get audio loudness data from audio input device
// Also get the AudioClient so we can update the avatar bounding box data
// on the AudioClient side.
@ -3678,10 +3682,10 @@ static bool headAngularVelocityBelowThreshold(const controller::Pose& head) {
return isBelowThreshold;
}
static bool isWithinThresholdHeightMode(const controller::Pose& head,const float& newMode) {
static bool isWithinThresholdHeightMode(const controller::Pose& head, const float& newMode, const float& scale) {
bool isWithinThreshold = true;
if (head.isValid()) {
isWithinThreshold = (head.getTranslation().y - newMode) > DEFAULT_AVATAR_MODE_HEIGHT_STEPPING_THRESHOLD;
isWithinThreshold = (head.getTranslation().y - newMode) > (DEFAULT_AVATAR_MODE_HEIGHT_STEPPING_THRESHOLD * scale);
}
return isWithinThreshold;
}
@ -3802,6 +3806,10 @@ float MyAvatar::getUserEyeHeight() const {
return userHeight - userHeight * ratio;
}
bool MyAvatar::getIsInWalkingState() const {
return _isInWalkingState;
}
float MyAvatar::getWalkSpeed() const {
return _walkSpeed.get() * _walkSpeedScalar;
}
@ -3818,6 +3826,10 @@ void MyAvatar::setSprintMode(bool sprint) {
_walkSpeedScalar = sprint ? _sprintSpeed.get() : AVATAR_WALK_SPEED_SCALAR;
}
void MyAvatar::setIsInWalkingState(bool isWalking) {
_isInWalkingState = isWalking;
}
void MyAvatar::setWalkSpeed(float value) {
_walkSpeed.set(value);
}
@ -3912,7 +3924,6 @@ void MyAvatar::lateUpdatePalms() {
Avatar::updatePalms();
}
static const float FOLLOW_TIME = 0.5f;
MyAvatar::FollowHelper::FollowHelper() {
@ -4004,24 +4015,36 @@ bool MyAvatar::FollowHelper::shouldActivateHorizontalCG(MyAvatar& myAvatar) cons
controller::Pose currentRightHandPose = myAvatar.getControllerPoseInAvatarFrame(controller::Action::RIGHT_HAND);
bool stepDetected = false;
if (!withinBaseOfSupport(currentHeadPose) &&
float myScale = myAvatar.getAvatarScale();
if (myAvatar.getIsInWalkingState()) {
stepDetected = true;
} else {
if (!withinBaseOfSupport(currentHeadPose) &&
headAngularVelocityBelowThreshold(currentHeadPose) &&
isWithinThresholdHeightMode(currentHeadPose, myAvatar.getCurrentStandingHeight()) &&
isWithinThresholdHeightMode(currentHeadPose, myAvatar.getCurrentStandingHeight(), myScale) &&
handDirectionMatchesHeadDirection(currentLeftHandPose, currentRightHandPose, currentHeadPose) &&
handAngularVelocityBelowThreshold(currentLeftHandPose, currentRightHandPose) &&
headVelocityGreaterThanThreshold(currentHeadPose) &&
isHeadLevel(currentHeadPose, myAvatar.getAverageHeadRotation())) {
// a step is detected
stepDetected = true;
} else {
glm::vec3 defaultHipsPosition = myAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(myAvatar.getJointIndex("Hips"));
glm::vec3 defaultHeadPosition = myAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(myAvatar.getJointIndex("Head"));
glm::vec3 currentHeadPosition = currentHeadPose.getTranslation();
float anatomicalHeadToHipsDistance = glm::length(defaultHeadPosition - defaultHipsPosition);
if (!isActive(Horizontal) &&
(glm::length(currentHeadPosition - defaultHipsPosition) > (anatomicalHeadToHipsDistance + (DEFAULT_AVATAR_SPINE_STRETCH_LIMIT * anatomicalHeadToHipsDistance)))) {
myAvatar.setResetMode(true);
// a step is detected
stepDetected = true;
if (glm::length(currentHeadPose.velocity) > DEFAULT_AVATAR_WALK_SPEED_THRESHOLD) {
myAvatar.setIsInWalkingState(true);
}
} else {
glm::vec3 defaultHipsPosition = myAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(myAvatar.getJointIndex("Hips"));
glm::vec3 defaultHeadPosition = myAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(myAvatar.getJointIndex("Head"));
glm::vec3 currentHeadPosition = currentHeadPose.getTranslation();
float anatomicalHeadToHipsDistance = glm::length(defaultHeadPosition - defaultHipsPosition);
if (!isActive(Horizontal) &&
(glm::length(currentHeadPosition - defaultHipsPosition) > (anatomicalHeadToHipsDistance + (DEFAULT_AVATAR_SPINE_STRETCH_LIMIT * anatomicalHeadToHipsDistance)))) {
myAvatar.setResetMode(true);
stepDetected = true;
if (glm::length(currentHeadPose.velocity) > DEFAULT_AVATAR_WALK_SPEED_THRESHOLD) {
myAvatar.setIsInWalkingState(true);
}
}
}
}
return stepDetected;
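
The restructured shouldActivateHorizontalCG above adds a latch: once a step is detected while the head moves faster than the walk threshold, setIsInWalkingState(true) keeps step detection active on later frames, and MyAvatar::update() clears the flag again when head velocity drops below DEFAULT_AVATAR_WALK_SPEED_THRESHOLD. A condensed, hypothetical sketch of that control flow (free function, not the member logic):

    bool detectStep(bool& isInWalkingState, bool headOutsideBaseOfSupport,
                    bool otherStepCriteriaMet, float headSpeed, float walkSpeedThreshold) {
        if (isInWalkingState) {
            return true;  // latched: keep recentering until the head slows down again
        }
        if (headOutsideBaseOfSupport && otherStepCriteriaMet) {
            if (headSpeed > walkSpeedThreshold) {
                isInWalkingState = true;  // latch, so later frames skip the per-frame checks
            }
            return true;  // a step is detected
        }
        return false;
    }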

View file

@ -1086,6 +1086,8 @@ public:
const QUuid& getSelfID() const { return AVATAR_SELF_ID; }
void setIsInWalkingState(bool isWalking);
bool getIsInWalkingState() const;
void setWalkSpeed(float value);
float getWalkSpeed() const;
void setWalkBackwardSpeed(float value);
@ -1788,6 +1790,7 @@ private:
ThreadSafeValueCache<float> _walkBackwardSpeed { DEFAULT_AVATAR_MAX_WALKING_BACKWARD_SPEED };
ThreadSafeValueCache<float> _sprintSpeed { AVATAR_SPRINT_SPEED_SCALAR };
float _walkSpeedScalar { AVATAR_WALK_SPEED_SCALAR };
bool _isInWalkingState { false };
// load avatar scripts once when rig is ready
bool _shouldLoadScripts { false };

View file

@ -16,8 +16,9 @@ Transform MyAvatarHeadTransformNode::getTransform() {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::vec3 pos = myAvatar->getHeadPosition();
glm::vec3 scale = glm::vec3(myAvatar->scaleForChildren());
glm::quat headOri = myAvatar->getHeadOrientation();
glm::quat ori = headOri * glm::angleAxis(-PI / 2.0f, Vectors::RIGHT);
return Transform(ori, glm::vec3(1.0f), pos);
return Transform(ori, scale, pos);
}

View file

@ -46,7 +46,7 @@ static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
}
glm::mat4 hipsMat;
if (myAvatar->getCenterOfGravityModelEnabled() && !isFlying) {
if (myAvatar->getCenterOfGravityModelEnabled() && !isFlying && !(myAvatar->getIsInWalkingState())) {
// then we use center of gravity model
hipsMat = myAvatar->deriveBodyUsingCgModel();
} else {

View file

@ -75,7 +75,6 @@ void SafeLanding::addTrackedEntity(const EntityItemID& entityID) {
if (hasAABox && downloadedCollisionTypes.count(modelEntity->getShapeType()) != 0) {
// Only track entities with downloaded collision bodies.
_trackedEntities.emplace(entityID, entity);
qCDebug(interfaceapp) << "Safe Landing: Tracking entity " << entity->getItemName();
}
}
}
@ -110,7 +109,6 @@ bool SafeLanding::isLoadSequenceComplete() {
_initialEnd = INVALID_SEQUENCE;
_entityTree = nullptr;
EntityTreeRenderer::setEntityLoadingPriorityFunction(StandardPriority);
qCDebug(interfaceapp) << "Safe Landing: load sequence complete";
}
return !_trackingEntities;

View file

@ -86,23 +86,23 @@ bool CollisionPick::isLoaded() const {
return !_mathPick.shouldComputeShapeInfo() || (_cachedResource && _cachedResource->isLoaded());
}
bool CollisionPick::getShapeInfoReady() {
bool CollisionPick::getShapeInfoReady(const CollisionRegion& pick) {
if (_mathPick.shouldComputeShapeInfo()) {
if (_cachedResource && _cachedResource->isLoaded()) {
computeShapeInfo(_mathPick, *_mathPick.shapeInfo, _cachedResource);
computeShapeInfo(pick, *_mathPick.shapeInfo, _cachedResource);
_mathPick.loaded = true;
} else {
_mathPick.loaded = false;
}
} else {
computeShapeInfoDimensionsOnly(_mathPick, *_mathPick.shapeInfo, _cachedResource);
computeShapeInfoDimensionsOnly(pick, *_mathPick.shapeInfo, _cachedResource);
_mathPick.loaded = true;
}
return _mathPick.loaded;
}
void CollisionPick::computeShapeInfoDimensionsOnly(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
void CollisionPick::computeShapeInfoDimensionsOnly(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
ShapeType type = shapeInfo.getType();
glm::vec3 dimensions = pick.transform.getScale();
QString modelURL = (resource ? resource->getURL().toString() : "");
@ -115,7 +115,7 @@ void CollisionPick::computeShapeInfoDimensionsOnly(CollisionRegion& pick, ShapeI
}
}
void CollisionPick::computeShapeInfo(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
// This code was copied and modified from RenderableModelEntityItem::computeShapeInfo
// TODO: Move to some shared code area (in entities-renderer? model-networking?)
// after we verify this is working and do a diff comparison with RenderableModelEntityItem::computeShapeInfo
@ -357,12 +357,14 @@ CollisionPick::CollisionPick(const PickFilter& filter, float maxDistance, bool e
CollisionRegion CollisionPick::getMathematicalPick() const {
CollisionRegion mathPick = _mathPick;
mathPick.loaded = isLoaded();
if (!parentTransform) {
return mathPick;
} else {
mathPick.transform = parentTransform->getTransform().worldTransform(mathPick.transform);
return mathPick;
if (parentTransform) {
Transform parentTransformValue = parentTransform->getTransform();
mathPick.transform = parentTransformValue.worldTransform(mathPick.transform);
glm::vec3 scale = parentTransformValue.getScale();
float largestDimension = glm::max(glm::max(scale.x, scale.y), scale.z);
mathPick.threshold *= largestDimension;
}
return mathPick;
}
void CollisionPick::filterIntersections(std::vector<ContactTestResult>& intersections) const {
@ -393,9 +395,9 @@ PickResultPointer CollisionPick::getEntityIntersection(const CollisionRegion& pi
// Cannot compute result
return std::make_shared<CollisionPickResult>(pick.toVariantMap(), std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
}
getShapeInfoReady();
getShapeInfoReady(pick);
auto entityIntersections = _physicsEngine->contactTest(USER_COLLISION_MASK_ENTITIES, *pick.shapeInfo, pick.transform, USER_COLLISION_GROUP_DYNAMIC, pick.threshold);
auto entityIntersections = _physicsEngine->contactTest(USER_COLLISION_MASK_ENTITIES, *_mathPick.shapeInfo, pick.transform, USER_COLLISION_GROUP_DYNAMIC, pick.threshold);
filterIntersections(entityIntersections);
return std::make_shared<CollisionPickResult>(pick, entityIntersections, std::vector<ContactTestResult>());
}
@ -409,9 +411,9 @@ PickResultPointer CollisionPick::getAvatarIntersection(const CollisionRegion& pi
// Cannot compute result
return std::make_shared<CollisionPickResult>(pick, std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
}
getShapeInfoReady();
getShapeInfoReady(pick);
auto avatarIntersections = _physicsEngine->contactTest(USER_COLLISION_MASK_AVATARS, *pick.shapeInfo, pick.transform, USER_COLLISION_GROUP_DYNAMIC, pick.threshold);
auto avatarIntersections = _physicsEngine->contactTest(USER_COLLISION_MASK_AVATARS, *_mathPick.shapeInfo, pick.transform, USER_COLLISION_GROUP_DYNAMIC, pick.threshold);
filterIntersections(avatarIntersections);
return std::make_shared<CollisionPickResult>(pick, std::vector<ContactTestResult>(), avatarIntersections);
}

View file

@ -62,9 +62,9 @@ protected:
// Returns true if the resource for _mathPick.shapeInfo is loaded or if a resource is not needed.
bool isLoaded() const;
// Returns true if _mathPick.shapeInfo is valid. Otherwise, attempts to get the _mathPick ready for use.
bool getShapeInfoReady();
void computeShapeInfo(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
void computeShapeInfoDimensionsOnly(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
bool getShapeInfoReady(const CollisionRegion& pick);
void computeShapeInfo(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
void computeShapeInfoDimensionsOnly(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
void filterIntersections(std::vector<ContactTestResult>& intersections) const;
CollisionRegion _mathPick;

View file

@ -24,11 +24,14 @@
#include "CollisionPick.h"
#include "SpatialParentFinder.h"
#include "NestableTransformNode.h"
#include "PickTransformNode.h"
#include "MouseTransformNode.h"
#include "avatar/MyAvatarHeadTransformNode.h"
#include "avatar/AvatarManager.h"
#include "NestableTransformNode.h"
#include "avatars-renderer/AvatarTransformNode.h"
#include "ui/overlays/OverlayTransformNode.h"
#include "EntityTransformNode.h"
#include <ScriptEngine.h>
@ -260,9 +263,16 @@ unsigned int PickScriptingInterface::createParabolaPick(const QVariant& properti
* A set of properties that can be passed to {@link Picks.createPick} to create a new Collision Pick.
* @typedef {object} Picks.CollisionPickProperties
* @property {Shape} shape - The information about the collision region's size and shape.
* @property {Vec3} position - The position of the collision region.
* @property {Quat} orientation - The orientation of the collision region.
* @property {boolean} [enabled=false] If this Pick should start enabled or not. Disabled Picks do not update their pick results.
* @property {number} [filter=Picks.PICK_NOTHING] The filter for this Pick to use, constructed using filter flags combined using bitwise OR.
* @property {Shape} shape - The information about the collision region's size and shape. Dimensions are in world space, but will scale with the parent if defined.
* @property {Vec3} position - The position of the collision region, relative to a parent if defined.
* @property {Quat} orientation - The orientation of the collision region, relative to a parent if defined.
* @property {float} threshold - The approximate minimum penetration depth for a test object to be considered in contact with the collision region.
* The depth is measured in world space, but will scale with the parent if defined.
* @property {Uuid} parentID - The ID of the parent, either an avatar, an entity, or an overlay.
* @property {number} parentJointIndex - The joint of the parent to parent to, for example, the joints on the model of an avatar. (default = 0, no joint)
* @property {string} joint - If "Mouse," parents the pick to the mouse. If "Avatar," parents the pick to MyAvatar's head. Otherwise, parents to the joint of the given name on MyAvatar.
*/
unsigned int PickScriptingInterface::createCollisionPick(const QVariant& properties) {
QVariantMap propMap = properties.toMap();
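A hypothetical script-side usage sketch (not part of this commit) showing how the Collision Pick properties documented above could be passed to Picks.createPick. PickType.Collision, the shapeType/dimensions form of the Shape value, the filter flags, and Picks.getPrevPickResult are assumed from the scripting API and are not defined in this diff; the numeric values are illustrative only.
// Assumed entry point: Picks.createPick(PickType.Collision, properties)
var collisionPickID = Picks.createPick(PickType.Collision, {
    enabled: true,                                                   // start enabled (default is false)
    filter: Picks.PICK_ENTITIES | Picks.PICK_AVATARS,                // test against entities and avatars
    shape: { shapeType: "sphere", dimensions: { x: 0.2, y: 0.2, z: 0.2 } },  // world-space size, scales with the parent
    position: { x: 0, y: 0.1, z: 0 },                                // relative to the parent if one is defined
    threshold: 0.05,                                                 // approximate minimum penetration depth
    joint: "Avatar"                                                  // parent the pick to MyAvatar's head
});
// The latest contacts could then be read back, e.g. via Picks.getPrevPickResult(collisionPickID).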
@ -375,7 +385,16 @@ std::shared_ptr<TransformNode> PickScriptingInterface::createTransformNode(const
}
auto sharedNestablePointer = nestablePointer.lock();
if (success && sharedNestablePointer) {
return std::make_shared<NestableTransformNode>(nestablePointer, parentJointIndex);
NestableType nestableType = sharedNestablePointer->getNestableType();
if (nestableType == NestableType::Avatar) {
return std::make_shared<AvatarTransformNode>(std::static_pointer_cast<Avatar>(sharedNestablePointer), parentJointIndex);
} else if (nestableType == NestableType::Overlay) {
return std::make_shared<OverlayTransformNode>(std::static_pointer_cast<Base3DOverlay>(sharedNestablePointer), parentJointIndex);
} else if (nestableType == NestableType::Entity) {
return std::make_shared<EntityTransformNode>(std::static_pointer_cast<EntityItem>(sharedNestablePointer), parentJointIndex);
} else {
return std::make_shared<NestableTransformNode>(nestablePointer, parentJointIndex);
}
}
}
@ -394,7 +413,7 @@ std::shared_ptr<TransformNode> PickScriptingInterface::createTransformNode(const
} else if (!joint.isNull()) {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
int jointIndex = myAvatar->getJointIndex(joint);
return std::make_shared<NestableTransformNode>(myAvatar, jointIndex);
return std::make_shared<AvatarTransformNode>(myAvatar, jointIndex);
}
}

View file

@ -0,0 +1,141 @@
//
// Created by Anthony J. Thibault 2018/08/06
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AnimStats.h"
#include <avatar/AvatarManager.h>
#include <OffscreenUi.h>
#include "Menu.h"
HIFI_QML_DEF(AnimStats)
static AnimStats* INSTANCE{ nullptr };
AnimStats* AnimStats::getInstance() {
Q_ASSERT(INSTANCE);
return INSTANCE;
}
AnimStats::AnimStats(QQuickItem* parent) : QQuickItem(parent) {
INSTANCE = this;
}
void AnimStats::updateStats(bool force) {
QQuickItem* parent = parentItem();
if (!force) {
if (!Menu::getInstance()->isOptionChecked(MenuOption::AnimStats)) {
if (parent->isVisible()) {
parent->setVisible(false);
}
return;
} else if (!parent->isVisible()) {
parent->setVisible(true);
}
}
auto avatarManager = DependencyManager::get<AvatarManager>();
auto myAvatar = avatarManager->getMyAvatar();
auto debugAlphaMap = myAvatar->getSkeletonModel()->getRig().getDebugAlphaMap();
// update animation debug alpha values
QStringList newAnimAlphaValues;
qint64 now = usecTimestampNow();
for (auto& iter : debugAlphaMap) {
QString key = iter.first;
float alpha = std::get<0>(iter.second);
auto prevIter = _prevDebugAlphaMap.find(key);
if (prevIter != _prevDebugAlphaMap.end()) {
float prevAlpha = std::get<0>(prevIter->second);
if (prevAlpha != alpha) {
// change detected: reset timer
_animAlphaValueChangedTimers[key] = now;
}
} else {
// new value: start timer
_animAlphaValueChangedTimers[key] = now;
}
AnimNodeType type = std::get<1>(iter.second);
if (type == AnimNodeType::Clip) {
// figure out the grayScale color of this line.
const float LIT_TIME = 2.0f;
const float FADE_OUT_TIME = 1.0f;
float grayScale = 0.0f;
float secondsElapsed = (float)(now - _animAlphaValueChangedTimers[key]) / (float)USECS_PER_SECOND;
if (secondsElapsed < LIT_TIME) {
grayScale = 1.0f;
} else if (secondsElapsed < LIT_TIME + FADE_OUT_TIME) {
grayScale = (FADE_OUT_TIME - (secondsElapsed - LIT_TIME)) / FADE_OUT_TIME;
} else {
grayScale = 0.0f;
}
if (grayScale > 0.0f) {
// append grayScaleColor to start of debug string
newAnimAlphaValues << QString::number(grayScale, 'f', 2) + "|" + key + ": " + QString::number(alpha, 'f', 3);
}
}
}
_animAlphaValues = newAnimAlphaValues;
_prevDebugAlphaMap = debugAlphaMap;
emit animAlphaValuesChanged();
// update animation anim vars
_animVarsList.clear();
auto animVars = myAvatar->getSkeletonModel()->getRig().getAnimVars().toDebugMap();
for (auto& iter : animVars) {
QString key = iter.first;
QString value = iter.second;
auto prevIter = _prevAnimVars.find(key);
if (prevIter != _prevAnimVars.end()) {
QString prevValue = prevIter->second;
if (value != prevValue) {
// change detected: reset timer
_animVarChangedTimers[key] = now;
}
} else {
// new value: start timer
_animVarChangedTimers[key] = now;
}
// figure out the grayScale color of this line.
const float LIT_TIME = 2.0f;
const float FADE_OUT_TIME = 0.5f;
float grayScale = 0.0f;
float secondsElapsed = (float)(now - _animVarChangedTimers[key]) / (float)USECS_PER_SECOND;
if (secondsElapsed < LIT_TIME) {
grayScale = 1.0f;
} else if (secondsElapsed < LIT_TIME + FADE_OUT_TIME) {
grayScale = (FADE_OUT_TIME - (secondsElapsed - LIT_TIME)) / FADE_OUT_TIME;
} else {
grayScale = 0.0f;
}
if (grayScale > 0.0f) {
// append grayScaleColor to start of debug string
_animVarsList << QString::number(grayScale, 'f', 2) + "|" + key + ": " + value;
}
}
_prevAnimVars = animVars;
emit animVarsChanged();
// animation state machines
_animStateMachines.clear();
auto stateMachineMap = myAvatar->getSkeletonModel()->getRig().getStateMachineMap();
for (auto& iter : stateMachineMap) {
_animStateMachines << iter.second;
}
emit animStateMachinesChanged();
}

View file

@ -0,0 +1,55 @@
//
// Created by Anthony J. Thibault 2018/08/06
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AnimStats_h
#define hifi_AnimStats_h
#include <OffscreenQmlElement.h>
#include <AnimContext.h>
class AnimStats : public QQuickItem {
Q_OBJECT
HIFI_QML_DECL
Q_PROPERTY(QStringList animAlphaValues READ animAlphaValues NOTIFY animAlphaValuesChanged)
Q_PROPERTY(QStringList animVars READ animVars NOTIFY animVarsChanged)
Q_PROPERTY(QStringList animStateMachines READ animStateMachines NOTIFY animStateMachinesChanged)
public:
static AnimStats* getInstance();
AnimStats(QQuickItem* parent = nullptr);
void updateStats(bool force = false);
QStringList animAlphaValues() { return _animAlphaValues; }
QStringList animVars() { return _animVarsList; }
QStringList animStateMachines() { return _animStateMachines; }
public slots:
void forceUpdateStats() { updateStats(true); }
signals:
void animAlphaValuesChanged();
void animVarsChanged();
void animStateMachinesChanged();
private:
QStringList _animAlphaValues;
AnimContext::DebugAlphaMap _prevDebugAlphaMap; // alpha values from previous frame
std::map<QString, qint64> _animAlphaValueChangedTimers; // last time alpha value has changed
QStringList _animVarsList;
std::map<QString, QString> _prevAnimVars; // anim vars from previous frame
std::map<QString, qint64> _animVarChangedTimers; // last time animVar value has changed.
QStringList _animStateMachines;
};
#endif // hifi_AnimStats_h

View file

@ -207,14 +207,6 @@ void Stats::updateStats(bool force) {
// Third column, avatar stats
auto myAvatar = avatarManager->getMyAvatar();
auto animStack = myAvatar->getSkeletonModel()->getRig().getAnimStack();
_animStackNames.clear();
for (auto animStackIterator = animStack.begin(); animStackIterator != animStack.end(); ++animStackIterator) {
_animStackNames << animStackIterator->first + ": " + QString::number(animStackIterator->second,'f',3);
}
emit animStackNamesChanged();
glm::vec3 avatarPos = myAvatar->getWorldPosition();
STAT_UPDATE(position, QVector3D(avatarPos.x, avatarPos.y, avatarPos.z));
STAT_UPDATE_FLOAT(speed, glm::length(myAvatar->getWorldVelocity()), 0.01f);

View file

@ -134,7 +134,6 @@ private: \
* @property {number} batchFrameTime - <em>Read-only.</em>
* @property {number} engineFrameTime - <em>Read-only.</em>
* @property {number} avatarSimulationTime - <em>Read-only.</em>
* @property {string[]} animStackNames - <em>Read-only.</em>
*
*
* @property {number} x
@ -292,7 +291,6 @@ class Stats : public QQuickItem {
STATS_PROPERTY(float, batchFrameTime, 0)
STATS_PROPERTY(float, engineFrameTime, 0)
STATS_PROPERTY(float, avatarSimulationTime, 0)
Q_PROPERTY(QStringList animStackNames READ animStackNames NOTIFY animStackNamesChanged)
STATS_PROPERTY(int, stylusPicksCount, 0)
STATS_PROPERTY(int, rayPicksCount, 0)
@ -326,7 +324,6 @@ public:
}
QStringList downloadUrls () { return _downloadUrls; }
QStringList animStackNames() { return _animStackNames; }
public slots:
void forceUpdateStats() { updateStats(true); }
@ -1028,13 +1025,6 @@ signals:
*/
void avatarSimulationTimeChanged();
/**jsdoc
* Triggered when the value of the <code>animStackNames</code> property changes.
* @function Stats.animStackNamesChanged
* @returns {Signal}
*/
void animStackNamesChanged();
/**jsdoc
* Triggered when the value of the <code>rectifiedTextureCount</code> property changes.
* @function Stats.rectifiedTextureCountChanged
@ -1049,7 +1039,6 @@ signals:
*/
void decimatedTextureCountChanged();
// QQuickItem signals.
/**jsdoc
@ -1336,7 +1325,6 @@ private:
QString _monospaceFont;
const AudioIOStats* _audioStats;
QStringList _downloadUrls = QStringList();
QStringList _animStackNames = QStringList();
};
#endif // hifi_Stats_h

View file

@ -238,7 +238,9 @@ void Base3DOverlay::setProperties(const QVariantMap& originalProperties) {
*/
QVariant Base3DOverlay::getProperty(const QString& property) {
if (property == "name") {
return _name;
return _nameLock.resultWithReadLock<QString>([&] {
return _name;
});
}
if (property == "position" || property == "start" || property == "p1" || property == "point") {
return vec3toVariant(getWorldPosition());
@ -346,6 +348,20 @@ void Base3DOverlay::setVisible(bool visible) {
notifyRenderVariableChange();
}
QString Base3DOverlay::getName() const {
return _nameLock.resultWithReadLock<QString>([&] {
return QString("Overlay:") + _name;
});
}
void Base3DOverlay::setName(QString name) {
_nameLock.withWriteLock([&] {
_name = name;
});
}
render::ItemKey Base3DOverlay::getKey() {
auto builder = render::ItemKey::Builder(Overlay::getKey());
@ -364,4 +380,4 @@ render::ItemKey Base3DOverlay::getKey() {
}
return builder.build();
}
}

View file

@ -29,8 +29,8 @@ public:
virtual OverlayID getOverlayID() const override { return OverlayID(getID().toString()); }
void setOverlayID(OverlayID overlayID) override { setID(overlayID); }
virtual QString getName() const override { return QString("Overlay:") + _name; }
void setName(QString name) { _name = name; }
virtual QString getName() const override;
void setName(QString name);
// getters
virtual bool is3D() const override { return true; }
@ -107,6 +107,7 @@ protected:
mutable bool _renderVariableDirty { true };
QString _name;
mutable ReadWriteLockable _nameLock;
};
#endif // hifi_Base3DOverlay_h

View file

@ -0,0 +1,13 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OverlayTransformNode.h"
template<>
glm::vec3 BaseNestableTransformNode<Base3DOverlay>::getActualScale(const std::shared_ptr<Base3DOverlay>& nestablePointer) const {
return nestablePointer->getBounds().getScale();
}

View file

@ -0,0 +1,21 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_OverlayTransformNode_h
#define hifi_OverlayTransformNode_h
#include "NestableTransformNode.h"
#include "Base3DOverlay.h"
// For 3D overlays only
class OverlayTransformNode : public BaseNestableTransformNode<Base3DOverlay> {
public:
OverlayTransformNode(std::weak_ptr<Base3DOverlay> spatiallyNestable, int jointIndex) : BaseNestableTransformNode(spatiallyNestable, jointIndex) {};
};
#endif // hifi_OverlayTransformNode_h

View file

@ -27,7 +27,7 @@ AnimBlendLinear::~AnimBlendLinear() {
const AnimPoseVec& AnimBlendLinear::evaluate(const AnimVariantMap& animVars, const AnimContext& context, float dt, AnimVariantMap& triggersOut) {
_alpha = animVars.lookup(_alphaVar, _alpha);
float parentAlpha = _animStack[_id];
float parentDebugAlpha = context.getDebugAlpha(_id);
if (_children.size() == 0) {
for (auto&& pose : _poses) {
@ -35,7 +35,7 @@ const AnimPoseVec& AnimBlendLinear::evaluate(const AnimVariantMap& animVars, con
}
} else if (_children.size() == 1) {
_poses = _children[0]->evaluate(animVars, context, dt, triggersOut);
_animStack[_children[0]->getID()] = parentAlpha;
context.setDebugAlpha(_children[0]->getID(), parentDebugAlpha, _children[0]->getType());
} else {
float clampedAlpha = glm::clamp(_alpha, 0.0f, (float)(_children.size() - 1));
size_t prevPoseIndex = glm::floor(clampedAlpha);
@ -48,12 +48,12 @@ const AnimPoseVec& AnimBlendLinear::evaluate(const AnimVariantMap& animVars, con
float weight2 = 0.0f;
if (prevPoseIndex == nextPoseIndex) {
weight2 = 1.0f;
_animStack[_children[nextPoseIndex]->getID()] = weight2 * parentAlpha;
context.setDebugAlpha(_children[nextPoseIndex]->getID(), weight2 * parentDebugAlpha, _children[nextPoseIndex]->getType());
} else {
weight2 = alpha;
weight1 = 1.0f - weight2;
_animStack[_children[prevPoseIndex]->getID()] = weight1 * parentAlpha;
_animStack[_children[nextPoseIndex]->getID()] = weight2 * parentAlpha;
context.setDebugAlpha(_children[prevPoseIndex]->getID(), weight1 * parentDebugAlpha, _children[prevPoseIndex]->getType());
context.setDebugAlpha(_children[nextPoseIndex]->getID(), weight2 * parentDebugAlpha, _children[nextPoseIndex]->getType());
}
}
processOutputJoints(triggersOut);

View file

@ -62,9 +62,7 @@ const AnimPoseVec& AnimBlendLinearMove::evaluate(const AnimVariantMap& animVars,
speed = animVars.lookup("moveForwardSpeed", speed);
}
_alpha = calculateAlpha(speed, _characteristicSpeeds);
float parentAlpha = _animStack[_id];
_animStack["speed"] = speed;
float parentDebugAlpha = context.getDebugAlpha(_id);
if (_children.size() == 0) {
for (auto&& pose : _poses) {
@ -77,7 +75,7 @@ const AnimPoseVec& AnimBlendLinearMove::evaluate(const AnimVariantMap& animVars,
float prevDeltaTime, nextDeltaTime;
setFrameAndPhase(dt, alpha, prevPoseIndex, nextPoseIndex, &prevDeltaTime, &nextDeltaTime, triggersOut);
evaluateAndBlendChildren(animVars, context, triggersOut, alpha, prevPoseIndex, nextPoseIndex, prevDeltaTime, nextDeltaTime);
_animStack[_children[0]->getID()] = parentAlpha;
context.setDebugAlpha(_children[0]->getID(), parentDebugAlpha, _children[0]->getType());
} else {
auto clampedAlpha = glm::clamp(_alpha, 0.0f, (float)(_children.size() - 1));
auto prevPoseIndex = glm::floor(clampedAlpha);
@ -87,17 +85,11 @@ const AnimPoseVec& AnimBlendLinearMove::evaluate(const AnimVariantMap& animVars,
setFrameAndPhase(dt, alpha, prevPoseIndex, nextPoseIndex, &prevDeltaTime, &nextDeltaTime, triggersOut);
evaluateAndBlendChildren(animVars, context, triggersOut, alpha, prevPoseIndex, nextPoseIndex, prevDeltaTime, nextDeltaTime);
// weights are for animation stack debug purposes only.
float weight1 = 0.0f;
float weight2 = 0.0f;
if (prevPoseIndex == nextPoseIndex) {
weight2 = 1.0f;
_animStack[_children[nextPoseIndex]->getID()] = weight2 * parentAlpha;
context.setDebugAlpha(_children[nextPoseIndex]->getID(), parentDebugAlpha, _children[nextPoseIndex]->getType());
} else {
weight2 = alpha;
weight1 = 1.0f - weight2;
_animStack[_children[prevPoseIndex]->getID()] = weight1 * parentAlpha;
_animStack[_children[nextPoseIndex]->getID()] = weight2 * parentAlpha;
context.setDebugAlpha(_children[prevPoseIndex]->getID(), (1.0f - alpha) * parentDebugAlpha, _children[prevPoseIndex]->getType());
context.setDebugAlpha(_children[nextPoseIndex]->getID(), alpha * parentDebugAlpha, _children[nextPoseIndex]->getType());
}
}

View file

@ -14,8 +14,27 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <QString>
#include <QStringList>
#include <map>
enum class AnimNodeType {
Clip = 0,
BlendLinear,
BlendLinearMove,
Overlay,
StateMachine,
Manipulator,
InverseKinematics,
DefaultPose,
TwoBoneIK,
PoleVectorConstraint,
NumTypes
};
class AnimContext {
public:
AnimContext() {}
AnimContext(bool enableDebugDrawIKTargets, bool enableDebugDrawIKConstraints, bool enableDebugDrawIKChains,
const glm::mat4& geometryToRigMatrix, const glm::mat4& rigToWorldMatrix);
@ -25,6 +44,39 @@ public:
const glm::mat4& getGeometryToRigMatrix() const { return _geometryToRigMatrix; }
const glm::mat4& getRigToWorldMatrix() const { return _rigToWorldMatrix; }
float getDebugAlpha(const QString& key) const {
auto it = _debugAlphaMap.find(key);
if (it != _debugAlphaMap.end()) {
return std::get<0>(it->second);
} else {
return 1.0f;
}
}
using DebugAlphaMapValue = std::tuple<float, AnimNodeType>;
using DebugAlphaMap = std::map<QString, DebugAlphaMapValue>;
void setDebugAlpha(const QString& key, float alpha, AnimNodeType type) const {
_debugAlphaMap[key] = DebugAlphaMapValue(alpha, type);
}
const DebugAlphaMap& getDebugAlphaMap() const {
return _debugAlphaMap;
}
using DebugStateMachineMapValue = QString;
using DebugStateMachineMap = std::map<QString, DebugStateMachineMapValue>;
void addStateMachineInfo(const QString& stateMachineName, const QString& currentState, const QString& previousState, bool duringInterp, float alpha) const {
if (duringInterp) {
_stateMachineMap[stateMachineName] = QString("%1: %2 -> %3 (%4)").arg(stateMachineName).arg(previousState).arg(currentState).arg(QString::number(alpha, 'f', 2));
} else {
_stateMachineMap[stateMachineName] = QString("%1: %2").arg(stateMachineName).arg(currentState);
}
}
const DebugStateMachineMap& getStateMachineMap() const { return _stateMachineMap; }
protected:
bool _enableDebugDrawIKTargets { false };
@ -32,6 +84,10 @@ protected:
bool _enableDebugDrawIKChains { false };
glm::mat4 _geometryToRigMatrix;
glm::mat4 _rigToWorldMatrix;
// used for debugging internal state of animation system.
mutable DebugAlphaMap _debugAlphaMap;
mutable DebugStateMachineMap _stateMachineMap;
};
#endif // hifi_AnimContext_h

View file

@ -12,10 +12,6 @@
#include <QtGlobal>
std::map<QString, float> AnimNode::_animStack = {
{"none", 0.0f}
};
AnimNode::Pointer AnimNode::getParent() {
return _parent.lock();
}

View file

@ -36,19 +36,7 @@ class QJsonObject;
class AnimNode : public std::enable_shared_from_this<AnimNode> {
public:
enum class Type {
Clip = 0,
BlendLinear,
BlendLinearMove,
Overlay,
StateMachine,
Manipulator,
InverseKinematics,
DefaultPose,
TwoBoneIK,
PoleVectorConstraint,
NumTypes
};
using Type = AnimNodeType;
using Pointer = std::shared_ptr<AnimNode>;
using ConstPointer = std::shared_ptr<const AnimNode>;
@ -84,7 +72,6 @@ public:
}
void setCurrentFrame(float frame);
const std::map<QString, float> getAnimStack() { return _animStack; }
template <typename F>
bool traverse(F func) {
@ -127,9 +114,6 @@ protected:
std::weak_ptr<AnimNode> _parent;
std::vector<QString> _outputJointNames;
// global available to Stats.h
static std::map<QString, float> _animStack;
// no copies
AnimNode(const AnimNode&) = delete;
AnimNode& operator=(const AnimNode&) = delete;

View file

@ -23,9 +23,7 @@ AnimStateMachine::~AnimStateMachine() {
const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, const AnimContext& context, float dt, AnimVariantMap& triggersOut) {
if (_id.contains("userAnimStateMachine")) {
_animStack.clear();
}
float parentDebugAlpha = context.getDebugAlpha(_id);
QString desiredStateID = animVars.lookup(_currentStateVar, _currentState->getID());
if (_currentState->getID() != desiredStateID) {
@ -33,8 +31,6 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
bool foundState = false;
for (auto& state : _states) {
if (state->getID() == desiredStateID) {
// parenthesis means previous state, which is a snapshot.
_previousStateID = "(" + _currentState->getID() + ")";
switchState(animVars, context, state);
foundState = true;
break;
@ -48,8 +44,6 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
// evaluate currentState transitions
auto desiredState = evaluateTransitions(animVars);
if (desiredState != _currentState) {
// parenthesis means previous state, which is a snapshot.
_previousStateID = "(" + _currentState->getID() + ")";
switchState(animVars, context, desiredState);
}
@ -57,17 +51,8 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
auto currentStateNode = _children[_currentState->getChildIndex()];
assert(currentStateNode);
if (!_previousStateID.contains("none")) {
_animStack[_previousStateID] = 1.0f - _alpha;
}
if (_duringInterp) {
_alpha += _alphaVel * dt;
if (_alpha > 1.0f) {
_animStack[_currentState->getID()] = 1.0f;
} else {
_animStack[_currentState->getID()] = _alpha;
}
if (_alpha < 1.0f) {
AnimPoseVec* nextPoses = nullptr;
AnimPoseVec* prevPoses = nullptr;
@ -88,26 +73,27 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
if (_poses.size() > 0 && nextPoses && prevPoses && nextPoses->size() > 0 && prevPoses->size() > 0) {
::blend(_poses.size(), &(prevPoses->at(0)), &(nextPoses->at(0)), _alpha, &_poses[0]);
}
context.setDebugAlpha(_currentState->getID(), _alpha * parentDebugAlpha, _children[_currentState->getChildIndex()]->getType());
} else {
_duringInterp = false;
if (_animStack.count(_previousStateID) > 0) {
_animStack.erase(_previousStateID);
}
_previousStateID = "none";
_prevPoses.clear();
_nextPoses.clear();
}
}
if (!_duringInterp) {
_animStack[_currentState->getID()] = 1.0f;
context.setDebugAlpha(_currentState->getID(), parentDebugAlpha, _children[_currentState->getChildIndex()]->getType());
_poses = currentStateNode->evaluate(animVars, context, dt, triggersOut);
}
processOutputJoints(triggersOut);
context.addStateMachineInfo(_id, _currentState->getID(), _previousState->getID(), _duringInterp, _alpha);
return _poses;
}
void AnimStateMachine::setCurrentState(State::Pointer state) {
_previousState = _currentState ? _currentState : state;
_currentState = state;
}
@ -152,7 +138,7 @@ void AnimStateMachine::switchState(const AnimVariantMap& animVars, const AnimCon
qCDebug(animation) << "AnimStateMachine::switchState:" << _currentState->getID() << "->" << desiredState->getID() << "duration =" << duration << "targetFrame =" << desiredState->_interpTarget << "interpType = " << (int)_interpType;
#endif
_currentState = desiredState;
setCurrentState(desiredState);
}
AnimStateMachine::State::Pointer AnimStateMachine::evaluateTransitions(const AnimVariantMap& animVars) const {

View file

@ -138,9 +138,9 @@ protected:
float _alpha = 0.0f;
AnimPoseVec _prevPoses;
AnimPoseVec _nextPoses;
QString _previousStateID { "none" };
State::Pointer _currentState;
State::Pointer _previousState;
std::vector<State::Pointer> _states;
QString _currentStateVar;

View file

@ -67,6 +67,7 @@ QScriptValue AnimVariantMap::animVariantMapToScriptValue(QScriptEngine* engine,
}
return target;
}
void AnimVariantMap::copyVariantsFrom(const AnimVariantMap& other) {
for (auto& pair : other._map) {
_map[pair.first] = pair.second;
@ -124,3 +125,43 @@ void AnimVariantMap::animVariantMapFromScriptValue(const QScriptValue& source) {
}
}
}
std::map<QString, QString> AnimVariantMap::toDebugMap() const {
std::map<QString, QString> result;
for (auto& pair : _map) {
switch (pair.second.getType()) {
case AnimVariant::Type::Bool:
result[pair.first] = QString("%1").arg(pair.second.getBool());
break;
case AnimVariant::Type::Int:
result[pair.first] = QString("%1").arg(pair.second.getInt());
break;
case AnimVariant::Type::Float:
result[pair.first] = QString::number(pair.second.getFloat(), 'f', 3);
break;
case AnimVariant::Type::Vec3: {
glm::vec3 value = pair.second.getVec3();
result[pair.first] = QString("(%1, %2, %3)").
arg(QString::number(value.x, 'f', 3)).
arg(QString::number(value.y, 'f', 3)).
arg(QString::number(value.z, 'f', 3));
break;
}
case AnimVariant::Type::Quat: {
glm::quat value = pair.second.getQuat();
result[pair.first] = QString("(%1, %2, %3, %4)").
arg(QString::number(value.x, 'f', 3)).
arg(QString::number(value.y, 'f', 3)).
arg(QString::number(value.z, 'f', 3)).
arg(QString::number(value.w, 'f', 3));
break;
}
case AnimVariant::Type::String:
result[pair.first] = pair.second.getString();
break;
default:
assert(("invalid AnimVariant::Type", false));
}
}
return result;
}

View file

@ -235,6 +235,9 @@ public:
void animVariantMapFromScriptValue(const QScriptValue& object);
void copyVariantsFrom(const AnimVariantMap& other);
// For stat debugging.
std::map<QString, QString> toDebugMap() const;
#ifdef NDEBUG
void dump() const {
qCDebug(animation) << "AnimVariantMap =";

View file

@ -1061,8 +1061,10 @@ void Rig::updateAnimations(float deltaTime, const glm::mat4& rootTransform, cons
// animations haven't fully loaded yet.
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
}
_lastAnimVars = _animVars;
_animVars.clearTriggers();
_animVars = triggersOut;
_lastContext = context;
}
applyOverridePoses();
buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);

View file

@ -222,7 +222,10 @@ public:
// input assumed to be in rig space
void computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const;
const std::map<QString, float> getAnimStack() { return _animNode->getAnimStack(); }
// used to debug animation playback
const AnimContext::DebugAlphaMap& getDebugAlphaMap() const { return _lastContext.getDebugAlphaMap(); }
const AnimVariantMap& getAnimVars() const { return _lastAnimVars; }
const AnimContext::DebugStateMachineMap& getStateMachineMap() const { return _lastContext.getStateMachineMap(); }
void toggleSmoothPoleVectors() { _smoothPoleVectors = !_smoothPoleVectors; };
signals:
@ -388,6 +391,9 @@ protected:
int _rigId;
bool _headEnabled { false };
AnimContext _lastContext;
AnimVariantMap _lastAnimVars;
};
#endif /* defined(__hifi__Rig__) */

View file

@ -0,0 +1,13 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AvatarTransformNode.h"
template<>
glm::vec3 BaseNestableTransformNode<Avatar>::getActualScale(const std::shared_ptr<Avatar>& nestablePointer) const {
return nestablePointer->scaleForChildren();
}

View file

@ -0,0 +1,20 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AvatarTransformNode_h
#define hifi_AvatarTransformNode_h
#include "NestableTransformNode.h"
#include "Avatar.h"
class AvatarTransformNode : public BaseNestableTransformNode<Avatar> {
public:
AvatarTransformNode(std::weak_ptr<Avatar> spatiallyNestable, int jointIndex) : BaseNestableTransformNode(spatiallyNestable, jointIndex) {};
};
#endif // hifi_AvatarTransformNode_h

View file

@ -14,7 +14,6 @@
#include <gpu/Batch.h>
#include <NodeList.h>
#include <recording/Deck.h>
#include <DependencyManager.h>
#include <GeometryUtil.h>
#include <trackers/FaceTracker.h>

View file

@ -21,6 +21,7 @@
#include "AvatarLogging.h"
#include "AvatarTraits.h"
#include "Profile.h"
void AvatarReplicas::addReplica(const QUuid& parentID, AvatarSharedPointer replica) {
if (parentID == QUuid()) {
@ -214,6 +215,7 @@ AvatarSharedPointer AvatarHashMap::findAvatar(const QUuid& sessionUUID) const {
}
void AvatarHashMap::processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
DETAILED_PROFILE_RANGE(network, __FUNCTION__);
PerformanceTimer perfTimer("receiveAvatar");
// enumerate over all of the avatars in this packet
// only add them if mixerWeakPointer points to something (meaning that mixer is still around)

View file

@ -11,7 +11,7 @@ layout(location=0) in vec2 varTexCoord0;
layout(location=0) out vec4 outFragColor;
void main(void) {
ivec2 texCoord = ivec2(floor(varTexCoord0 * textureData.textureSize));
ivec2 texCoord = ivec2(floor(varTexCoord0 * vec2(textureData.textureSize)));
texCoord.x /= 2;
int row = int(floor(gl_FragCoord.y));
if (row % 2 > 0) {

View file

@ -1609,6 +1609,7 @@ PolyVoxEntityRenderer::PolyVoxEntityRenderer(const EntityItemPointer& entity) :
_vertexFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
_vertexFormat->setAttribute(gpu::Stream::NORMAL, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 12);
});
_params = std::make_shared<gpu::Buffer>(sizeof(glm::vec4), nullptr);
}
ShapeKey PolyVoxEntityRenderer::getShapeKey() {
@ -1671,9 +1672,12 @@ void PolyVoxEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& s
void PolyVoxEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) {
_lastVoxelToWorldMatrix = entity->voxelToWorldMatrix();
_lastVoxelVolumeSize = entity->getVoxelVolumeSize();
_params->setSubData(0, vec4(_lastVoxelVolumeSize, 0.0));
graphics::MeshPointer newMesh;
entity->withReadLock([&] {
newMesh = entity->_mesh;
});
if (newMesh && newMesh->getIndexBuffer()._buffer) {
@ -1686,6 +1690,7 @@ void PolyVoxEntityRenderer::doRender(RenderArgs* args) {
return;
}
PerformanceTimer perfTimer("RenderablePolyVoxEntityItem::render");
gpu::Batch& batch = *args->_batch;
@ -1695,6 +1700,7 @@ void PolyVoxEntityRenderer::doRender(RenderArgs* args) {
batch.setInputBuffer(gpu::Stream::POSITION, _mesh->getVertexBuffer()._buffer, 0,
sizeof(PolyVox::PositionMaterialNormal));
// TODO -- should we be setting this?
// batch.setInputBuffer(gpu::Stream::NORMAL, mesh->getVertexBuffer()._buffer,
// 12,
@ -1710,7 +1716,7 @@ void PolyVoxEntityRenderer::doRender(RenderArgs* args) {
}
}
batch._glUniform3f(entities_renderer::slot::uniform::PolyvoxVoxelSize, _lastVoxelVolumeSize.x, _lastVoxelVolumeSize.y, _lastVoxelVolumeSize.z);
batch.setUniformBuffer(0, _params);
batch.drawIndexed(gpu::TRIANGLES, (gpu::uint32)_mesh->getNumIndices(), 0);
}

View file

@ -187,6 +187,7 @@ private:
#endif
graphics::MeshPointer _mesh;
gpu::BufferPointer _params;
std::array<NetworkTexturePointer, 3> _xyzTextures;
glm::vec3 _lastVoxelVolumeSize;
glm::mat4 _lastVoxelToWorldMatrix;

View file

@ -15,7 +15,6 @@
#define ENTITIES_SHADER_CONSTANTS_H
// Polyvox
#define ENTITIES_UNIFORM_POLYVOX_VOXEL_SIZE 0
#define ENTITIES_TEXTURE_POLYVOX_XMAP 0
#define ENTITIES_TEXTURE_POLYVOX_YMAP 1
#define ENTITIES_TEXTURE_POLYVOX_ZMAP 2
@ -26,17 +25,6 @@
namespace entities_renderer { namespace slot {
namespace uniform {
enum Uniform {
PolyvoxVoxelSize = ENTITIES_UNIFORM_POLYVOX_VOXEL_SIZE,
};
}
namespace buffer {
enum Buffer {
};
} // namespace buffer
namespace texture {
enum Texture {
PolyvoxXMap = ENTITIES_TEXTURE_POLYVOX_XMAP,

View file

@ -45,7 +45,7 @@ void main(void) {
int frontCondition = 1 -int(gl_FrontFacing) * 2;
vec3 color = varColor.rgb;
packDeferredFragmentTranslucent(
interpolatedNormal * frontCondition,
interpolatedNormal * float(frontCondition),
texel.a * varColor.a,
polyline.color * texel.rgb + fadeEmissive,
vec3(0.01, 0.01, 0.01),

View file

@ -23,15 +23,22 @@ layout(location=RENDER_UTILS_ATTR_POSITION_WS) in vec4 _worldPosition;
layout(binding=ENTITIES_TEXTURE_POLYVOX_XMAP) uniform sampler2D xMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_YMAP) uniform sampler2D yMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_ZMAP) uniform sampler2D zMap;
layout(location=ENTITIES_UNIFORM_POLYVOX_VOXEL_SIZE) uniform vec3 voxelVolumeSize;
struct PolyvoxParams {
vec4 voxelVolumeSize;
};
layout(binding=0) uniform polyvoxParamsBuffer {
PolyvoxParams params;
};
void main(void) {
vec3 worldNormal = cross(dFdy(_worldPosition.xyz), dFdx(_worldPosition.xyz));
worldNormal = normalize(worldNormal);
float inPositionX = (_worldPosition.x - 0.5) / voxelVolumeSize.x;
float inPositionY = (_worldPosition.y - 0.5) / voxelVolumeSize.y;
float inPositionZ = (_worldPosition.z - 0.5) / voxelVolumeSize.z;
float inPositionX = (_worldPosition.x - 0.5) / params.voxelVolumeSize.x;
float inPositionY = (_worldPosition.y - 0.5) / params.voxelVolumeSize.y;
float inPositionZ = (_worldPosition.z - 0.5) / params.voxelVolumeSize.z;
vec4 xyDiffuse = texture(xMap, vec2(-inPositionX, -inPositionY));
vec4 xzDiffuse = texture(yMap, vec2(-inPositionX, inPositionZ));

View file

@ -27,7 +27,13 @@ layout(binding=ENTITIES_TEXTURE_POLYVOX_XMAP) uniform sampler2D xMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_YMAP) uniform sampler2D yMap;
layout(binding=ENTITIES_TEXTURE_POLYVOX_ZMAP) uniform sampler2D zMap;
layout(location=ENTITIES_UNIFORM_POLYVOX_VOXEL_SIZE) uniform vec3 voxelVolumeSize;
struct PolyvoxParams {
vec4 voxelVolumeSize;
};
layout(binding=0) uniform polyvoxParamsBuffer {
PolyvoxParams params;
};
// Declare after all samplers to prevent sampler location mix up with voxel shading (sampler locations are hardcoded in RenderablePolyVoxEntityItem)
<$declareFadeFragment()$>
@ -42,9 +48,9 @@ void main(void) {
vec3 worldNormal = cross(dFdy(_worldPosition.xyz), dFdx(_worldPosition.xyz));
worldNormal = normalize(worldNormal);
float inPositionX = (_worldPosition.x - 0.5) / voxelVolumeSize.x;
float inPositionY = (_worldPosition.y - 0.5) / voxelVolumeSize.y;
float inPositionZ = (_worldPosition.z - 0.5) / voxelVolumeSize.z;
float inPositionX = (_worldPosition.x - 0.5) / params.voxelVolumeSize.x;
float inPositionY = (_worldPosition.y - 0.5) / params.voxelVolumeSize.y;
float inPositionZ = (_worldPosition.z - 0.5) / params.voxelVolumeSize.z;
vec4 xyDiffuse = texture(xMap, vec2(-inPositionX, -inPositionY));
vec4 xzDiffuse = texture(yMap, vec2(-inPositionX, inPositionZ));

View file

@ -0,0 +1,13 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "EntityTransformNode.h"
template<>
glm::vec3 BaseNestableTransformNode<EntityItem>::getActualScale(const std::shared_ptr<EntityItem>& nestablePointer) const {
return nestablePointer->getScaledDimensions();
}

View file

@ -0,0 +1,20 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_EntityTransformNode_h
#define hifi_EntityTransformNode_h
#include "NestableTransformNode.h"
#include "EntityItem.h"
class EntityTransformNode : public BaseNestableTransformNode<EntityItem> {
public:
EntityTransformNode(std::weak_ptr<EntityItem> spatiallyNestable, int jointIndex) : BaseNestableTransformNode(spatiallyNestable, jointIndex) {};
};
#endif // hifi_EntityTransformNode_h

View file

@ -585,13 +585,8 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
FBXMesh& fbxMesh = extractedMesh;
graphics::MeshPointer mesh(new graphics::Mesh());
// Grab the vertices in a buffer
auto vb = std::make_shared<gpu::Buffer>();
vb->setData(extractedMesh.vertices.size() * sizeof(glm::vec3),
(const gpu::Byte*) extractedMesh.vertices.data());
gpu::BufferView vbv(vb, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ));
mesh->setVertexBuffer(vbv);
bool hasBlendShapes = !fbxMesh.blendshapes.empty();
int numVerts = extractedMesh.vertices.size();
if (!fbxMesh.normals.empty() && fbxMesh.tangents.empty()) {
// Fill with a dummy value to force tangents to be present if there are normals
@ -607,43 +602,61 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
}
}
// evaluate all attribute channels sizes
const int normalsSize = fbxMesh.normals.size() * sizeof(NormalType);
const int tangentsSize = fbxMesh.tangents.size() * sizeof(NormalType);
// evaluate all attribute elements and data sizes
// Position is a vec3
const auto positionElement = gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ);
const int positionsSize = numVerts * positionElement.getSize();
// Normal and tangent are always there together packed in normalized xyz32bits word (times 2)
const auto normalElement = FBX_NORMAL_ELEMENT;
const int normalsSize = fbxMesh.normals.size() * normalElement.getSize();
const int tangentsSize = fbxMesh.tangents.size() * normalElement.getSize();
// If there are normals then there should be tangents
assert(normalsSize <= tangentsSize);
if (tangentsSize > normalsSize) {
qWarning() << "Unexpected tangents in " << url;
}
const auto normalsAndTangentsSize = normalsSize + tangentsSize;
const int normalsAndTangentsStride = 2 * sizeof(NormalType);
const int colorsSize = fbxMesh.colors.size() * sizeof(ColorType);
const int normalsAndTangentsStride = 2 * normalElement.getSize();
// Color attrib
const auto colorElement = FBX_COLOR_ELEMENT;
const int colorsSize = fbxMesh.colors.size() * colorElement.getSize();
// Texture coordinates are stored in 2 half floats
const int texCoordsSize = fbxMesh.texCoords.size() * sizeof(vec2h);
const int texCoords1Size = fbxMesh.texCoords1.size() * sizeof(vec2h);
const auto texCoordsElement = gpu::Element(gpu::VEC2, gpu::HALF, gpu::UV);
const int texCoordsSize = fbxMesh.texCoords.size() * texCoordsElement.getSize();
const int texCoords1Size = fbxMesh.texCoords1.size() * texCoordsElement.getSize();
int clusterIndicesSize = fbxMesh.clusterIndices.size() * sizeof(uint8_t);
if (fbxMesh.clusters.size() > UINT8_MAX) {
// we need 16 bits instead of just 8 for clusterIndices
clusterIndicesSize *= 2;
}
// Support for 4 skinning clusters:
// 4 Indices are uint8 ideally, uint16 if more than 256.
const auto clusterIndiceElement = (fbxMesh.clusters.size() < UINT8_MAX ? gpu::Element(gpu::VEC4, gpu::UINT8, gpu::XYZW) : gpu::Element(gpu::VEC4, gpu::UINT16, gpu::XYZW));
// 4 Weights are normalized 16bits
const auto clusterWeightElement = gpu::Element(gpu::VEC4, gpu::NUINT16, gpu::XYZW);
const int clusterWeightsSize = fbxMesh.clusterWeights.size() * sizeof(uint16_t);
// Cluster indices and weights must be the same sizes
const int NUM_CLUSTERS_PER_VERT = 4;
const int numVertClusters = (fbxMesh.clusterIndices.size() == fbxMesh.clusterWeights.size() ? fbxMesh.clusterIndices.size() / NUM_CLUSTERS_PER_VERT : 0);
const int clusterIndicesSize = numVertClusters * clusterIndiceElement.getSize();
const int clusterWeightsSize = numVertClusters * clusterWeightElement.getSize();
// Normals and tangents are interleaved
const int normalsOffset = 0;
const int tangentsOffset = normalsOffset + sizeof(NormalType);
const int colorsOffset = normalsOffset + normalsSize + tangentsSize;
// Decide on where to put what sequentially in a big buffer:
const int positionsOffset = 0;
const int normalsAndTangentsOffset = positionsOffset + positionsSize;
const int colorsOffset = normalsAndTangentsOffset + normalsAndTangentsSize;
const int texCoordsOffset = colorsOffset + colorsSize;
const int texCoords1Offset = texCoordsOffset + texCoordsSize;
const int clusterIndicesOffset = texCoords1Offset + texCoords1Size;
const int clusterWeightsOffset = clusterIndicesOffset + clusterIndicesSize;
const int totalAttributeSize = clusterWeightsOffset + clusterWeightsSize;
const int totalVertsSize = clusterWeightsOffset + clusterWeightsSize;
// Copy all attribute data in a single attribute buffer
auto attribBuffer = std::make_shared<gpu::Buffer>();
attribBuffer->resize(totalAttributeSize);
// Copy all vertex data in a single buffer
auto vertBuffer = std::make_shared<gpu::Buffer>();
vertBuffer->resize(totalVertsSize);
// First positions
vertBuffer->setSubData(positionsOffset, positionsSize, (const gpu::Byte*) extractedMesh.vertices.data());
// Interleave normals and tangents
if (normalsSize > 0) {
@ -651,8 +664,8 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
normalsAndTangents.reserve(fbxMesh.normals.size() + fbxMesh.tangents.size());
for (auto normalIt = fbxMesh.normals.constBegin(), tangentIt = fbxMesh.tangents.constBegin();
normalIt != fbxMesh.normals.constEnd();
++normalIt, ++tangentIt) {
normalIt != fbxMesh.normals.constEnd();
++normalIt, ++tangentIt) {
#if FBX_PACK_NORMALS
const auto normal = normalizeDirForPacking(*normalIt);
const auto tangent = normalizeDirForPacking(*tangentIt);
@ -665,9 +678,10 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
normalsAndTangents.push_back(packedNormal);
normalsAndTangents.push_back(packedTangent);
}
attribBuffer->setSubData(normalsOffset, normalsAndTangentsSize, (const gpu::Byte*) normalsAndTangents.data());
vertBuffer->setSubData(normalsAndTangentsOffset, normalsAndTangentsSize, (const gpu::Byte*) normalsAndTangents.data());
}
// Pack colors
if (colorsSize > 0) {
#if FBX_PACK_COLORS
std::vector<ColorType> colors;
@ -676,12 +690,13 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
for (const auto& color : fbxMesh.colors) {
colors.push_back(glm::packUnorm4x8(glm::vec4(color, 1.0f)));
}
attribBuffer->setSubData(colorsOffset, colorsSize, (const gpu::Byte*) colors.data());
vertBuffer->setSubData(colorsOffset, colorsSize, (const gpu::Byte*) colors.data());
#else
attribBuffer->setSubData(colorsOffset, colorsSize, (const gpu::Byte*) fbxMesh.colors.constData());
vertBuffer->setSubData(colorsOffset, colorsSize, (const gpu::Byte*) fbxMesh.colors.constData());
#endif
}
// Pack Texcoords 0 and 1 (if exists)
if (texCoordsSize > 0) {
QVector<vec2h> texCoordData;
texCoordData.reserve(fbxMesh.texCoords.size());
@ -692,9 +707,8 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
texCoordVec2h.y = glm::detail::toFloat16(texCoordVec2f.y);
texCoordData.push_back(texCoordVec2h);
}
attribBuffer->setSubData(texCoordsOffset, texCoordsSize, (const gpu::Byte*) texCoordData.constData());
vertBuffer->setSubData(texCoordsOffset, texCoordsSize, (const gpu::Byte*) texCoordData.constData());
}
if (texCoords1Size > 0) {
QVector<vec2h> texCoordData;
texCoordData.reserve(fbxMesh.texCoords1.size());
@ -705,69 +719,170 @@ void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
texCoordVec2h.y = glm::detail::toFloat16(texCoordVec2f.y);
texCoordData.push_back(texCoordVec2h);
}
attribBuffer->setSubData(texCoords1Offset, texCoords1Size, (const gpu::Byte*) texCoordData.constData());
vertBuffer->setSubData(texCoords1Offset, texCoords1Size, (const gpu::Byte*) texCoordData.constData());
}
if (fbxMesh.clusters.size() < UINT8_MAX) {
// yay! we can fit the clusterIndices within 8-bits
int32_t numIndices = fbxMesh.clusterIndices.size();
QVector<uint8_t> clusterIndices;
clusterIndices.resize(numIndices);
for (int32_t i = 0; i < numIndices; ++i) {
assert(fbxMesh.clusterIndices[i] <= UINT8_MAX);
clusterIndices[i] = (uint8_t)(fbxMesh.clusterIndices[i]);
// Clusters data
if (clusterIndicesSize > 0) {
if (fbxMesh.clusters.size() < UINT8_MAX) {
// yay! we can fit the clusterIndices within 8-bits
int32_t numIndices = fbxMesh.clusterIndices.size();
QVector<uint8_t> clusterIndices;
clusterIndices.resize(numIndices);
for (int32_t i = 0; i < numIndices; ++i) {
assert(fbxMesh.clusterIndices[i] <= UINT8_MAX);
clusterIndices[i] = (uint8_t)(fbxMesh.clusterIndices[i]);
}
vertBuffer->setSubData(clusterIndicesOffset, clusterIndicesSize, (const gpu::Byte*) clusterIndices.constData());
} else {
vertBuffer->setSubData(clusterIndicesOffset, clusterIndicesSize, (const gpu::Byte*) fbxMesh.clusterIndices.constData());
}
attribBuffer->setSubData(clusterIndicesOffset, clusterIndicesSize, (const gpu::Byte*) clusterIndices.constData());
} else {
attribBuffer->setSubData(clusterIndicesOffset, clusterIndicesSize, (const gpu::Byte*) fbxMesh.clusterIndices.constData());
}
attribBuffer->setSubData(clusterWeightsOffset, clusterWeightsSize, (const gpu::Byte*) fbxMesh.clusterWeights.constData());
if (clusterWeightsSize > 0) {
vertBuffer->setSubData(clusterWeightsOffset, clusterWeightsSize, (const gpu::Byte*) fbxMesh.clusterWeights.constData());
}
if (normalsSize) {
mesh->addAttribute(gpu::Stream::NORMAL,
graphics::BufferView(attribBuffer, normalsOffset, normalsAndTangentsSize,
normalsAndTangentsStride, FBX_NORMAL_ELEMENT));
mesh->addAttribute(gpu::Stream::TANGENT,
graphics::BufferView(attribBuffer, tangentsOffset, normalsAndTangentsSize,
normalsAndTangentsStride, FBX_NORMAL_ELEMENT));
// Now we decide on how to interleave the attributes and provide the vertices among buffers:
// Aka the Vertex format and the vertexBufferStream
auto vertexFormat = std::make_shared<gpu::Stream::Format>();
auto vertexBufferStream = std::make_shared<gpu::BufferStream>();
// Decision time:
// if blendshapes then keep position and normals/tangents as separated channel buffers from interleaved attributes
// else everything is interleaved in one buffer
// Default case is no blend shapes
gpu::BufferPointer attribBuffer;
int totalAttribBufferSize = totalVertsSize;
gpu::uint8 posChannel = 0;
gpu::uint8 tangentChannel = posChannel;
gpu::uint8 attribChannel = posChannel;
bool interleavePositions = true;
bool interleaveNormalsTangents = true;
// TODO: We are using the same vertex format layout for all meshes because this is more efficient
// This work is going into rc73 release which is meant to be used for the SPot500 event and we are picking the format
// that works best for blendshaped and skinned meshes aka the avatars.
// We will improve this technique in a hot fix to 73.
hasBlendShapes = true;
// If has blend shapes allocate and assign buffers for pos and tangents now
if (hasBlendShapes) {
auto posBuffer = std::make_shared<gpu::Buffer>();
posBuffer->setData(positionsSize, (const gpu::Byte*) vertBuffer->getData() + positionsOffset);
vertexBufferStream->addBuffer(posBuffer, 0, positionElement.getSize());
auto normalsAndTangentsBuffer = std::make_shared<gpu::Buffer>();
normalsAndTangentsBuffer->setData(normalsAndTangentsSize, (const gpu::Byte*) vertBuffer->getData() + normalsAndTangentsOffset);
vertexBufferStream->addBuffer(normalsAndTangentsBuffer, 0, normalsAndTangentsStride);
// update channels and attribBuffer size accordingly
interleavePositions = false;
interleaveNormalsTangents = false;
tangentChannel = 1;
attribChannel = 2;
totalAttribBufferSize = totalVertsSize - positionsSize - normalsAndTangentsSize;
}
// Define the vertex format, compute the offset for each attribute as we append them to the vertex format
gpu::Offset bufOffset = 0;
if (positionsSize) {
vertexFormat->setAttribute(gpu::Stream::POSITION, posChannel, positionElement, bufOffset);
bufOffset += positionElement.getSize();
if (!interleavePositions) {
bufOffset = 0;
}
}
if (normalsSize) {
vertexFormat->setAttribute(gpu::Stream::NORMAL, tangentChannel, normalElement, bufOffset);
bufOffset += normalElement.getSize();
vertexFormat->setAttribute(gpu::Stream::TANGENT, tangentChannel, normalElement, bufOffset);
bufOffset += normalElement.getSize();
if (!interleaveNormalsTangents) {
bufOffset = 0;
}
}
// Pack normal and tangent with the rest of the attributes if no blend shapes
if (colorsSize) {
mesh->addAttribute(gpu::Stream::COLOR,
graphics::BufferView(attribBuffer, colorsOffset, colorsSize, FBX_COLOR_ELEMENT));
vertexFormat->setAttribute(gpu::Stream::COLOR, attribChannel, colorElement, bufOffset);
bufOffset += colorElement.getSize();
}
if (texCoordsSize) {
mesh->addAttribute(gpu::Stream::TEXCOORD,
graphics::BufferView( attribBuffer, texCoordsOffset, texCoordsSize,
gpu::Element(gpu::VEC2, gpu::HALF, gpu::UV)));
vertexFormat->setAttribute(gpu::Stream::TEXCOORD, attribChannel, texCoordsElement, bufOffset);
bufOffset += texCoordsElement.getSize();
}
if (texCoords1Size) {
mesh->addAttribute( gpu::Stream::TEXCOORD1,
graphics::BufferView(attribBuffer, texCoords1Offset, texCoords1Size,
gpu::Element(gpu::VEC2, gpu::HALF, gpu::UV)));
vertexFormat->setAttribute(gpu::Stream::TEXCOORD1, attribChannel, texCoordsElement, bufOffset);
bufOffset += texCoordsElement.getSize();
} else if (texCoordsSize) {
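// No dedicated second UV set: reuse the first UV set for TEXCOORD1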
mesh->addAttribute(gpu::Stream::TEXCOORD1,
graphics::BufferView(attribBuffer, texCoordsOffset, texCoordsSize,
gpu::Element(gpu::VEC2, gpu::HALF, gpu::UV)));
vertexFormat->setAttribute(gpu::Stream::TEXCOORD1, attribChannel, texCoordsElement, bufOffset - texCoordsElement.getSize());
}
if (clusterIndicesSize) {
if (fbxMesh.clusters.size() < UINT8_MAX) {
mesh->addAttribute(gpu::Stream::SKIN_CLUSTER_INDEX,
graphics::BufferView(attribBuffer, clusterIndicesOffset, clusterIndicesSize,
gpu::Element(gpu::VEC4, gpu::UINT8, gpu::XYZW)));
} else {
mesh->addAttribute(gpu::Stream::SKIN_CLUSTER_INDEX,
graphics::BufferView(attribBuffer, clusterIndicesOffset, clusterIndicesSize,
gpu::Element(gpu::VEC4, gpu::UINT16, gpu::XYZW)));
}
vertexFormat->setAttribute(gpu::Stream::SKIN_CLUSTER_INDEX, attribChannel, clusterIndiceElement, bufOffset);
bufOffset += clusterIndiceElement.getSize();
}
if (clusterWeightsSize) {
mesh->addAttribute(gpu::Stream::SKIN_CLUSTER_WEIGHT,
graphics::BufferView(attribBuffer, clusterWeightsOffset, clusterWeightsSize,
gpu::Element(gpu::VEC4, gpu::NUINT16, gpu::XYZW)));
vertexFormat->setAttribute(gpu::Stream::SKIN_CLUSTER_WEIGHT, attribChannel, clusterWeightElement, bufOffset);
bufOffset += clusterWeightElement.getSize();
}
// Finally, allocate and fill the attribBuffer interleaving the attributes as needed:
{
auto vPositionOffset = 0;
auto vPositionSize = (interleavePositions ? positionsSize / numVerts : 0);
auto vNormalsAndTangentsOffset = vPositionOffset + vPositionSize;
auto vNormalsAndTangentsSize = (interleaveNormalsTangents ? normalsAndTangentsSize / numVerts : 0);
auto vColorOffset = vNormalsAndTangentsOffset + vNormalsAndTangentsSize;
auto vColorSize = colorsSize / numVerts;
auto vTexcoord0Offset = vColorOffset + vColorSize;
auto vTexcoord0Size = texCoordsSize / numVerts;
auto vTexcoord1Offset = vTexcoord0Offset + vTexcoord0Size;
auto vTexcoord1Size = texCoords1Size / numVerts;
auto vClusterIndiceOffset = vTexcoord1Offset + vTexcoord1Size;
auto vClusterIndiceSize = clusterIndicesSize / numVerts;
auto vClusterWeightOffset = vClusterIndiceOffset + vClusterIndiceSize;
auto vClusterWeightSize = clusterWeightsSize / numVerts;
auto vStride = vClusterWeightOffset + vClusterWeightSize;
std::vector<gpu::Byte> dest;
dest.resize(totalAttribBufferSize);
auto vDest = dest.data();
auto source = vertBuffer->getData();
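// Walk every vertex, copying each attribute slice out of its packed region in the source buffer
// into the single interleaved destination; vStride is the size of one fully interleaved vertex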
for (int i = 0; i < numVerts; i++) {
if (vPositionSize) memcpy(vDest + vPositionOffset, source + positionsOffset + i * vPositionSize, vPositionSize);
if (vNormalsAndTangentsSize) memcpy(vDest + vNormalsAndTangentsOffset, source + normalsAndTangentsOffset + i * vNormalsAndTangentsSize, vNormalsAndTangentsSize);
if (vColorSize) memcpy(vDest + vColorOffset, source + colorsOffset + i * vColorSize, vColorSize);
if (vTexcoord0Size) memcpy(vDest + vTexcoord0Offset, source + texCoordsOffset + i * vTexcoord0Size, vTexcoord0Size);
if (vTexcoord1Size) memcpy(vDest + vTexcoord1Offset, source + texCoords1Offset + i * vTexcoord1Size, vTexcoord1Size);
if (vClusterIndiceSize) memcpy(vDest + vClusterIndiceOffset, source + clusterIndicesOffset + i * vClusterIndiceSize, vClusterIndiceSize);
if (vClusterWeightSize) memcpy(vDest + vClusterWeightOffset, source + clusterWeightsOffset + i * vClusterWeightSize, vClusterWeightSize);
vDest += vStride;
}
auto attribBuffer = std::make_shared<gpu::Buffer>();
attribBuffer->setData(totalAttribBufferSize, dest.data());
vertexBufferStream->addBuffer(attribBuffer, 0, vStride);
}
// The mesh vertex format and vertex stream are ready
mesh->setVertexFormatAndStream(vertexFormat, vertexBufferStream);
// Index and Part Buffers
unsigned int totalIndices = 0;
foreach(const FBXMeshPart& part, extractedMesh.parts) {
totalIndices += (part.quadTrianglesIndices.size() + part.triangleIndices.size());

View file

@@ -240,6 +240,7 @@ public:
virtual GLuint getFramebufferID(const FramebufferPointer& framebuffer) = 0;
virtual GLuint getTextureID(const TexturePointer& texture) final;
virtual GLuint getBufferID(const Buffer& buffer) = 0;
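// Like getBufferID, but does not force a sync of pending CPU-side data to the GPU object first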
virtual GLuint getBufferIDUnsynced(const Buffer& buffer) = 0;
virtual GLuint getQueryID(const QueryPointer& query) = 0;
virtual GLFramebuffer* syncGPUObject(const Framebuffer& framebuffer) = 0;

View file

@@ -11,6 +11,7 @@
#include "GLBackend.h"
#include "GLShared.h"
#include "GLInputFormat.h"
#include "GLBuffer.h"
using namespace gpu;
using namespace gpu::gl;
@@ -43,13 +44,7 @@ void GLBackend::do_setInputBuffer(const Batch& batch, size_t paramOffset) {
bool isModified = false;
if (_input._buffers[channel] != buffer) {
_input._buffers[channel] = buffer;
GLuint vbo = 0;
if (buffer) {
vbo = getBufferID((*buffer));
}
_input._bufferVBOs[channel] = vbo;
_input._bufferVBOs[channel] = getBufferIDUnsynced((*buffer));
isModified = true;
}
@@ -128,7 +123,7 @@ void GLBackend::do_setIndexBuffer(const Batch& batch, size_t paramOffset) {
if (indexBuffer != _input._indexBuffer) {
_input._indexBuffer = indexBuffer;
if (indexBuffer) {
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, getBufferID(*indexBuffer));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, getBufferIDUnsynced(*indexBuffer));
} else {
// FIXME do we really need this? Is there ever a draw call where we care that the element buffer is null?
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
@@ -145,7 +140,7 @@ void GLBackend::do_setIndirectBuffer(const Batch& batch, size_t paramOffset) {
if (buffer != _input._indirectBuffer) {
_input._indirectBuffer = buffer;
if (buffer) {
glBindBuffer(GL_DRAW_INDIRECT_BUFFER, getBufferID(*buffer));
glBindBuffer(GL_DRAW_INDIRECT_BUFFER, getBufferIDUnsynced(*buffer));
} else {
// FIXME do we really need this? Is there ever a draw call where we care that the indirect buffer is null?
glBindBuffer(GL_DRAW_INDIRECT_BUFFER, 0);
@@ -261,9 +256,17 @@ void GLBackend::updateInput() {
auto offset = _input._bufferOffsets.data();
auto stride = _input._bufferStrides.data();
// Profile the count of buffers to update and use it to short-circuit the for loop
int numInvalids = (int) _input._invalidBuffers.count();
_stats._ISNumInputBufferChanges += numInvalids;
for (GLuint buffer = 0; buffer < _input._buffers.size(); buffer++, vbo++, offset++, stride++) {
if (_input._invalidBuffers.test(buffer)) {
glBindVertexBuffer(buffer, (*vbo), (*offset), (GLsizei)(*stride));
numInvalids--;
if (numInvalids <= 0) {
break;
}
}
}

View file

@@ -85,6 +85,8 @@ void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
auto& cameraCorrectionBuffer = _transform._viewCorrectionEnabled ?
_pipeline._cameraCorrectionBuffer._buffer :
_pipeline._cameraCorrectionBufferIdentity._buffer;
// Because we don't sync buffers in bindUniformBuffer, force this buffer to be synced here
getBufferID(*cameraCorrectionBuffer);
bindUniformBuffer(gpu::slot::buffer::CameraCorrection, cameraCorrectionBuffer, 0, sizeof(CameraCorrection));
}
(void)CHECK_GL_ERROR();
@@ -170,11 +172,10 @@ void GLBackend::bindUniformBuffer(uint32_t slot, const BufferPointer& buffer, GL
return;
}
// Sync BufferObject
auto* object = syncGPUObject(*bufferState.buffer);
if (object) {
glBindBufferRange(GL_UNIFORM_BUFFER, slot, object->_buffer, bufferState.offset, bufferState.size);
// Grab the actual GL buffer object
auto glBO = getBufferIDUnsynced(*buffer);
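// The buffer is assumed to have been synced elsewhere (e.g. do_setPipeline forces a sync of the camera
// correction buffer), so only the already-created GL name is fetched here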
if (glBO) {
glBindBufferRange(GL_UNIFORM_BUFFER, slot, glBO, bufferState.offset, bufferState.size);
_uniform._buffers[slot] = bufferState;
(void)CHECK_GL_ERROR();
} else {

View file

@@ -49,6 +49,16 @@ public:
}
}
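// Returns the GL name of the GPU object already associated with this buffer, or 0 if none has been created yet;
// unlike getId, this does not transfer any pending CPU-side data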
template <typename GLBufferType>
static GLuint getIdUnsynced(GLBackend& backend, const Buffer& buffer) {
GLBufferType* object = Backend::getGPUObject<GLBufferType>(buffer);
if (object) {
return object->_buffer;
} else {
return 0;
}
}
const GLuint& _buffer { _id };
const GLuint _size;
const Stamp _stamp;

View file

@@ -137,6 +137,7 @@ protected:
GLFramebuffer* syncGPUObject(const Framebuffer& framebuffer) override;
GLuint getBufferID(const Buffer& buffer) override;
GLuint getBufferIDUnsynced(const Buffer& buffer) override;
GLuint getResourceBufferID(const Buffer& buffer);
GLBuffer* syncGPUObject(const Buffer& buffer) override;

View file

@@ -83,6 +83,10 @@ GLuint GL41Backend::getBufferID(const Buffer& buffer) {
return GL41Buffer::getId<GL41Buffer>(*this, buffer);
}
GLuint GL41Backend::getBufferIDUnsynced(const Buffer& buffer) {
return GL41Buffer::getIdUnsynced<GL41Buffer>(*this, buffer);
}
GLuint GL41Backend::getResourceBufferID(const Buffer& buffer) {
auto* object = GL41Buffer::sync<GL41Buffer>(*this, buffer);
if (object) {

View file

@@ -78,8 +78,9 @@ void GL41Backend::updateInput() {
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
auto& inputChannels = _input._format->getChannels();
_stats._ISNumInputBufferChanges++;
int numInvalids = (int)_input._invalidBuffers.count();
_stats._ISNumInputBufferChanges += numInvalids;
GLuint boundVBO = 0;
for (auto& channelIt : inputChannels) {
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;

View file

@@ -236,6 +236,7 @@ protected:
GLFramebuffer* syncGPUObject(const Framebuffer& framebuffer) override;
GLuint getBufferID(const Buffer& buffer) override;
GLuint getBufferIDUnsynced(const Buffer& buffer) override;
GLBuffer* syncGPUObject(const Buffer& buffer) override;
GLTexture* syncGPUObject(const TexturePointer& texture) override;

View file

@@ -51,6 +51,10 @@ GLuint GL45Backend::getBufferID(const Buffer& buffer) {
return GL45Buffer::getId<GL45Buffer>(*this, buffer);
}
GLuint GL45Backend::getBufferIDUnsynced(const Buffer& buffer) {
return GL45Buffer::getIdUnsynced<GL45Buffer>(*this, buffer);
}
GLBuffer* GL45Backend::syncGPUObject(const Buffer& buffer) {
return GL45Buffer::sync<GL45Buffer>(*this, buffer);
}

View file

@@ -132,9 +132,18 @@ void GL45Backend::updateInput() {
auto offset = _input._bufferOffsets.data();
auto stride = _input._bufferStrides.data();
for (GLuint buffer = 0; buffer < _input._buffers.size(); buffer++, vbo++, offset++, stride++) {
// Profile the count of buffers to update and use it to short-circuit the for loop
int numInvalids = (int) _input._invalidBuffers.count();
_stats._ISNumInputBufferChanges += numInvalids;
auto numBuffers = _input._buffers.size();
for (GLuint buffer = 0; buffer < numBuffers; buffer++, vbo++, offset++, stride++) {
if (_input._invalidBuffers.test(buffer)) {
glBindVertexBuffer(buffer, (*vbo), (*offset), (GLsizei)(*stride));
numInvalids--;
if (numInvalids <= 0) {
break;
}
}
}

View file

@@ -130,6 +130,7 @@ protected:
GLFramebuffer* syncGPUObject(const Framebuffer& framebuffer) override;
GLuint getBufferID(const Buffer& buffer) override;
GLuint getBufferIDUnsynced(const Buffer& buffer) override;
GLuint getResourceBufferID(const Buffer& buffer);
GLBuffer* syncGPUObject(const Buffer& buffer) override;

View file

@@ -64,6 +64,10 @@ GLuint GLESBackend::getBufferID(const Buffer& buffer) {
return GLESBuffer::getId<GLESBuffer>(*this, buffer);
}
GLuint GLESBackend::getBufferIDUnsynced(const Buffer& buffer) {
return GLESBuffer::getIdUnsynced<GLESBuffer>(*this, buffer);
}
GLBuffer* GLESBackend::syncGPUObject(const Buffer& buffer) {
return GLESBuffer::sync<GLESBuffer>(*this, buffer);
}

View file

@@ -420,10 +420,7 @@ public:
}
const Data& get(uint32 offset) const {
if (offset >= _items.size()) {
static const Data EMPTY;
return EMPTY;
}
assert((offset < _items.size()));
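// An out-of-range offset is now a hard programming error instead of silently returning an empty Data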
return (_items.data() + offset)->_data;
}

View file

@@ -31,29 +31,29 @@ vec4 color_sRGBAToLinear(vec4 srgba) {
}
vec3 color_LinearToYCoCg(vec3 rgb) {
// Y = R/4 + G/2 + B/4
// Co = R/2 - B/2
// Cg = -R/4 + G/2 - B/4
return vec3(
rgb.x/4.0 + rgb.y/2.0 + rgb.z/4.0,
rgb.x/2.0 - rgb.z/2.0,
-rgb.x/4.0 + rgb.y/2.0 - rgb.z/4.0
);
// Y = R/4 + G/2 + B/4
// Co = R/2 - B/2
// Cg = -R/4 + G/2 - B/4
return vec3(
rgb.x/4.0 + rgb.y/2.0 + rgb.z/4.0,
rgb.x/2.0 - rgb.z/2.0,
-rgb.x/4.0 + rgb.y/2.0 - rgb.z/4.0
);
}
vec3 color_YCoCgToUnclampedLinear(vec3 ycocg) {
// R = Y + Co - Cg
// G = Y + Cg
// B = Y - Co - Cg
return vec3(
ycocg.x + ycocg.y - ycocg.z,
ycocg.x + ycocg.z,
ycocg.x - ycocg.y - ycocg.z
);
// R = Y + Co - Cg
// G = Y + Cg
// B = Y - Co - Cg
return vec3(
ycocg.x + ycocg.y - ycocg.z,
ycocg.x + ycocg.z,
ycocg.x - ycocg.y - ycocg.z
);
}
vec3 color_YCoCgToLinear(vec3 ycocg) {
return clamp(color_YCoCgToUnclampedLinear(ycocg), vec3(0.0), vec3(1.0));
return clamp(color_YCoCgToUnclampedLinear(ycocg), vec3(0.0), vec3(1.0));
}
<@func declareColorWheel()@>

View file

@@ -13,12 +13,16 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/ShaderConstants.h@>
struct DrawColorParams {
vec4 color;
};
layout(location=GPU_UNIFORM_COLOR) uniform vec4 color;
layout(binding=0) uniform drawColorParamsBuffer {
DrawColorParams params;
};
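// The color now comes from a uniform block at binding 0 instead of a loose uniform at GPU_UNIFORM_COLOR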
layout(location=0) out vec4 outFragColor;
void main(void) {
outFragColor = color;
outFragColor = params.color;
}

View file

@@ -13,14 +13,19 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/ShaderConstants.h@>
layout(binding=0) uniform sampler2D colorMap;
layout(location=GPU_UNIFORM_COLOR) uniform vec4 color;
struct DrawColorParams {
vec4 color;
};
layout(binding=0) uniform drawColorParams {
DrawColorParams params;
};
layout(location=0) in vec2 varTexCoord0;
layout(location=0) out vec4 outFragColor;
void main(void) {
outFragColor = texture(colorMap, varTexCoord0) * color;
outFragColor = texture(colorMap, varTexCoord0) * params.color;
}

View file

@@ -21,7 +21,13 @@
<$declareStandardTransform()$>
layout(location=GPU_UNIFORM_TEXCOORD_RECT) uniform vec4 texcoordRect;
struct TexCoordRectParams {
vec4 texcoordRect;
};
layout(binding=0) uniform texcoordRectBuffer {
TexCoordRectParams params;
};
layout(location=0) out vec2 varTexCoord0;
@@ -39,5 +45,5 @@ void main(void) {
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, pos, gl_Position)$>
varTexCoord0 = ((pos.xy + 1.0) * 0.5) * texcoordRect.zw + texcoordRect.xy;
varTexCoord0 = ((pos.xy + 1.0) * 0.5) * params.texcoordRect.zw + params.texcoordRect.xy;
}

Some files were not shown because too many files have changed in this diff.