Mirror of https://github.com/AleziaKurdis/overte.git (synced 2025-04-08 06:32:35 +02:00)

Commit 1570ae6424 — Merge branch 'master' into bug-fix/recorded-blendshapes-2x

55 changed files with 970 additions and 353 deletions
@@ -96,7 +96,6 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set<recording::Recorder>();
DependencyManager::set<recording::ClipCache>();
DependencyManager::set<ScriptCache>();
DependencyManager::set<RecordingScriptingInterface>();
DependencyManager::set<UsersScriptingInterface>();
@@ -177,6 +176,8 @@ void Agent::run() {
// Create ScriptEngines on threaded-assignment thread then move to main thread.
DependencyManager::set<ScriptEngines>(ScriptEngine::AGENT_SCRIPT)->moveToThread(qApp->thread());

DependencyManager::set<ScriptCache>();

// make sure we request our script once the agent connects to the domain
auto nodeList = DependencyManager::get<NodeList>();
@@ -360,164 +361,178 @@ void Agent::scriptRequestFinished() {
}

void Agent::executeScript() {
_scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);
// the following block is scoped so that any shared pointers we take here
// are cleared before we call setFinished at the end of the function
{
_scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);

// setup an Avatar for the script to use
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();

scriptedAvatar->setID(getSessionUUID());

connect(_scriptEngine.data(), SIGNAL(update(float)),
scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
scriptedAvatar->setForceFaceTrackerConnected(true);

// call model URL setters with empty URLs so our avatar, if user, will have the default models
scriptedAvatar->setSkeletonModelURL(QUrl());

// force lazy initialization of the head data for the scripted avatar
// since it is referenced below by computeLoudness and getAudioLoudness
scriptedAvatar->getHeadOrientation();

// give this AvatarData object to the script engine
_scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());

// give scripts access to the Users object
_scriptEngine->registerGlobalObject("Users", DependencyManager::get<UsersScriptingInterface>().data());

auto player = DependencyManager::get<recording::Deck>();
connect(player.data(), &recording::Deck::playbackStateChanged, [&player, &scriptedAvatar] {
if (player->isPlaying()) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
if (recordingInterface->getPlayFromCurrentLocation()) {
scriptedAvatar->setRecordingBasis();
}

// these procedural movements are included in the recordings
scriptedAvatar->setHasProceduralEyeFaceMovement(false);
scriptedAvatar->setHasProceduralBlinkFaceMovement(false);
scriptedAvatar->setHasAudioEnabledFaceMovement(false);
} else {
scriptedAvatar->clearRecordingBasis();

// restore procedural blendshape movement
scriptedAvatar->setHasProceduralEyeFaceMovement(true);
scriptedAvatar->setHasProceduralBlinkFaceMovement(true);
scriptedAvatar->setHasAudioEnabledFaceMovement(true);
}
});

using namespace recording;
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [scriptedAvatar](Frame::ConstPointer frame) {

auto player = DependencyManager::get<recording::Deck>();
connect(player.data(), &recording::Deck::playbackStateChanged, [=] {
if (player->isPlaying()) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
if (recordingInterface->getPlayFromCurrentLocation()) {
scriptedAvatar->setRecordingBasis();
bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();

// FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
if (!useFrameSkeleton) {
static std::once_flag warning;
std::call_once(warning, [] {
qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
});
}

// these procedural movements are included in the recordings
scriptedAvatar->setHasProceduralEyeFaceMovement(false);
scriptedAvatar->setHasProceduralBlinkFaceMovement(false);
scriptedAvatar->setHasAudioEnabledFaceMovement(false);
} else {
scriptedAvatar->clearRecordingBasis();
AvatarData::fromFrame(frame->data, *scriptedAvatar);
});

// restore procedural blendshape movement
scriptedAvatar->setHasProceduralEyeFaceMovement(true);
scriptedAvatar->setHasProceduralBlinkFaceMovement(true);
scriptedAvatar->setHasAudioEnabledFaceMovement(true);
}
});
using namespace recording;
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
static quint16 audioSequenceNumber{ 0 };

using namespace recording;
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [scriptedAvatar](Frame::ConstPointer frame) {
QByteArray audio(frame->data);

if (_isNoiseGateEnabled) {
int16_t* samples = reinterpret_cast<int16_t*>(audio.data());
int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
_audioGate.render(samples, samples, numSamples);
}

computeLoudness(&audio, scriptedAvatar);

// state machine to detect gate opening and closing
bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
_audioGateOpen = audioGateOpen;
Q_UNUSED(openedInLastBlock);

// the codec must be flushed to silence before sending silent packets,
// so delay the transition to silent packets by one packet after becoming silent.
auto packetType = PacketType::MicrophoneAudioNoEcho;
if (!audioGateOpen && !closedInLastBlock) {
packetType = PacketType::SilentAudioFrame;
}

Transform audioTransform;
auto headOrientation = scriptedAvatar->getHeadOrientation();
audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
audioTransform.setRotation(headOrientation);

QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);
} else {
encodedBuffer = audio;
}

AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
packetType, _selectedCodecName);
});

auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());

// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));

_scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCacheScriptingInterface>().data());
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCacheScriptingInterface>().data());

QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
_scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);

auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();

_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);

_scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
LocationScriptingInterface::locationSetter);

auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();
_scriptEngine->registerGlobalObject("Recording", recordingInterface.data());

// FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
if (!useFrameSkeleton) {
static std::once_flag warning;
std::call_once(warning, [] {
qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
});
entityScriptingInterface->init();

_entityViewer.init();

entityScriptingInterface->setEntityTree(_entityViewer.getTree());

DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());

_avatarAudioTimer.start();

// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
QTimer* avatarDataTimer = new QTimer(this);
connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
avatarDataTimer->setSingleShot(false);
avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
avatarDataTimer->setTimerType(Qt::PreciseTimer);
avatarDataTimer->start();

_scriptEngine->run();

Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);

if (recordingInterface->isPlaying()) {
recordingInterface->stopPlaying();
}

AvatarData::fromFrame(frame->data, *scriptedAvatar);
});

using namespace recording;
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
static quint16 audioSequenceNumber{ 0 };

QByteArray audio(frame->data);

if (_isNoiseGateEnabled) {
int16_t* samples = reinterpret_cast<int16_t*>(audio.data());
int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
_audioGate.render(samples, samples, numSamples);
if (recordingInterface->isRecording()) {
recordingInterface->stopRecording();
}

computeLoudness(&audio, scriptedAvatar);
avatarDataTimer->stop();
_avatarAudioTimer.stop();
}

// state machine to detect gate opening and closing
bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
_audioGateOpen = audioGateOpen;
Q_UNUSED(openedInLastBlock);

// the codec must be flushed to silence before sending silent packets,
// so delay the transition to silent packets by one packet after becoming silent.
auto packetType = PacketType::MicrophoneAudioNoEcho;
if (!audioGateOpen && !closedInLastBlock) {
packetType = PacketType::SilentAudioFrame;
}

Transform audioTransform;
auto headOrientation = scriptedAvatar->getHeadOrientation();
audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
audioTransform.setRotation(headOrientation);

QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);
} else {
encodedBuffer = audio;
}

AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
packetType, _selectedCodecName);
});

auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());

// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));

_scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCacheScriptingInterface>().data());
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCacheScriptingInterface>().data());

QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
_scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);

auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();

_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);

_scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
LocationScriptingInterface::locationSetter);

auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
_scriptEngine->registerGlobalObject("Recording", recordingInterface.data());

entityScriptingInterface->init();

_entityViewer.init();

entityScriptingInterface->setEntityTree(_entityViewer.getTree());

DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());

QMetaObject::invokeMethod(&_avatarAudioTimer, "start");

// Agents should run at 45hz
static const int AVATAR_DATA_HZ = 45;
static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
QTimer* avatarDataTimer = new QTimer(this);
connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
avatarDataTimer->setSingleShot(false);
avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
avatarDataTimer->setTimerType(Qt::PreciseTimer);
avatarDataTimer->start();

_scriptEngine->run();

Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);

DependencyManager::destroy<RecordingScriptingInterface>();
setFinished(true);
}
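For illustration only — a minimal sketch, not part of the diff above, of the pattern the playbackStateChanged handler applies to fix the doubled recorded blendshapes. It uses only the ScriptableAvatar setters visible in the hunk; the helper name applyPlaybackFaceMovementPolicy is hypothetical.

    // Hypothetical helper: while a clip is playing back, procedural eye/blink/
    // audio-driven face movement is turned off so the blendshapes already baked
    // into the recording are not applied a second time; when playback stops,
    // procedural movement is restored.
    static void applyPlaybackFaceMovementPolicy(ScriptableAvatar& avatar, bool isPlaying) {
        // recordings already contain these procedural movements
        avatar.setHasProceduralEyeFaceMovement(!isPlaying);
        avatar.setHasProceduralBlinkFaceMovement(!isPlaying);
        avatar.setHasAudioEnabledFaceMovement(!isPlaying);
        if (!isPlaying) {
            // stop mapping incoming frames onto the recording basis once playback ends
            avatar.clearRecordingBasis();
        }
    }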
@@ -869,17 +884,25 @@ void Agent::aboutToFinish() {
DependencyManager::destroy<SoundCache>();
DependencyManager::destroy<AudioScriptingInterface>();

DependencyManager::destroy<RecordingScriptingInterface>();
DependencyManager::destroy<recording::Deck>();
DependencyManager::destroy<recording::Recorder>();
DependencyManager::destroy<recording::ClipCache>();
DependencyManager::destroy<ScriptEngine>();

// drop our shared pointer to the script engine, then ask ScriptEngines to shutdown scripting
// this ensures that the ScriptEngine goes down before ScriptEngines
_scriptEngine.clear();

{
DependencyManager::get<ScriptEngines>()->shutdownScripting();
}

DependencyManager::destroy<ScriptEngines>();

DependencyManager::destroy<AssignmentDynamicFactory>();

DependencyManager::destroy<ScriptableAvatar>();

QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");

// cleanup codec & encoder
if (_codec && _encoder) {
_codec->releaseEncoder(_encoder);
@@ -654,6 +654,15 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
if (addToIgnore) {
senderNode->addIgnoredNode(ignoredUUID);

if (ignoredNode) {
// send a reliable kill packet to remove the sending avatar for the ignored avatar
auto killPacket = NLPacket::create(PacketType::KillAvatar,
NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
killPacket->write(senderNode->getUUID().toRfc4122());
killPacket->writePrimitive(KillAvatarReason::AvatarDisconnected);
nodeList->sendPacket(std::move(killPacket), *ignoredNode);
}
} else {
senderNode->removeIgnoredNode(ignoredUUID);
}
interface/resources/qml/AnimStats.qml (new file, 157 lines)

@@ -0,0 +1,157 @@
import Hifi 1.0 as Hifi
import QtQuick 2.3
import '.'

Item {
    id: animStats

    anchors.leftMargin: 300
    objectName: "StatsItem"
    property int modality: Qt.NonModal
    implicitHeight: row.height
    implicitWidth: row.width

    Component.onCompleted: {
        animStats.parentChanged.connect(fill);
        fill();
    }
    Component.onDestruction: {
        animStats.parentChanged.disconnect(fill);
    }

    function fill() {
        // This will cause a warning at shutdown, need to find another way to remove
        // the warning other than filling the anchors to the parent
        anchors.horizontalCenter = parent.horizontalCenter
    }

    Hifi.AnimStats {
        id: root
        objectName: "AnimStats"
        implicitHeight: row.height
        implicitWidth: row.width

        anchors.horizontalCenter: parent.horizontalCenter
        readonly property string bgColor: "#AA111111"

        Row {
            id: row
            spacing: 8

            Rectangle {
                width: firstCol.width + 8;
                height: firstCol.height + 8;
                color: root.bgColor;

                Column {
                    id: firstCol
                    spacing: 4; x: 4; y: 4;

                    StatText {
                        text: "State Machines:---------------------------------------------------------------------------"
                    }
                    ListView {
                        width: firstCol.width
                        height: root.animStateMachines.length * 15
                        visible: root.animStateMachines.length > 0;
                        model: root.animStateMachines
                        delegate: StatText {
                            text: {
                                return modelData;
                            }
                        }
                    }
                }
            }

            Rectangle {
                width: secondCol.width + 8
                height: secondCol.height + 8
                color: root.bgColor;

                Column {
                    id: secondCol
                    spacing: 4; x: 4; y: 4;

                    StatText {
                        text: "Anim Vars:--------------------------------------------------------------------------------"
                    }

                    ListView {
                        width: secondCol.width
                        height: root.animVars.length * 15
                        visible: root.animVars.length > 0;
                        model: root.animVars
                        delegate: StatText {
                            text: {
                                var actualText = modelData.split("|")[1];
                                if (actualText) {
                                    return actualText;
                                } else {
                                    return modelData;
                                }
                            }
                            color: {
                                var grayScale = parseFloat(modelData.split("|")[0]);
                                return Qt.rgba(1.0, 1.0, 1.0, grayScale);
                            }
                            styleColor: {
                                var grayScale = parseFloat(modelData.split("|")[0]);
                                return Qt.rgba(0.0, 0.0, 0.0, grayScale);
                            }
                        }
                    }
                }
            }

            Rectangle {
                width: thirdCol.width + 8
                height: thirdCol.height + 8
                color: root.bgColor;

                Column {
                    id: thirdCol
                    spacing: 4; x: 4; y: 4;

                    StatText {
                        text: "Alpha Values:--------------------------------------------------------------------------"
                    }

                    ListView {
                        width: thirdCol.width
                        height: root.animAlphaValues.length * 15
                        visible: root.animAlphaValues.length > 0;
                        model: root.animAlphaValues
                        delegate: StatText {
                            text: {
                                var actualText = modelData.split("|")[1];
                                if (actualText) {
                                    return actualText;
                                } else {
                                    return modelData;
                                }
                            }
                            color: {
                                var grayScale = parseFloat(modelData.split("|")[0]);
                                return Qt.rgba(1.0, 1.0, 1.0, grayScale);
                            }
                            styleColor: {
                                var grayScale = parseFloat(modelData.split("|")[0]);
                                return Qt.rgba(0.0, 0.0, 0.0, grayScale);
                            }
                        }
                    }

                }
            }
        }

        Connections {
            target: root.parent
            onWidthChanged: {
                root.x = root.parent.width - root.width;
            }
        }
    }

}
@@ -192,21 +192,6 @@ Item {
StatText {
text: "Yaw: " + root.yaw.toFixed(1)
}
StatText {
visible: root.animStackNames.length > 0;
text: "Anim Stack Names:"
}
ListView {
width: geoCol.width
height: root.animStackNames.length * 15
visible: root.animStackNames.length > 0;
model: root.animStackNames
delegate: StatText {
text: modelData.length > 30
? modelData.substring(0, 5) + "..." + modelData.substring(modelData.length - 22)
: modelData
}
}
StatText {
visible: root.expanded;
text: "Avatar Mixer In: " + root.avatarMixerInKbps + " kbps, " +
@@ -780,6 +780,12 @@ Rectangle {
headerVisible: true;
sortIndicatorColumn: settings.connectionsSortIndicatorColumn;
sortIndicatorOrder: settings.connectionsSortIndicatorOrder;
onSortIndicatorColumnChanged: {
settings.connectionsSortIndicatorColumn = sortIndicatorColumn;
}
onSortIndicatorOrderChanged: {
settings.connectionsSortIndicatorOrder = sortIndicatorOrder;
}

TableViewColumn {
id: connectionsUserNameHeader;
@@ -26,7 +26,7 @@ Rectangle {
HifiConstants { id: hifi; }

property var eventBridge;
property string title: "Audio Settings - " + AudioScriptingInterface.context;
property string title: "Audio Settings"
signal sendToScript(var message);

color: hifi.colors.baseGray;
@@ -18,6 +18,7 @@ import "../../windows"
Rectangle {
id: root
objectName: "DCConectionTiming"
property string title: "Domain Connection Timing"

signal sendToScript(var message);
property bool isHMD: false

@@ -33,7 +34,7 @@ Rectangle {
Row {
id: header
anchors.top: parent.top
anchors.topMargin: hifi.dimensions.tabletMenuHeader
anchors.topMargin: hifi.dimensions.contentMargin.y
anchors.leftMargin: 5
anchors.rightMargin: 5
anchors.left: parent.left
@@ -18,6 +18,7 @@ import "../../windows"
Rectangle {
id: root
objectName: "EntityStatistics"
property string title: "Entity Statistics"

signal sendToScript(var message);
property bool isHMD: false

@@ -40,6 +41,7 @@ Rectangle {
id: scrollView
width: parent.width
anchors.top: parent.top
anchors.topMargin: hifi.dimensions.contentMargin.y
anchors.bottom: parent.bottom
anchors.bottomMargin: hifi.dimensions.tabletMenuHeader
contentWidth: column.implicitWidth

@@ -48,10 +50,15 @@ Rectangle {
Column {
id: column
anchors.margins: 10
anchors.top: parent.top
anchors.left: parent.left
anchors.right: parent.right
y: hifi.dimensions.tabletMenuHeader //-bgNavBar
anchors {
topMargin: 0
leftMargin: 10
rightMargin: 10
bottomMargin: 0
}
spacing: 20

TabletEntityStatisticsItem {
@@ -24,6 +24,8 @@ Item {
height: parent.height
width: parent.width

property string title: "Controls"

HifiConstants { id: hifi }

TabBar {
@@ -23,6 +23,8 @@ FocusScope {
property string subMenu: ""
signal sendToScript(var message);

HifiConstants { id: hifi }

Rectangle {
id: bgNavBar
height: 90

@@ -45,24 +47,22 @@ FocusScope {
anchors.topMargin: 0
anchors.top: parent.top

Image {
HiFiGlyphs {
id: menuRootIcon
width: 40
height: 40
source: "../../../icons/tablet-icons/menu-i.svg"
text: breadcrumbText.text !== "Menu" ? hifi.glyphs.backward : ""
size: 72
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
anchors.leftMargin: 15
width: breadcrumbText.text === "Menu" ? 32 : 50
visible: breadcrumbText.text !== "Menu"

MouseArea {
anchors.fill: parent
hoverEnabled: true
onEntered: iconColorOverlay.color = "#1fc6a6";
onExited: iconColorOverlay.color = "#34a2c7";
// navigate back to root level menu
onClicked: {
buildMenu();
breadcrumbText.text = "Menu";
menuPopperUpper.closeLastMenu();
tabletRoot.playButtonClickSound();
}
}

@@ -79,23 +79,10 @@ FocusScope {
id: breadcrumbText
text: "Menu"
size: 26
color: "#34a2c7"
color: "#e3e3e3"
anchors.verticalCenter: parent.verticalCenter
anchors.left: menuRootIcon.right
anchors.leftMargin: 15
MouseArea {
anchors.fill: parent
hoverEnabled: true
onEntered: breadcrumbText.color = "#1fc6a6";
onExited: breadcrumbText.color = "#34a2c7";
// navigate back to parent level menu if there is one
onClicked: {
if (breadcrumbText.text !== "Menu") {
menuPopperUpper.closeLastMenu();
}
tabletRoot.playButtonClickSound();
}
}
}
}

@@ -103,7 +90,6 @@ FocusScope {
menuPopperUpper.closeLastMenu();
}

function setRootMenu(rootMenu, subMenu) {
tabletMenu.subMenu = subMenu;
tabletMenu.rootMenu = rootMenu;
@@ -1,5 +1,5 @@
//
// MessageDialog.qml
// TabletMenuStack.qml
//
// Created by Dante Ruiz on 13 Feb 2017
// Copyright 2016 High Fidelity, Inc.

@@ -66,7 +66,7 @@ Item {
function popSource() {
console.log("trying to pop page");
d.pop();
closeLastMenu();
}

function toModel(items, newMenu) {
@@ -41,7 +41,11 @@ Item {
section.saveAll();
}

closeDialog();
if (HMD.active) {
tablet.popFromStack();
} else {
closeDialog();
}
}

function restoreAll() {

@@ -50,7 +54,11 @@ Item {
section.restoreAll();
}

closeDialog();
if (HMD.active) {
tablet.popFromStack();
} else {
closeDialog();
}
}

function closeDialog() {
@@ -195,6 +195,7 @@
#include "ui/SnapshotAnimated.h"
#include "ui/StandAloneJSConsole.h"
#include "ui/Stats.h"
#include "ui/AnimStats.h"
#include "ui/UpdateDialog.h"
#include "ui/overlays/Overlays.h"
#include "ui/DomainConnectionModel.h"

@@ -3081,8 +3082,10 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {

void Application::onDesktopRootItemCreated(QQuickItem* rootItem) {
Stats::show();
AnimStats::show();
auto surfaceContext = DependencyManager::get<OffscreenUi>()->getSurfaceContext();
surfaceContext->setContextProperty("Stats", Stats::getInstance());
surfaceContext->setContextProperty("AnimStats", AnimStats::getInstance());

#if !defined(Q_OS_ANDROID)
auto offscreenUi = DependencyManager::get<OffscreenUi>();

@@ -4618,6 +4621,7 @@ void Application::idle() {
checkChangeCursor();

Stats::getInstance()->updateStats();
AnimStats::getInstance()->updateStats();

// Normally we check PipelineWarnings, but since idle will often take more than 10ms we only show these idle timing
// details if we're in ExtraDebugging mode. However, the ::update() and its subcomponents will show their timing

@@ -5855,9 +5859,7 @@ void Application::update(float deltaTime) {
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::update()");

#if !defined(Q_OS_ANDROID)
updateLOD(deltaTime);
#endif

// TODO: break these out into distinct perfTimers when they prove interesting
{
@@ -19,8 +19,11 @@
#include <SimpleMovingAverage.h>
#include <render/Args.h>

#ifdef Q_OS_ANDROID
const float LOD_DEFAULT_QUALITY_LEVEL = 0.75f; // default quality level setting is High (lower framerate)
#else
const float LOD_DEFAULT_QUALITY_LEVEL = 0.5f; // default quality level setting is Mid
#endif
const float LOD_MAX_LIKELY_DESKTOP_FPS = 60.0f; // this is essentially, V-synch fps
const float LOD_MAX_LIKELY_HMD_FPS = 90.0f; // this is essentially, V-synch fps
const float LOD_OFFSET_FPS = 5.0f; // offset of FPS to add for computing the target framerate
@@ -255,7 +255,7 @@ Menu::Menu() {
connect(action, &QAction::triggered, [] {
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
auto hmd = DependencyManager::get<HMDScriptingInterface>();
tablet->loadQMLSource("hifi/tablet/ControllerSettings.qml");
tablet->pushOntoStack("hifi/tablet/ControllerSettings.qml");

if (!hmd->getShouldShowTablet()) {
hmd->toggleShouldShowTablet();

@@ -737,6 +737,7 @@ Menu::Menu() {
// Developer > Stats
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::Stats);
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::AnimStats);

// Settings > Enable Speech Control API
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
@@ -197,6 +197,7 @@ namespace MenuOption {
const QString SMIEyeTracking = "SMI Eye Tracking";
const QString SparseTextureManagement = "Enable Sparse Texture Management";
const QString Stats = "Show Statistics";
const QString AnimStats = "Show Animation Stats";
const QString StopAllScripts = "Stop All Scripts";
const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
const QString ThirdPerson = "Third Person";
@@ -516,6 +516,10 @@ void MyAvatar::update(float deltaTime) {
head->relax(deltaTime);
updateFromTrackers(deltaTime);

if (getIsInWalkingState() && glm::length(getControllerPoseInAvatarFrame(controller::Action::HEAD).getVelocity()) < DEFAULT_AVATAR_WALK_SPEED_THRESHOLD) {
setIsInWalkingState(false);
}

// Get audio loudness data from audio input device
// Also get the AudioClient so we can update the avatar bounding box data
// on the AudioClient side.

@@ -3678,10 +3682,10 @@ static bool headAngularVelocityBelowThreshold(const controller::Pose& head) {
return isBelowThreshold;
}

static bool isWithinThresholdHeightMode(const controller::Pose& head,const float& newMode) {
static bool isWithinThresholdHeightMode(const controller::Pose& head, const float& newMode, const float& scale) {
bool isWithinThreshold = true;
if (head.isValid()) {
isWithinThreshold = (head.getTranslation().y - newMode) > DEFAULT_AVATAR_MODE_HEIGHT_STEPPING_THRESHOLD;
isWithinThreshold = (head.getTranslation().y - newMode) > (DEFAULT_AVATAR_MODE_HEIGHT_STEPPING_THRESHOLD * scale);
}
return isWithinThreshold;
}

@@ -3802,6 +3806,10 @@ float MyAvatar::getUserEyeHeight() const {
return userHeight - userHeight * ratio;
}

bool MyAvatar::getIsInWalkingState() const {
return _isInWalkingState;
}

float MyAvatar::getWalkSpeed() const {
return _walkSpeed.get() * _walkSpeedScalar;
}

@@ -3818,6 +3826,10 @@ void MyAvatar::setSprintMode(bool sprint) {
_walkSpeedScalar = sprint ? _sprintSpeed.get() : AVATAR_WALK_SPEED_SCALAR;
}

void MyAvatar::setIsInWalkingState(bool isWalking) {
_isInWalkingState = isWalking;
}

void MyAvatar::setWalkSpeed(float value) {
_walkSpeed.set(value);
}

@@ -3912,7 +3924,6 @@ void MyAvatar::lateUpdatePalms() {
Avatar::updatePalms();
}

static const float FOLLOW_TIME = 0.5f;

MyAvatar::FollowHelper::FollowHelper() {

@@ -4004,24 +4015,36 @@ bool MyAvatar::FollowHelper::shouldActivateHorizontalCG(MyAvatar& myAvatar) cons
controller::Pose currentRightHandPose = myAvatar.getControllerPoseInAvatarFrame(controller::Action::RIGHT_HAND);

bool stepDetected = false;
if (!withinBaseOfSupport(currentHeadPose) &&
float myScale = myAvatar.getAvatarScale();

if (myAvatar.getIsInWalkingState()) {
stepDetected = true;
} else {
if (!withinBaseOfSupport(currentHeadPose) &&
headAngularVelocityBelowThreshold(currentHeadPose) &&
isWithinThresholdHeightMode(currentHeadPose, myAvatar.getCurrentStandingHeight()) &&
isWithinThresholdHeightMode(currentHeadPose, myAvatar.getCurrentStandingHeight(), myScale) &&
handDirectionMatchesHeadDirection(currentLeftHandPose, currentRightHandPose, currentHeadPose) &&
handAngularVelocityBelowThreshold(currentLeftHandPose, currentRightHandPose) &&
headVelocityGreaterThanThreshold(currentHeadPose) &&
isHeadLevel(currentHeadPose, myAvatar.getAverageHeadRotation())) {
// a step is detected
stepDetected = true;
} else {
glm::vec3 defaultHipsPosition = myAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(myAvatar.getJointIndex("Hips"));
glm::vec3 defaultHeadPosition = myAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(myAvatar.getJointIndex("Head"));
glm::vec3 currentHeadPosition = currentHeadPose.getTranslation();
float anatomicalHeadToHipsDistance = glm::length(defaultHeadPosition - defaultHipsPosition);
if (!isActive(Horizontal) &&
(glm::length(currentHeadPosition - defaultHipsPosition) > (anatomicalHeadToHipsDistance + (DEFAULT_AVATAR_SPINE_STRETCH_LIMIT * anatomicalHeadToHipsDistance)))) {
myAvatar.setResetMode(true);
// a step is detected
stepDetected = true;
if (glm::length(currentHeadPose.velocity) > DEFAULT_AVATAR_WALK_SPEED_THRESHOLD) {
myAvatar.setIsInWalkingState(true);
}
} else {
glm::vec3 defaultHipsPosition = myAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(myAvatar.getJointIndex("Hips"));
glm::vec3 defaultHeadPosition = myAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(myAvatar.getJointIndex("Head"));
glm::vec3 currentHeadPosition = currentHeadPose.getTranslation();
float anatomicalHeadToHipsDistance = glm::length(defaultHeadPosition - defaultHipsPosition);
if (!isActive(Horizontal) &&
(glm::length(currentHeadPosition - defaultHipsPosition) > (anatomicalHeadToHipsDistance + (DEFAULT_AVATAR_SPINE_STRETCH_LIMIT * anatomicalHeadToHipsDistance)))) {
myAvatar.setResetMode(true);
stepDetected = true;
if (glm::length(currentHeadPose.velocity) > DEFAULT_AVATAR_WALK_SPEED_THRESHOLD) {
myAvatar.setIsInWalkingState(true);
}
}
}
}
return stepDetected;
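For illustration only — a rough sketch, not the committed code, of the walking-state gate the MyAvatar hunks above introduce: once the avatar is in the walking state, step detection short-circuits to true, and MyAvatar::update() clears the state when head velocity falls below DEFAULT_AVATAR_WALK_SPEED_THRESHOLD. The function name and parameters here are hypothetical.

    // Illustrative only: shape of the walking-state latch added above.
    // `headSpeed` stands in for glm::length of the HEAD controller pose velocity.
    bool shouldDetectStep(bool isInWalkingState, bool stepConditionsMet, float headSpeed,
                          bool& outEnterWalkingState) {
        if (isInWalkingState) {
            return true;                       // keep following while already walking
        }
        if (stepConditionsMet) {
            // a fast enough head movement also latches the walking state
            outEnterWalkingState = headSpeed > DEFAULT_AVATAR_WALK_SPEED_THRESHOLD;
            return true;
        }
        return false;
    }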
@@ -1086,6 +1086,8 @@ public:

const QUuid& getSelfID() const { return AVATAR_SELF_ID; }

void setIsInWalkingState(bool isWalking);
bool getIsInWalkingState() const;
void setWalkSpeed(float value);
float getWalkSpeed() const;
void setWalkBackwardSpeed(float value);

@@ -1788,6 +1790,7 @@ private:
ThreadSafeValueCache<float> _walkBackwardSpeed { DEFAULT_AVATAR_MAX_WALKING_BACKWARD_SPEED };
ThreadSafeValueCache<float> _sprintSpeed { AVATAR_SPRINT_SPEED_SCALAR };
float _walkSpeedScalar { AVATAR_WALK_SPEED_SCALAR };
bool _isInWalkingState { false };

// load avatar scripts once when rig is ready
bool _shouldLoadScripts { false };
@@ -16,8 +16,9 @@ Transform MyAvatarHeadTransformNode::getTransform() {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

glm::vec3 pos = myAvatar->getHeadPosition();
glm::vec3 scale = glm::vec3(myAvatar->scaleForChildren());
glm::quat headOri = myAvatar->getHeadOrientation();
glm::quat ori = headOri * glm::angleAxis(-PI / 2.0f, Vectors::RIGHT);

return Transform(ori, glm::vec3(1.0f), pos);
return Transform(ori, scale, pos);
}
@@ -46,7 +46,7 @@ static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
}

glm::mat4 hipsMat;
if (myAvatar->getCenterOfGravityModelEnabled() && !isFlying) {
if (myAvatar->getCenterOfGravityModelEnabled() && !isFlying && !(myAvatar->getIsInWalkingState())) {
// then we use center of gravity model
hipsMat = myAvatar->deriveBodyUsingCgModel();
} else {
@@ -75,7 +75,6 @@ void SafeLanding::addTrackedEntity(const EntityItemID& entityID) {
if (hasAABox && downloadedCollisionTypes.count(modelEntity->getShapeType()) != 0) {
// Only track entities with downloaded collision bodies.
_trackedEntities.emplace(entityID, entity);
qCDebug(interfaceapp) << "Safe Landing: Tracking entity " << entity->getItemName();
}
}
}

@@ -110,7 +109,6 @@ bool SafeLanding::isLoadSequenceComplete() {
_initialEnd = INVALID_SEQUENCE;
_entityTree = nullptr;
EntityTreeRenderer::setEntityLoadingPriorityFunction(StandardPriority);
qCDebug(interfaceapp) << "Safe Landing: load sequence complete";
}

return !_trackingEntities;
@@ -86,23 +86,23 @@ bool CollisionPick::isLoaded() const {
return !_mathPick.shouldComputeShapeInfo() || (_cachedResource && _cachedResource->isLoaded());
}

bool CollisionPick::getShapeInfoReady() {
bool CollisionPick::getShapeInfoReady(const CollisionRegion& pick) {
if (_mathPick.shouldComputeShapeInfo()) {
if (_cachedResource && _cachedResource->isLoaded()) {
computeShapeInfo(_mathPick, *_mathPick.shapeInfo, _cachedResource);
computeShapeInfo(pick, *_mathPick.shapeInfo, _cachedResource);
_mathPick.loaded = true;
} else {
_mathPick.loaded = false;
}
} else {
computeShapeInfoDimensionsOnly(_mathPick, *_mathPick.shapeInfo, _cachedResource);
computeShapeInfoDimensionsOnly(pick, *_mathPick.shapeInfo, _cachedResource);
_mathPick.loaded = true;
}

return _mathPick.loaded;
}

void CollisionPick::computeShapeInfoDimensionsOnly(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
void CollisionPick::computeShapeInfoDimensionsOnly(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
ShapeType type = shapeInfo.getType();
glm::vec3 dimensions = pick.transform.getScale();
QString modelURL = (resource ? resource->getURL().toString() : "");

@@ -115,7 +115,7 @@ void CollisionPick::computeShapeInfoDimensionsOnly(CollisionRegion& pick, ShapeI
}
}

void CollisionPick::computeShapeInfo(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
void CollisionPick::computeShapeInfo(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
// This code was copied and modified from RenderableModelEntityItem::computeShapeInfo
// TODO: Move to some shared code area (in entities-renderer? model-networking?)
// after we verify this is working and do a diff comparison with RenderableModelEntityItem::computeShapeInfo

@@ -357,12 +357,14 @@ CollisionPick::CollisionPick(const PickFilter& filter, float maxDistance, bool e
CollisionRegion CollisionPick::getMathematicalPick() const {
CollisionRegion mathPick = _mathPick;
mathPick.loaded = isLoaded();
if (!parentTransform) {
return mathPick;
} else {
mathPick.transform = parentTransform->getTransform().worldTransform(mathPick.transform);
return mathPick;
if (parentTransform) {
Transform parentTransformValue = parentTransform->getTransform();
mathPick.transform = parentTransformValue.worldTransform(mathPick.transform);
glm::vec3 scale = parentTransformValue.getScale();
float largestDimension = glm::max(glm::max(scale.x, scale.y), scale.z);
mathPick.threshold *= largestDimension;
}
return mathPick;
}

void CollisionPick::filterIntersections(std::vector<ContactTestResult>& intersections) const {

@@ -393,9 +395,9 @@ PickResultPointer CollisionPick::getEntityIntersection(const CollisionRegion& pi
// Cannot compute result
return std::make_shared<CollisionPickResult>(pick.toVariantMap(), std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
}
getShapeInfoReady();
getShapeInfoReady(pick);

auto entityIntersections = _physicsEngine->contactTest(USER_COLLISION_MASK_ENTITIES, *pick.shapeInfo, pick.transform, USER_COLLISION_GROUP_DYNAMIC, pick.threshold);
auto entityIntersections = _physicsEngine->contactTest(USER_COLLISION_MASK_ENTITIES, *_mathPick.shapeInfo, pick.transform, USER_COLLISION_GROUP_DYNAMIC, pick.threshold);
filterIntersections(entityIntersections);
return std::make_shared<CollisionPickResult>(pick, entityIntersections, std::vector<ContactTestResult>());
}

@@ -409,9 +411,9 @@ PickResultPointer CollisionPick::getAvatarIntersection(const CollisionRegion& pi
// Cannot compute result
return std::make_shared<CollisionPickResult>(pick, std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
}
getShapeInfoReady();
getShapeInfoReady(pick);

auto avatarIntersections = _physicsEngine->contactTest(USER_COLLISION_MASK_AVATARS, *pick.shapeInfo, pick.transform, USER_COLLISION_GROUP_DYNAMIC, pick.threshold);
auto avatarIntersections = _physicsEngine->contactTest(USER_COLLISION_MASK_AVATARS, *_mathPick.shapeInfo, pick.transform, USER_COLLISION_GROUP_DYNAMIC, pick.threshold);
filterIntersections(avatarIntersections);
return std::make_shared<CollisionPickResult>(pick, std::vector<ContactTestResult>(), avatarIntersections);
}
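For illustration only — a small standalone sketch (not the committed code) of what the getMathematicalPick() change above does: when the pick has a parent transform, the contact-test threshold, a world-space penetration depth, is scaled by the largest component of the parent's scale so the pick behaves consistently on scaled parents. The free-function name is hypothetical.

    #include <algorithm>
    #include <glm/glm.hpp>

    // Hypothetical helper mirroring the threshold adjustment in the hunk above.
    float scaleThresholdByParent(float threshold, const glm::vec3& parentScale) {
        // use the largest scale axis, matching the diff's largestDimension logic
        float largestDimension = std::max({ parentScale.x, parentScale.y, parentScale.z });
        return threshold * largestDimension;
    }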
@@ -62,9 +62,9 @@ protected:
// Returns true if the resource for _mathPick.shapeInfo is loaded or if a resource is not needed.
bool isLoaded() const;
// Returns true if _mathPick.shapeInfo is valid. Otherwise, attempts to get the _mathPick ready for use.
bool getShapeInfoReady();
void computeShapeInfo(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
void computeShapeInfoDimensionsOnly(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
bool getShapeInfoReady(const CollisionRegion& pick);
void computeShapeInfo(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
void computeShapeInfoDimensionsOnly(const CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
void filterIntersections(std::vector<ContactTestResult>& intersections) const;

CollisionRegion _mathPick;
@@ -24,11 +24,14 @@
#include "CollisionPick.h"

#include "SpatialParentFinder.h"
#include "NestableTransformNode.h"
#include "PickTransformNode.h"
#include "MouseTransformNode.h"
#include "avatar/MyAvatarHeadTransformNode.h"
#include "avatar/AvatarManager.h"
#include "NestableTransformNode.h"
#include "avatars-renderer/AvatarTransformNode.h"
#include "ui/overlays/OverlayTransformNode.h"
#include "EntityTransformNode.h"

#include <ScriptEngine.h>

@@ -260,9 +263,16 @@ unsigned int PickScriptingInterface::createParabolaPick(const QVariant& properti
* A set of properties that can be passed to {@link Picks.createPick} to create a new Collision Pick.

* @typedef {object} Picks.CollisionPickProperties
* @property {Shape} shape - The information about the collision region's size and shape.
* @property {Vec3} position - The position of the collision region.
* @property {Quat} orientation - The orientation of the collision region.
* @property {boolean} [enabled=false] If this Pick should start enabled or not. Disabled Picks do not update their pick results.
* @property {number} [filter=Picks.PICK_NOTHING] The filter for this Pick to use, constructed using filter flags combined using bitwise OR.
* @property {Shape} shape - The information about the collision region's size and shape. Dimensions are in world space, but will scale with the parent if defined.
* @property {Vec3} position - The position of the collision region, relative to a parent if defined.
* @property {Quat} orientation - The orientation of the collision region, relative to a parent if defined.
* @property {float} threshold - The approximate minimum penetration depth for a test object to be considered in contact with the collision region.
* The depth is measured in world space, but will scale with the parent if defined.
* @property {Uuid} parentID - The ID of the parent, either an avatar, an entity, or an overlay.
* @property {number} parentJointIndex - The joint of the parent to parent to, for example, the joints on the model of an avatar. (default = 0, no joint)
* @property {string} joint - If "Mouse," parents the pick to the mouse. If "Avatar," parents the pick to MyAvatar's head. Otherwise, parents to the joint of the given name on MyAvatar.
*/
unsigned int PickScriptingInterface::createCollisionPick(const QVariant& properties) {
QVariantMap propMap = properties.toMap();

@@ -375,7 +385,16 @@ std::shared_ptr<TransformNode> PickScriptingInterface::createTransformNode(const
}
auto sharedNestablePointer = nestablePointer.lock();
if (success && sharedNestablePointer) {
return std::make_shared<NestableTransformNode>(nestablePointer, parentJointIndex);
NestableType nestableType = sharedNestablePointer->getNestableType();
if (nestableType == NestableType::Avatar) {
return std::make_shared<AvatarTransformNode>(std::static_pointer_cast<Avatar>(sharedNestablePointer), parentJointIndex);
} else if (nestableType == NestableType::Overlay) {
return std::make_shared<OverlayTransformNode>(std::static_pointer_cast<Base3DOverlay>(sharedNestablePointer), parentJointIndex);
} else if (nestableType == NestableType::Entity) {
return std::make_shared<EntityTransformNode>(std::static_pointer_cast<EntityItem>(sharedNestablePointer), parentJointIndex);
} else {
return std::make_shared<NestableTransformNode>(nestablePointer, parentJointIndex);
}
}
}

@@ -394,7 +413,7 @@ std::shared_ptr<TransformNode> PickScriptingInterface::createTransformNode(const
} else if (!joint.isNull()) {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
int jointIndex = myAvatar->getJointIndex(joint);
return std::make_shared<NestableTransformNode>(myAvatar, jointIndex);
return std::make_shared<AvatarTransformNode>(myAvatar, jointIndex);
}
}
interface/src/ui/AnimStats.cpp (new file, 141 lines)

@@ -0,0 +1,141 @@
//
//  Created by Anthony J. Thibault 2018/08/06
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "AnimStats.h"

#include <avatar/AvatarManager.h>
#include <OffscreenUi.h>
#include "Menu.h"

HIFI_QML_DEF(AnimStats)

static AnimStats* INSTANCE{ nullptr };

AnimStats* AnimStats::getInstance() {
    Q_ASSERT(INSTANCE);
    return INSTANCE;
}

AnimStats::AnimStats(QQuickItem* parent) : QQuickItem(parent) {
    INSTANCE = this;
}

void AnimStats::updateStats(bool force) {
    QQuickItem* parent = parentItem();
    if (!force) {
        if (!Menu::getInstance()->isOptionChecked(MenuOption::AnimStats)) {
            if (parent->isVisible()) {
                parent->setVisible(false);
            }
            return;
        } else if (!parent->isVisible()) {
            parent->setVisible(true);
        }
    }

    auto avatarManager = DependencyManager::get<AvatarManager>();
    auto myAvatar = avatarManager->getMyAvatar();
    auto debugAlphaMap = myAvatar->getSkeletonModel()->getRig().getDebugAlphaMap();

    // update animation debug alpha values
    QStringList newAnimAlphaValues;
    qint64 now = usecTimestampNow();
    for (auto& iter : debugAlphaMap) {
        QString key = iter.first;
        float alpha = std::get<0>(iter.second);

        auto prevIter = _prevDebugAlphaMap.find(key);
        if (prevIter != _prevDebugAlphaMap.end()) {
            float prevAlpha = std::get<0>(iter.second);
            if (prevAlpha != alpha) {
                // change detected: reset timer
                _animAlphaValueChangedTimers[key] = now;
            }
        } else {
            // new value: start timer
            _animAlphaValueChangedTimers[key] = now;
        }

        AnimNodeType type = std::get<1>(iter.second);
        if (type == AnimNodeType::Clip) {

            // figure out the grayScale color of this line.
            const float LIT_TIME = 2.0f;
            const float FADE_OUT_TIME = 1.0f;
            float grayScale = 0.0f;
            float secondsElapsed = (float)(now - _animAlphaValueChangedTimers[key]) / (float)USECS_PER_SECOND;
            if (secondsElapsed < LIT_TIME) {
                grayScale = 1.0f;
            } else if (secondsElapsed < LIT_TIME + FADE_OUT_TIME) {
                grayScale = (FADE_OUT_TIME - (secondsElapsed - LIT_TIME)) / FADE_OUT_TIME;
            } else {
                grayScale = 0.0f;
            }

            if (grayScale > 0.0f) {
                // append grayScaleColor to start of debug string
                newAnimAlphaValues << QString::number(grayScale, 'f', 2) + "|" + key + ": " + QString::number(alpha, 'f', 3);
            }
        }
    }

    _animAlphaValues = newAnimAlphaValues;
    _prevDebugAlphaMap = debugAlphaMap;

    emit animAlphaValuesChanged();

    // update animation anim vars
    _animVarsList.clear();
    auto animVars = myAvatar->getSkeletonModel()->getRig().getAnimVars().toDebugMap();
    for (auto& iter : animVars) {
        QString key = iter.first;
        QString value = iter.second;

        auto prevIter = _prevAnimVars.find(key);
        if (prevIter != _prevAnimVars.end()) {
            QString prevValue = prevIter->second;
            if (value != prevValue) {
                // change detected: reset timer
                _animVarChangedTimers[key] = now;
            }
        } else {
            // new value: start timer
            _animVarChangedTimers[key] = now;
        }

        // figure out the grayScale color of this line.
        const float LIT_TIME = 2.0f;
        const float FADE_OUT_TIME = 0.5f;
        float grayScale = 0.0f;
        float secondsElapsed = (float)(now - _animVarChangedTimers[key]) / (float)USECS_PER_SECOND;
        if (secondsElapsed < LIT_TIME) {
            grayScale = 1.0f;
        } else if (secondsElapsed < LIT_TIME + FADE_OUT_TIME) {
            grayScale = (FADE_OUT_TIME - (secondsElapsed - LIT_TIME)) / FADE_OUT_TIME;
        } else {
            grayScale = 0.0f;
        }

        if (grayScale > 0.0f) {
            // append grayScaleColor to start of debug string
            _animVarsList << QString::number(grayScale, 'f', 2) + "|" + key + ": " + value;
        }
    }
    _prevAnimVars = animVars;
    emit animVarsChanged();

    // animation state machines
    _animStateMachines.clear();
    auto stateMachineMap = myAvatar->getSkeletonModel()->getRig().getStateMachineMap();
    for (auto& iter : stateMachineMap) {
        _animStateMachines << iter.second;
    }
    emit animStateMachinesChanged();
}
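For reference, a standalone sketch (editor's illustration, not part of the new file) of the lit/fade-out brightness curve AnimStats::updateStats() computes per debug line, which the QML delegates consume through the "grayscale|text" prefix. The function name is hypothetical; the constants match the values shown above.

    // Brightness for a debug line: fully lit for litTime seconds after the value
    // last changed, then fading linearly to zero over fadeOutTime seconds.
    float debugLineGrayScale(float secondsSinceChange, float litTime, float fadeOutTime) {
        if (secondsSinceChange < litTime) {
            return 1.0f;
        } else if (secondsSinceChange < litTime + fadeOutTime) {
            return (fadeOutTime - (secondsSinceChange - litTime)) / fadeOutTime;
        }
        return 0.0f;
    }
    // AnimStats uses litTime = 2.0 s, with fadeOutTime = 1.0 s for alpha values and 0.5 s for anim vars.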
interface/src/ui/AnimStats.h (new file, 55 lines)

@@ -0,0 +1,55 @@
//
//  Created by Anthony J. Thibault 2018/08/06
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AnimStats_h
#define hifi_AnimStats_h

#include <OffscreenQmlElement.h>
#include <AnimContext.h>

class AnimStats : public QQuickItem {
    Q_OBJECT
    HIFI_QML_DECL

    Q_PROPERTY(QStringList animAlphaValues READ animAlphaValues NOTIFY animAlphaValuesChanged)
    Q_PROPERTY(QStringList animVars READ animVars NOTIFY animVarsChanged)
    Q_PROPERTY(QStringList animStateMachines READ animStateMachines NOTIFY animStateMachinesChanged)

public:
    static AnimStats* getInstance();

    AnimStats(QQuickItem* parent = nullptr);

    void updateStats(bool force = false);

    QStringList animAlphaValues() { return _animAlphaValues; }
    QStringList animVars() { return _animVarsList; }
    QStringList animStateMachines() { return _animStateMachines; }

public slots:
    void forceUpdateStats() { updateStats(true); }

signals:

    void animAlphaValuesChanged();
    void animVarsChanged();
    void animStateMachinesChanged();

private:
    QStringList _animAlphaValues;
    AnimContext::DebugAlphaMap _prevDebugAlphaMap; // alpha values from previous frame
    std::map<QString, qint64> _animAlphaValueChangedTimers; // last time alpha value has changed

    QStringList _animVarsList;
    std::map<QString, QString> _prevAnimVars; // anim vars from previous frame
    std::map<QString, qint64> _animVarChangedTimers; // last time animVar value has changed.

    QStringList _animStateMachines;
};

#endif // hifi_AnimStats_h
@@ -207,14 +207,6 @@ void Stats::updateStats(bool force) {

// Third column, avatar stats
auto myAvatar = avatarManager->getMyAvatar();
auto animStack = myAvatar->getSkeletonModel()->getRig().getAnimStack();

_animStackNames.clear();
for (auto animStackIterator = animStack.begin(); animStackIterator != animStack.end(); ++animStackIterator) {
_animStackNames << animStackIterator->first + ": " + QString::number(animStackIterator->second,'f',3);
}
emit animStackNamesChanged();

glm::vec3 avatarPos = myAvatar->getWorldPosition();
STAT_UPDATE(position, QVector3D(avatarPos.x, avatarPos.y, avatarPos.z));
STAT_UPDATE_FLOAT(speed, glm::length(myAvatar->getWorldVelocity()), 0.01f);

@@ -134,7 +134,6 @@ private: \
* @property {number} batchFrameTime - <em>Read-only.</em>
* @property {number} engineFrameTime - <em>Read-only.</em>
* @property {number} avatarSimulationTime - <em>Read-only.</em>
* @property {string[]} animStackNames - <em>Read-only.</em>
*
*
* @property {number} x

@@ -292,7 +291,6 @@ class Stats : public QQuickItem {
STATS_PROPERTY(float, batchFrameTime, 0)
STATS_PROPERTY(float, engineFrameTime, 0)
STATS_PROPERTY(float, avatarSimulationTime, 0)
Q_PROPERTY(QStringList animStackNames READ animStackNames NOTIFY animStackNamesChanged)

STATS_PROPERTY(int, stylusPicksCount, 0)
STATS_PROPERTY(int, rayPicksCount, 0)

@@ -326,7 +324,6 @@ public:
}

QStringList downloadUrls () { return _downloadUrls; }
QStringList animStackNames() { return _animStackNames; }

public slots:
void forceUpdateStats() { updateStats(true); }

@@ -1028,13 +1025,6 @@ signals:
*/
void avatarSimulationTimeChanged();

/**jsdoc
* Triggered when the value of the <code>animStackNames</code> property changes.
* @function Stats.animStackNamesChanged
* @returns {Signal}
*/
void animStackNamesChanged();

/**jsdoc
* Triggered when the value of the <code>rectifiedTextureCount</code> property changes.
* @function Stats.rectifiedTextureCountChanged

@@ -1049,7 +1039,6 @@ signals:
*/
void decimatedTextureCountChanged();

// QQuickItem signals.

/**jsdoc

@@ -1336,7 +1325,6 @@ private:
QString _monospaceFont;
const AudioIOStats* _audioStats;
QStringList _downloadUrls = QStringList();
QStringList _animStackNames = QStringList();
};

#endif // hifi_Stats_h
interface/src/ui/overlays/OverlayTransformNode.cpp (new file, 13 lines)

@@ -0,0 +1,13 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OverlayTransformNode.h"

template<>
glm::vec3 BaseNestableTransformNode<Base3DOverlay>::getActualScale(const std::shared_ptr<Base3DOverlay>& nestablePointer) const {
    return nestablePointer->getBounds().getScale();
}

interface/src/ui/overlays/OverlayTransformNode.h (new file, 21 lines)

@@ -0,0 +1,21 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_OverlayTransformNode_h
#define hifi_OverlayTransformNode_h

#include "NestableTransformNode.h"

#include "Base3DOverlay.h"

// For 3D overlays only
class OverlayTransformNode : public BaseNestableTransformNode<Base3DOverlay> {
public:
    OverlayTransformNode(std::weak_ptr<Base3DOverlay> spatiallyNestable, int jointIndex) : BaseNestableTransformNode(spatiallyNestable, jointIndex) {};
};

#endif // hifi_OverlayTransformNode_h
@@ -27,7 +27,7 @@ AnimBlendLinear::~AnimBlendLinear() {
const AnimPoseVec& AnimBlendLinear::evaluate(const AnimVariantMap& animVars, const AnimContext& context, float dt, AnimVariantMap& triggersOut) {

    _alpha = animVars.lookup(_alphaVar, _alpha);
    float parentAlpha = _animStack[_id];
    float parentDebugAlpha = context.getDebugAlpha(_id);

    if (_children.size() == 0) {
        for (auto&& pose : _poses) {

@@ -35,7 +35,7 @@ const AnimPoseVec& AnimBlendLinear::evaluate(const AnimVariantMap& animVars, con
        }
    } else if (_children.size() == 1) {
        _poses = _children[0]->evaluate(animVars, context, dt, triggersOut);
        _animStack[_children[0]->getID()] = parentAlpha;
        context.setDebugAlpha(_children[0]->getID(), parentDebugAlpha, _children[0]->getType());
    } else {
        float clampedAlpha = glm::clamp(_alpha, 0.0f, (float)(_children.size() - 1));
        size_t prevPoseIndex = glm::floor(clampedAlpha);

@@ -48,12 +48,12 @@ const AnimPoseVec& AnimBlendLinear::evaluate(const AnimVariantMap& animVars, con
        float weight2 = 0.0f;
        if (prevPoseIndex == nextPoseIndex) {
            weight2 = 1.0f;
            _animStack[_children[nextPoseIndex]->getID()] = weight2 * parentAlpha;
            context.setDebugAlpha(_children[nextPoseIndex]->getID(), weight2 * parentDebugAlpha, _children[nextPoseIndex]->getType());
        } else {
            weight2 = alpha;
            weight1 = 1.0f - weight2;
            _animStack[_children[prevPoseIndex]->getID()] = weight1 * parentAlpha;
            _animStack[_children[nextPoseIndex]->getID()] = weight2 * parentAlpha;
            context.setDebugAlpha(_children[prevPoseIndex]->getID(), weight1 * parentDebugAlpha, _children[prevPoseIndex]->getType());
            context.setDebugAlpha(_children[nextPoseIndex]->getID(), weight2 * parentDebugAlpha, _children[nextPoseIndex]->getType());
        }
    }
    processOutputJoints(triggersOut);

@@ -62,9 +62,7 @@ const AnimPoseVec& AnimBlendLinearMove::evaluate(const AnimVariantMap& animVars,
        speed = animVars.lookup("moveForwardSpeed", speed);
    }
    _alpha = calculateAlpha(speed, _characteristicSpeeds);
    float parentAlpha = _animStack[_id];

    _animStack["speed"] = speed;
    float parentDebugAlpha = context.getDebugAlpha(_id);

    if (_children.size() == 0) {
        for (auto&& pose : _poses) {

@@ -77,7 +75,7 @@ const AnimPoseVec& AnimBlendLinearMove::evaluate(const AnimVariantMap& animVars,
        float prevDeltaTime, nextDeltaTime;
        setFrameAndPhase(dt, alpha, prevPoseIndex, nextPoseIndex, &prevDeltaTime, &nextDeltaTime, triggersOut);
        evaluateAndBlendChildren(animVars, context, triggersOut, alpha, prevPoseIndex, nextPoseIndex, prevDeltaTime, nextDeltaTime);
        _animStack[_children[0]->getID()] = parentAlpha;
        context.setDebugAlpha(_children[0]->getID(), parentDebugAlpha, _children[0]->getType());
    } else {
        auto clampedAlpha = glm::clamp(_alpha, 0.0f, (float)(_children.size() - 1));
        auto prevPoseIndex = glm::floor(clampedAlpha);

@@ -87,17 +85,11 @@ const AnimPoseVec& AnimBlendLinearMove::evaluate(const AnimVariantMap& animVars,
        setFrameAndPhase(dt, alpha, prevPoseIndex, nextPoseIndex, &prevDeltaTime, &nextDeltaTime, triggersOut);
        evaluateAndBlendChildren(animVars, context, triggersOut, alpha, prevPoseIndex, nextPoseIndex, prevDeltaTime, nextDeltaTime);

        // weights are for animation stack debug purposes only.
        float weight1 = 0.0f;
        float weight2 = 0.0f;
        if (prevPoseIndex == nextPoseIndex) {
            weight2 = 1.0f;
            _animStack[_children[nextPoseIndex]->getID()] = weight2 * parentAlpha;
            context.setDebugAlpha(_children[nextPoseIndex]->getID(), parentDebugAlpha, _children[nextPoseIndex]->getType());
        } else {
            weight2 = alpha;
            weight1 = 1.0f - weight2;
            _animStack[_children[prevPoseIndex]->getID()] = weight1 * parentAlpha;
            _animStack[_children[nextPoseIndex]->getID()] = weight2 * parentAlpha;
            context.setDebugAlpha(_children[prevPoseIndex]->getID(), (1.0f - alpha) * parentDebugAlpha, _children[prevPoseIndex]->getType());
            context.setDebugAlpha(_children[nextPoseIndex]->getID(), alpha * parentDebugAlpha, _children[nextPoseIndex]->getType());
        }
    }
@@ -14,8 +14,27 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

#include <QString>
#include <QStringList>
#include <map>

enum class AnimNodeType {
    Clip = 0,
    BlendLinear,
    BlendLinearMove,
    Overlay,
    StateMachine,
    Manipulator,
    InverseKinematics,
    DefaultPose,
    TwoBoneIK,
    PoleVectorConstraint,
    NumTypes
};

class AnimContext {
public:
    AnimContext() {}
    AnimContext(bool enableDebugDrawIKTargets, bool enableDebugDrawIKConstraints, bool enableDebugDrawIKChains,
                const glm::mat4& geometryToRigMatrix, const glm::mat4& rigToWorldMatrix);

@@ -25,6 +44,39 @@ public:
    const glm::mat4& getGeometryToRigMatrix() const { return _geometryToRigMatrix; }
    const glm::mat4& getRigToWorldMatrix() const { return _rigToWorldMatrix; }

    float getDebugAlpha(const QString& key) const {
        auto it = _debugAlphaMap.find(key);
        if (it != _debugAlphaMap.end()) {
            return std::get<0>(it->second);
        } else {
            return 1.0f;
        }
    }

    using DebugAlphaMapValue = std::tuple<float, AnimNodeType>;
    using DebugAlphaMap = std::map<QString, DebugAlphaMapValue>;

    void setDebugAlpha(const QString& key, float alpha, AnimNodeType type) const {
        _debugAlphaMap[key] = DebugAlphaMapValue(alpha, type);
    }

    const DebugAlphaMap& getDebugAlphaMap() const {
        return _debugAlphaMap;
    }

    using DebugStateMachineMapValue = QString;
    using DebugStateMachineMap = std::map<QString, DebugStateMachineMapValue>;

    void addStateMachineInfo(const QString& stateMachineName, const QString& currentState, const QString& previousState, bool duringInterp, float alpha) const {
        if (duringInterp) {
            _stateMachineMap[stateMachineName] = QString("%1: %2 -> %3 (%4)").arg(stateMachineName).arg(previousState).arg(currentState).arg(QString::number(alpha, 'f', 2));
        } else {
            _stateMachineMap[stateMachineName] = QString("%1: %2").arg(stateMachineName).arg(currentState);
        }
    }

    const DebugStateMachineMap& getStateMachineMap() const { return _stateMachineMap; }

protected:

    bool _enableDebugDrawIKTargets { false };

@@ -32,6 +84,10 @@ protected:
    bool _enableDebugDrawIKChains { false };
    glm::mat4 _geometryToRigMatrix;
    glm::mat4 _rigToWorldMatrix;

    // used for debugging internal state of animation system.
    mutable DebugAlphaMap _debugAlphaMap;
    mutable DebugStateMachineMap _stateMachineMap;
};

#endif // hifi_AnimContext_h
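For reference, the intended use of this API is the pattern the AnimBlendLinear/AnimBlendLinearMove changes later in this diff follow: each node reads its own debug alpha from the context and writes a scaled value for each child. A minimal sketch, not part of the change; "thisNodeID", "childA", "childB" and the 0.25/0.75 weights are illustrative values.

// Sketch only: propagate debug alpha from a parent node to its children.
void reportBlendWeights(const AnimContext& context) {
    // getDebugAlpha() returns 1.0f for keys that were never set.
    float parentDebugAlpha = context.getDebugAlpha("thisNodeID");

    // Each child's displayed alpha is its local blend weight scaled by the parent's alpha,
    // so the UI can show an absolute contribution per node.
    context.setDebugAlpha("childA", 0.25f * parentDebugAlpha, AnimNodeType::Clip);
    context.setDebugAlpha("childB", 0.75f * parentDebugAlpha, AnimNodeType::Clip);
}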
@@ -12,10 +12,6 @@

#include <QtGlobal>

std::map<QString, float> AnimNode::_animStack = {
    {"none", 0.0f}
};

AnimNode::Pointer AnimNode::getParent() {
    return _parent.lock();
}

@@ -36,19 +36,7 @@ class QJsonObject;

class AnimNode : public std::enable_shared_from_this<AnimNode> {
public:
    enum class Type {
        Clip = 0,
        BlendLinear,
        BlendLinearMove,
        Overlay,
        StateMachine,
        Manipulator,
        InverseKinematics,
        DefaultPose,
        TwoBoneIK,
        PoleVectorConstraint,
        NumTypes
    };
    using Type = AnimNodeType;
    using Pointer = std::shared_ptr<AnimNode>;
    using ConstPointer = std::shared_ptr<const AnimNode>;

@@ -84,7 +72,6 @@ public:
    }

    void setCurrentFrame(float frame);
    const std::map<QString, float> getAnimStack() { return _animStack; }

    template <typename F>
    bool traverse(F func) {

@@ -127,9 +114,6 @@ protected:
    std::weak_ptr<AnimNode> _parent;
    std::vector<QString> _outputJointNames;

    // global available to Stats.h
    static std::map<QString, float> _animStack;

    // no copies
    AnimNode(const AnimNode&) = delete;
    AnimNode& operator=(const AnimNode&) = delete;
@@ -23,9 +23,7 @@ AnimStateMachine::~AnimStateMachine() {

const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, const AnimContext& context, float dt, AnimVariantMap& triggersOut) {

    if (_id.contains("userAnimStateMachine")) {
        _animStack.clear();
    }
    float parentDebugAlpha = context.getDebugAlpha(_id);

    QString desiredStateID = animVars.lookup(_currentStateVar, _currentState->getID());
    if (_currentState->getID() != desiredStateID) {

@@ -33,8 +31,6 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
        bool foundState = false;
        for (auto& state : _states) {
            if (state->getID() == desiredStateID) {
                // parenthesis means previous state, which is a snapshot.
                _previousStateID = "(" + _currentState->getID() + ")";
                switchState(animVars, context, state);
                foundState = true;
                break;

@@ -48,8 +44,6 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
    // evaluate currentState transitions
    auto desiredState = evaluateTransitions(animVars);
    if (desiredState != _currentState) {
        // parenthesis means previous state, which is a snapshot.
        _previousStateID = "(" + _currentState->getID() + ")";
        switchState(animVars, context, desiredState);
    }

@@ -57,17 +51,8 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
    auto currentStateNode = _children[_currentState->getChildIndex()];
    assert(currentStateNode);

    if (!_previousStateID.contains("none")) {
        _animStack[_previousStateID] = 1.0f - _alpha;
    }

    if (_duringInterp) {
        _alpha += _alphaVel * dt;
        if (_alpha > 1.0f) {
            _animStack[_currentState->getID()] = 1.0f;
        } else {
            _animStack[_currentState->getID()] = _alpha;
        }
        if (_alpha < 1.0f) {
            AnimPoseVec* nextPoses = nullptr;
            AnimPoseVec* prevPoses = nullptr;

@@ -88,26 +73,27 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
            if (_poses.size() > 0 && nextPoses && prevPoses && nextPoses->size() > 0 && prevPoses->size() > 0) {
                ::blend(_poses.size(), &(prevPoses->at(0)), &(nextPoses->at(0)), _alpha, &_poses[0]);
            }
            context.setDebugAlpha(_currentState->getID(), _alpha * parentDebugAlpha, _children[_currentState->getChildIndex()]->getType());
        } else {
            _duringInterp = false;
            if (_animStack.count(_previousStateID) > 0) {
                _animStack.erase(_previousStateID);
            }
            _previousStateID = "none";
            _prevPoses.clear();
            _nextPoses.clear();
        }
    }

    if (!_duringInterp) {
        _animStack[_currentState->getID()] = 1.0f;
        context.setDebugAlpha(_currentState->getID(), parentDebugAlpha, _children[_currentState->getChildIndex()]->getType());
        _poses = currentStateNode->evaluate(animVars, context, dt, triggersOut);
    }
    processOutputJoints(triggersOut);

    context.addStateMachineInfo(_id, _currentState->getID(), _previousState->getID(), _duringInterp, _alpha);

    return _poses;
}

void AnimStateMachine::setCurrentState(State::Pointer state) {
    _previousState = _currentState ? _currentState : state;
    _currentState = state;
}

@@ -152,7 +138,7 @@ void AnimStateMachine::switchState(const AnimVariantMap& animVars, const AnimCon
    qCDebug(animation) << "AnimStateMachine::switchState:" << _currentState->getID() << "->" << desiredState->getID() << "duration =" << duration << "targetFrame =" << desiredState->_interpTarget << "interpType = " << (int)_interpType;
#endif

    _currentState = desiredState;
    setCurrentState(desiredState);
}

AnimStateMachine::State::Pointer AnimStateMachine::evaluateTransitions(const AnimVariantMap& animVars) const {

@@ -138,9 +138,9 @@ protected:
    float _alpha = 0.0f;
    AnimPoseVec _prevPoses;
    AnimPoseVec _nextPoses;
    QString _previousStateID { "none" };

    State::Pointer _currentState;
    State::Pointer _previousState;
    std::vector<State::Pointer> _states;

    QString _currentStateVar;
@@ -67,6 +67,7 @@ QScriptValue AnimVariantMap::animVariantMapToScriptValue(QScriptEngine* engine,
    }
    return target;
}

void AnimVariantMap::copyVariantsFrom(const AnimVariantMap& other) {
    for (auto& pair : other._map) {
        _map[pair.first] = pair.second;

@@ -124,3 +125,43 @@ void AnimVariantMap::animVariantMapFromScriptValue(const QScriptValue& source) {
        }
    }
}

std::map<QString, QString> AnimVariantMap::toDebugMap() const {
    std::map<QString, QString> result;
    for (auto& pair : _map) {
        switch (pair.second.getType()) {
        case AnimVariant::Type::Bool:
            result[pair.first] = QString("%1").arg(pair.second.getBool());
            break;
        case AnimVariant::Type::Int:
            result[pair.first] = QString("%1").arg(pair.second.getInt());
            break;
        case AnimVariant::Type::Float:
            result[pair.first] = QString::number(pair.second.getFloat(), 'f', 3);
            break;
        case AnimVariant::Type::Vec3: {
            glm::vec3 value = pair.second.getVec3();
            result[pair.first] = QString("(%1, %2, %3)").
                arg(QString::number(value.x, 'f', 3)).
                arg(QString::number(value.y, 'f', 3)).
                arg(QString::number(value.z, 'f', 3));
            break;
        }
        case AnimVariant::Type::Quat: {
            glm::quat value = pair.second.getQuat();
            result[pair.first] = QString("(%1, %2, %3, %4)").
                arg(QString::number(value.x, 'f', 3)).
                arg(QString::number(value.y, 'f', 3)).
                arg(QString::number(value.z, 'f', 3)).
                arg(QString::number(value.w, 'f', 3));
            break;
        }
        case AnimVariant::Type::String:
            result[pair.first] = pair.second.getString();
            break;
        default:
            assert(("invalid AnimVariant::Type", false));
        }
    }
    return result;
}

@@ -235,6 +235,9 @@ public:
    void animVariantMapFromScriptValue(const QScriptValue& object);
    void copyVariantsFrom(const AnimVariantMap& other);

    // For stat debugging.
    std::map<QString, QString> toDebugMap() const;

#ifdef NDEBUG
    void dump() const {
        qCDebug(animation) << "AnimVariantMap =";
@@ -1061,8 +1061,10 @@ void Rig::updateAnimations(float deltaTime, const glm::mat4& rootTransform, cons
            // animations haven't fully loaded yet.
            _internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
        }
        _lastAnimVars = _animVars;
        _animVars.clearTriggers();
        _animVars = triggersOut;
        _lastContext = context;
    }
    applyOverridePoses();
    buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);

@@ -222,7 +222,10 @@ public:
    // input assumed to be in rig space
    void computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const;

    const std::map<QString, float> getAnimStack() { return _animNode->getAnimStack(); }
    // used to debug animation playback
    const AnimContext::DebugAlphaMap& getDebugAlphaMap() const { return _lastContext.getDebugAlphaMap(); }
    const AnimVariantMap& getAnimVars() const { return _lastAnimVars; }
    const AnimContext::DebugStateMachineMap& getStateMachineMap() const { return _lastContext.getStateMachineMap(); }

    void toggleSmoothPoleVectors() { _smoothPoleVectors = !_smoothPoleVectors; };
signals:

@@ -388,6 +391,9 @@ protected:

    int _rigId;
    bool _headEnabled { false };

    AnimContext _lastContext;
    AnimVariantMap _lastAnimVars;
};

#endif /* defined(__hifi__Rig__) */
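These accessors are what the new AnimStats UI reads each frame. A minimal sketch of a consumer, assuming access to a Rig reference; the dumpRigDebugState function and its name are illustrative, not part of the change.

#include <QDebug>
#include <tuple>
// assumes Rig.h (and with it AnimContext and AnimVariantMap) is available

// Illustrative only: dump the rig's animation debug state to the log.
void dumpRigDebugState(const Rig& rig) {
    // Current anim vars, pre-formatted as strings by AnimVariantMap::toDebugMap().
    for (const auto& pair : rig.getAnimVars().toDebugMap()) {
        qDebug() << pair.first << "=" << pair.second;
    }

    // Blend weights recorded by the anim nodes via AnimContext::setDebugAlpha().
    for (const auto& pair : rig.getDebugAlphaMap()) {
        qDebug() << pair.first << "alpha =" << std::get<0>(pair.second);
    }

    // Human-readable state machine summaries ("machine: state" or "machine: prev -> next (alpha)").
    for (const auto& pair : rig.getStateMachineMap()) {
        qDebug() << pair.second;
    }
}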
@@ -0,0 +1,13 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AvatarTransformNode.h"

template<>
glm::vec3 BaseNestableTransformNode<Avatar>::getActualScale(const std::shared_ptr<Avatar>& nestablePointer) const {
    return nestablePointer->scaleForChildren();
}

@@ -0,0 +1,20 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AvatarTransformNode_h
#define hifi_AvatarTransformNode_h

#include "NestableTransformNode.h"

#include "Avatar.h"

class AvatarTransformNode : public BaseNestableTransformNode<Avatar> {
public:
    AvatarTransformNode(std::weak_ptr<Avatar> spatiallyNestable, int jointIndex) : BaseNestableTransformNode(spatiallyNestable, jointIndex) {};
};

#endif // hifi_AvatarTransformNode_h

@@ -14,7 +14,6 @@
#include <gpu/Batch.h>

#include <NodeList.h>
#include <recording/Deck.h>
#include <DependencyManager.h>
#include <GeometryUtil.h>
#include <trackers/FaceTracker.h>

@@ -21,6 +21,7 @@
#include "AvatarLogging.h"
#include "AvatarTraits.h"

#include "Profile.h"

void AvatarReplicas::addReplica(const QUuid& parentID, AvatarSharedPointer replica) {
    if (parentID == QUuid()) {

@@ -214,6 +215,7 @@ AvatarSharedPointer AvatarHashMap::findAvatar(const QUuid& sessionUUID) const {
}

void AvatarHashMap::processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
    DETAILED_PROFILE_RANGE(network, __FUNCTION__);
    PerformanceTimer perfTimer("receiveAvatar");
    // enumerate over all of the avatars in this packet
    // only add them if mixerWeakPointer points to something (meaning that mixer is still around)
libraries/entities/src/EntityTransformNode.cpp (new file, 13 lines)

@@ -0,0 +1,13 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "EntityTransformNode.h"

template<>
glm::vec3 BaseNestableTransformNode<EntityItem>::getActualScale(const std::shared_ptr<EntityItem>& nestablePointer) const {
    return nestablePointer->getScaledDimensions();
}

libraries/entities/src/EntityTransformNode.h (new file, 20 lines)

@@ -0,0 +1,20 @@
//
// Created by Sabrina Shanman 9/5/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_EntityTransformNode_h
#define hifi_EntityTransformNode_h

#include "NestableTransformNode.h"

#include "EntityItem.h"

class EntityTransformNode : public BaseNestableTransformNode<EntityItem> {
public:
    EntityTransformNode(std::weak_ptr<EntityItem> spatiallyNestable, int jointIndex) : BaseNestableTransformNode(spatiallyNestable, jointIndex) {};
};

#endif // hifi_EntityTransformNode_h
@@ -56,13 +56,20 @@ LimitedNodeList::LimitedNodeList(int socketListenPort, int dtlsListenPort) :
    qRegisterMetaType<ConnectionStep>("ConnectionStep");
    auto port = (socketListenPort != INVALID_PORT) ? socketListenPort : LIMITED_NODELIST_LOCAL_PORT.get();
    _nodeSocket.bind(QHostAddress::AnyIPv4, port);
    qCDebug(networking) << "NodeList socket is listening on" << _nodeSocket.localPort();
    quint16 assignedPort = _nodeSocket.localPort();
    if (socketListenPort != INVALID_PORT && socketListenPort != 0 && socketListenPort != assignedPort) {
        qCCritical(networking) << "NodeList is unable to assign requested port of" << socketListenPort;
    }
    qCDebug(networking) << "NodeList socket is listening on" << assignedPort;

    if (dtlsListenPort != INVALID_PORT) {
        // only create the DTLS socket during constructor if a custom port is passed
        _dtlsSocket = new QUdpSocket(this);

        _dtlsSocket->bind(QHostAddress::AnyIPv4, dtlsListenPort);
        if (dtlsListenPort != 0 && _dtlsSocket->localPort() != dtlsListenPort) {
            qCDebug(networking) << "NodeList is unable to assign requested DTLS port of" << dtlsListenPort;
        }
        qCDebug(networking) << "NodeList DTLS socket is listening on" << _dtlsSocket->localPort();
    }
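The change above makes a failed bind visible instead of silently listening on whatever ephemeral port the OS handed back. The same bind-then-verify pattern in isolation, as a hedged standalone Qt sketch (function and variable names are illustrative):

#include <QUdpSocket>
#include <QHostAddress>
#include <QDebug>

// Bind to a requested UDP port and report whether the OS actually granted it.
// Passing 0 asks the OS for any free port, so 0 is never treated as a failure.
bool bindWithCheck(QUdpSocket& socket, quint16 requestedPort) {
    socket.bind(QHostAddress::AnyIPv4, requestedPort);
    quint16 assignedPort = socket.localPort();
    if (requestedPort != 0 && assignedPort != requestedPort) {
        qCritical() << "Unable to bind requested port" << requestedPort
                    << "- listening on" << assignedPort << "instead";
        return false;
    }
    qDebug() << "Socket is listening on" << assignedPort;
    return true;
}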
@@ -73,7 +73,7 @@ void DeferredFramebuffer::allocate() {
        _deferredFramebufferDepthColor->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);


        auto smoothSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT);
        auto smoothSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR_MIP_POINT, gpu::Sampler::WRAP_CLAMP);

        _lightingTexture = gpu::Texture::createRenderBuffer(gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::R11G11B10), width, height, gpu::Texture::SINGLE_MIP, smoothSampler);
        _lightingFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("lighting"));

@@ -108,6 +108,10 @@ public:
    Q_INVOKABLE void setBakingEnabled(QString path, bool enabled, QScriptValue callback);

#if (PR_BUILD || DEV_BUILD)
    /**
     * This function is purely for development purposes, and not meant for use in a
     * production context. It is not a public-facing API, so it should not contain jsdoc.
     */
    Q_INVOKABLE void sendFakedHandshake();
#endif

@@ -68,6 +68,7 @@ const glm::quat DEFAULT_AVATAR_RIGHTFOOT_ROT { -0.4016716778278351f, 0.915461599
const float DEFAULT_AVATAR_MAX_WALKING_SPEED = 2.6f; // meters / second
const float DEFAULT_AVATAR_MAX_WALKING_BACKWARD_SPEED = 2.2f; // meters / second
const float DEFAULT_AVATAR_MAX_FLYING_SPEED = 30.0f; // meters / second
const float DEFAULT_AVATAR_WALK_SPEED_THRESHOLD = 0.15f;

const float DEFAULT_AVATAR_GRAVITY = -5.0f; // meters / second^2
const float DEFAULT_AVATAR_JUMP_SPEED = 3.5f; // meters / second
@@ -8,24 +8,7 @@

#include "NestableTransformNode.h"

NestableTransformNode::NestableTransformNode(SpatiallyNestableWeakPointer spatiallyNestable, int jointIndex) :
    _spatiallyNestable(spatiallyNestable),
    _jointIndex(jointIndex)
{
}

Transform NestableTransformNode::getTransform() {
    auto nestable = _spatiallyNestable.lock();
    if (!nestable) {
        return Transform();
    }

    bool success;
    Transform jointWorldTransform = nestable->getTransform(_jointIndex, success);

    if (success) {
        return jointWorldTransform;
    } else {
        return Transform();
    }
template<>
glm::vec3 BaseNestableTransformNode<SpatiallyNestable>::getActualScale(const std::shared_ptr<SpatiallyNestable>& nestablePointer) const {
    return nestablePointer->getAbsoluteJointScaleInObjectFrame(_jointIndex);
}

@@ -12,14 +12,48 @@

#include "SpatiallyNestable.h"

class NestableTransformNode : public TransformNode {
template <typename T>
class BaseNestableTransformNode : public TransformNode {
public:
    NestableTransformNode(SpatiallyNestableWeakPointer spatiallyNestable, int jointIndex);
    Transform getTransform() override;
    BaseNestableTransformNode(std::weak_ptr<T> spatiallyNestable, int jointIndex) :
        _spatiallyNestable(spatiallyNestable),
        _jointIndex(jointIndex) {
        auto nestablePointer = _spatiallyNestable.lock();
        if (nestablePointer) {
            glm::vec3 nestableDimensions = getActualScale(nestablePointer);
            _baseScale = glm::max(glm::vec3(0.001f), nestableDimensions);
        }
    }

    Transform getTransform() override {
        std::shared_ptr<T> nestable = _spatiallyNestable.lock();
        if (!nestable) {
            return Transform();
        }

        bool success;
        Transform jointWorldTransform = nestable->getTransform(_jointIndex, success);

        if (!success) {
            return Transform();
        }

        jointWorldTransform.setScale(getActualScale(nestable) / _baseScale);

        return jointWorldTransform;
    }

    glm::vec3 getActualScale(const std::shared_ptr<T>& nestablePointer) const;

protected:
    SpatiallyNestableWeakPointer _spatiallyNestable;
    std::weak_ptr<T> _spatiallyNestable;
    int _jointIndex;
    glm::vec3 _baseScale { 1.0f };
};

class NestableTransformNode : public BaseNestableTransformNode<SpatiallyNestable> {
public:
    NestableTransformNode(std::weak_ptr<SpatiallyNestable> spatiallyNestable, int jointIndex) : BaseNestableTransformNode(spatiallyNestable, jointIndex) {};
};

#endif // hifi_NestableTransformNode_h
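The concrete subclasses added in this diff (NestableTransformNode, OverlayTransformNode, AvatarTransformNode, EntityTransformNode) all follow the same two-step recipe: specialize getActualScale() for the nestable type, then declare a thin subclass that forwards the constructor. A sketch for a hypothetical nestable type, not part of the change; "MyNestable" and getMyScale() are placeholders, assumed to be a SpatiallyNestable-style class with some scale accessor.

// 1. In a .cpp file, specialize getActualScale() so the base template can record the
//    object's size at construction time (_baseScale) and normalize later scale changes.
template<>
glm::vec3 BaseNestableTransformNode<MyNestable>::getActualScale(const std::shared_ptr<MyNestable>& nestablePointer) const {
    return nestablePointer->getMyScale(); // placeholder accessor
}

// 2. Declare a thin subclass that just forwards the constructor arguments.
class MyNestableTransformNode : public BaseNestableTransformNode<MyNestable> {
public:
    MyNestableTransformNode(std::weak_ptr<MyNestable> spatiallyNestable, int jointIndex) :
        BaseNestableTransformNode(spatiallyNestable, jointIndex) {}
};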
@@ -259,10 +259,11 @@ public:
 * A CollisionRegion defines a volume for checking collisions in the physics simulation.

 * @typedef {object} CollisionRegion
 * @property {Shape} shape - The information about the collision region's size and shape.
 * @property {Shape} shape - The information about the collision region's size and shape. Dimensions are in world space, but will scale with the parent if defined.
 * @property {Vec3} position - The position of the collision region, relative to a parent if defined.
 * @property {Quat} orientation - The orientation of the collision region, relative to a parent if defined.
 * @property {float} threshold - The approximate minimum penetration depth for a test object to be considered in contact with the collision region.
 *   The depth is measured in world space, but will scale with the parent if defined.
 * @property {Uuid} parentID - The ID of the parent, either an avatar, an entity, or an overlay.
 * @property {number} parentJointIndex - The joint of the parent to parent to, for example, the joints on the model of an avatar. (default = 0, no joint)
 * @property {string} joint - If "Mouse," parents the pick to the mouse. If "Avatar," parents the pick to MyAvatar's head. Otherwise, parents to the joint of the given name on MyAvatar.
@@ -23,7 +23,7 @@ if (scripts.length >= 2) {
var qml = Script.resolvePath('debugWindow.qml');

var HMD_DEBUG_WINDOW_GEOMETRY_KEY = 'hmdDebugWindowGeometry';
var hmdDebugWindowGeometryValue = Settings.getValue(HMD_DEBUG_WINDOW_GEOMETRY_KEY)
var hmdDebugWindowGeometryValue = Settings.getValue(HMD_DEBUG_WINDOW_GEOMETRY_KEY);

var windowWidth = 400;
var windowHeight = 900;

@@ -34,12 +34,13 @@ var windowY = 0;

if (hmdDebugWindowGeometryValue !== '') {
    var geometry = JSON.parse(hmdDebugWindowGeometryValue);

    windowWidth = geometry.width
    windowHeight = geometry.height
    windowX = geometry.x
    windowY = geometry.y
    hasPosition = true;
    if ((geometry.x !== 0) && (geometry.y !== 0)) {
        windowWidth = geometry.width;
        windowHeight = geometry.height;
        windowX = geometry.x;
        windowY = geometry.y;
        hasPosition = true;
    }
}

var window = new OverlayWindow({

@@ -52,6 +53,12 @@ if (hasPosition) {
    window.setPosition(windowX, windowY);
}

window.visibleChanged.connect(function() {
    if (!window.visible) {
        window.setVisible(true);
    }
});

window.closed.connect(function () { Script.stop(); });

var getFormattedDate = function() {

@@ -93,10 +100,10 @@ Script.scriptEnding.connect(function () {
        y: window.position.y,
        width: window.size.x,
        height: window.size.y
    })
    });

    Settings.setValue(HMD_DEBUG_WINDOW_GEOMETRY_KEY, geometry);
    window.close();
})
});

}());