Merge branch 'master' of https://github.com/highfidelity/hifi into autoLaserFix2

This commit is contained in:
David Back 2018-06-18 11:06:56 -07:00
commit 0c644c0504
44 changed files with 462 additions and 1489 deletions

View file

@ -24,6 +24,7 @@ Rectangle {
color: hifi.colors.baseGray;
signal sendToScript(var message);
property bool keyboardEnabled: false
property bool keyboardRaised: false
property bool punctuationMode: false
property bool keyboardRasied: false
@ -235,10 +236,11 @@ Rectangle {
Keyboard {
id: keyboard
raised: parent.keyboardEnabled
raised: parent.keyboardEnabled && parent.keyboardRaised
numeric: parent.punctuationMode
anchors {
bottom: parent.bottom
bottomMargin: 40
left: parent.left
right: parent.right
}

View file

@ -198,7 +198,6 @@
#include <GPUIdent.h>
#include <gl/GLHelpers.h>
#include <src/scripting/LimitlessVoiceRecognitionScriptingInterface.h>
#include <src/scripting/GooglePolyScriptingInterface.h>
#include <EntityScriptClient.h>
#include <ModelScriptingInterface.h>
@ -921,7 +920,6 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
DependencyManager::set<OffscreenQmlSurfaceCache>();
DependencyManager::set<EntityScriptClient>();
DependencyManager::set<EntityScriptServerLogClient>();
DependencyManager::set<LimitlessVoiceRecognitionScriptingInterface>();
DependencyManager::set<GooglePolyScriptingInterface>();
DependencyManager::set<OctreeStatsProvider>(nullptr, qApp->getOcteeSceneStats());
DependencyManager::set<AvatarBookmarks>();
@ -2243,9 +2241,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
DependencyManager::get<EntityTreeRenderer>()->setSetPrecisionPickingOperator([&](unsigned int rayPickID, bool value) {
DependencyManager::get<PickManager>()->setPrecisionPicking(rayPickID, value);
});
EntityTreeRenderer::setRenderDebugHullsOperator([] {
return Menu::getInstance()->isOptionChecked(MenuOption::PhysicsShowHulls);
});
// Preload Tablet sounds
DependencyManager::get<TabletScriptingInterface>()->preloadSounds();
@ -5858,14 +5853,10 @@ void Application::update(float deltaTime) {
{
PerformanceTimer perfTimer("limitless");
PerformanceTimer perfTimer("AnimDebugDraw");
AnimDebugDraw::getInstance().update();
}
{
PerformanceTimer perfTimer("limitless");
DependencyManager::get<LimitlessVoiceRecognitionScriptingInterface>()->update();
}
{ // Game loop is done, mark the end of the frame for the scene transactions and the render loop to take over
PerformanceTimer perfTimer("enqueueFrame");
@ -6574,7 +6565,6 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEnginePointe
scriptEngine->registerGlobalObject("UserActivityLogger", DependencyManager::get<UserActivityLoggerScriptingInterface>().data());
scriptEngine->registerGlobalObject("Users", DependencyManager::get<UsersScriptingInterface>().data());
scriptEngine->registerGlobalObject("LimitlessSpeechRecognition", DependencyManager::get<LimitlessVoiceRecognitionScriptingInterface>().data());
scriptEngine->registerGlobalObject("GooglePoly", DependencyManager::get<GooglePolyScriptingInterface>().data());
if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) {

View file

@ -205,10 +205,6 @@ void Application::runRenderFrame(RenderArgs* renderArgs) {
RenderArgs::DebugFlags renderDebugFlags = RenderArgs::RENDER_DEBUG_NONE;
if (Menu::getInstance()->isOptionChecked(MenuOption::PhysicsShowHulls)) {
renderDebugFlags = static_cast<RenderArgs::DebugFlags>(renderDebugFlags |
static_cast<int>(RenderArgs::RENDER_DEBUG_HULLS));
}
renderArgs->_debugFlags = renderDebugFlags;
}

View file

@ -725,7 +725,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowOwned,
0, false, drawStatusConfig, SLOT(setShowNetwork(bool)));
}
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowHulls, 0, false, qApp->getEntities().data(), SIGNAL(setRenderDebugHulls()));
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowBulletWireframe, 0, false, qApp, SLOT(setShowBulletWireframe(bool)));
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowBulletAABBs, 0, false, qApp, SLOT(setShowBulletAABBs(bool)));
@ -813,7 +812,7 @@ Menu::Menu() {
});
essLogAction->setEnabled(nodeList->getThisNodeCanRez());
action = addActionToQMenuAndActionHash(developerMenu, "Script Log (HMD friendly)...", Qt::NoButton,
addActionToQMenuAndActionHash(developerMenu, "Script Log (HMD friendly)...", Qt::NoButton,
qApp, SLOT(showScriptLogs()));
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::VerboseLogging, 0, false,

View file

@ -141,7 +141,6 @@ namespace MenuOption {
const QString Overlays = "Show Overlays";
const QString PackageModel = "Package Model as .fst...";
const QString Pair = "Pair";
const QString PhysicsShowHulls = "Draw Collision Shapes";
const QString PhysicsShowOwned = "Highlight Simulation Ownership";
const QString VerboseLogging = "Verbose Logging";
const QString PhysicsShowBulletWireframe = "Show Bullet Collision";

View file

@ -1,93 +0,0 @@
#include "LimitlessConnection.h"
#include <QJsonObject>
#include <QJsonDocument>
#include <QJsonArray>
#include <src/InterfaceLogging.h>
#include <src/ui/AvatarInputs.h>
#include "LimitlessVoiceRecognitionScriptingInterface.h"
// Constructs an idle connection; no socket exists until startListening() is called.
LimitlessConnection::LimitlessConnection() :
    _streamingAudioForTranscription(false)
{
}
// Opens a TCP connection to the Limitless transcription server and sends the
// authorization / stream-format header.
// @param authCode  account authorization token, forwarded verbatim to the server.
// NOTE(review): connectToHost()/waitForConnected() block the calling thread, and
// the result of waitForConnected() is not checked -- a failed connect proceeds
// to write on an unconnected socket. Confirm this is intentional best-effort.
void LimitlessConnection::startListening(QString authCode) {
    _transcribeServerSocket.reset(new QTcpSocket(this));
    connect(_transcribeServerSocket.get(), &QTcpSocket::readyRead, this,
            &LimitlessConnection::transcriptionReceived);
    // Tear the session down if the server closes the connection.
    connect(_transcribeServerSocket.get(), &QTcpSocket::disconnected, this, [this](){stopListening();});
    static const auto host = "gserv_devel.studiolimitless.com";
    _transcribeServerSocket->connectToHost(host, 1407);
    _transcribeServerSocket->waitForConnected();
    // Header format expected by the server: auth token plus the input sample rate.
    QString requestHeader = QString::asprintf("Authorization: %s\r\nfs: %i\r\n",
                                              authCode.toLocal8Bit().data(), AudioConstants::SAMPLE_RATE);
    qCDebug(interfaceapp) << "Sending Limitless Audio Stream Request: " << requestHeader;
    _transcribeServerSocket->write(requestHeader.toLocal8Bit());
    _transcribeServerSocket->waitForBytesWritten();
}
// Ends the current transcription session: emits the last transcription as the
// final result, resets state, and releases the socket.
// NOTE: onFinishedSpeaking is emitted even when no socket is connected.
void LimitlessConnection::stopListening() {
    emit onFinishedSpeaking(_currentTranscription);
    _streamingAudioForTranscription = false;
    _currentTranscription = "";
    if (!isConnected())
        return;
    _transcribeServerSocket->close();
    disconnect(_transcribeServerSocket.get(), &QTcpSocket::readyRead, this,
               &LimitlessConnection::transcriptionReceived);
    // Hand ownership to Qt: release() (not reset()) so deleteLater() can destroy
    // the socket on its event loop without a double delete.
    _transcribeServerSocket.release()->deleteLater();
    // Stop forwarding microphone input (connected in transcriptionReceived on auth).
    disconnect(DependencyManager::get<AudioClient>().data(), &AudioClient::inputReceived, this,
               &LimitlessConnection::audioInputReceived);
    qCDebug(interfaceapp) << "Connection to Limitless Voice Server closed.";
}
// Streams one chunk of captured microphone audio to the transcription server.
// Chunks that arrive while the socket is not connected are silently dropped.
void LimitlessConnection::audioInputReceived(const QByteArray& inputSamples) {
    if (!isConnected()) {
        return;
    }
    _transcribeServerSocket->write(inputSamples.data(), inputSamples.size());
    _transcribeServerSocket->waitForBytesWritten();
}
// Drains the socket and parses '<'...'>'-framed messages out of the accumulated
// buffer. Handles three message kinds: a "1407" denial, a "1408" authentication
// acknowledgement, and JSON transcription payloads.
void LimitlessConnection::transcriptionReceived() {
    while (_transcribeServerSocket && _transcribeServerSocket->bytesAvailable() > 0) {
        const QByteArray data = _transcribeServerSocket->readAll();
        _serverDataBuffer.append(data);
        // Process every complete <...> frame currently in the buffer.
        int begin = _serverDataBuffer.indexOf('<');
        int end = _serverDataBuffer.indexOf('>');
        while (begin > -1 && end > -1) {
            const int len = end - begin;
            // Payload between the delimiters (excludes '<' and '>').
            const QByteArray serverMessage = _serverDataBuffer.mid(begin+1, len-1);
            if (serverMessage.contains("1407")) {
                qCDebug(interfaceapp) << "Limitless Speech Server denied the request.";
                // Don't spam the server with further false requests please.
                // NOTE(review): passing 'true' keeps voice detection enabled, which
                // appears to contradict the comment above -- confirm whether 'false'
                // was intended.
                DependencyManager::get<LimitlessVoiceRecognitionScriptingInterface>()->setListeningToVoice(true);
                stopListening();
                return;
            } else if (serverMessage.contains("1408")) {
                qCDebug(interfaceapp) << "Limitless Audio request authenticated!";
                _serverDataBuffer.clear();
                // Authenticated: begin forwarding microphone input to the server.
                connect(DependencyManager::get<AudioClient>().data(), &AudioClient::inputReceived, this,
                        &LimitlessConnection::audioInputReceived);
                return;
            }
            // Otherwise the frame is a JSON transcription message.
            QJsonObject json = QJsonDocument::fromJson(serverMessage.data()).object();
            // Consume the frame, including both delimiters.
            _serverDataBuffer.remove(begin, len+1);
            _currentTranscription = json["alternatives"].toArray()[0].toObject()["transcript"].toString();
            emit onReceivedTranscription(_currentTranscription);
            if (json["isFinal"] == true) {
                qCDebug(interfaceapp) << "Final transcription: " << _currentTranscription;
                stopListening();
                return;
            }
            begin = _serverDataBuffer.indexOf('<');
            end = _serverDataBuffer.indexOf('>');
        }
    }
}
bool LimitlessConnection::isConnected() const {
return _transcribeServerSocket.get() && _transcribeServerSocket->isWritable()
&& _transcribeServerSocket->state() != QAbstractSocket::SocketState::UnconnectedState;
}

View file

@ -1,46 +0,0 @@
//
// LimitlessConnection.h
// interface/src/scripting
//
// Created by Trevor Berninger on 3/24/17.
// Copyright 2017 Limitless ltd.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_LimitlessConnection_h
#define hifi_LimitlessConnection_h
#include <QtCore/QObject>
#include <QtCore/QFuture>
#include <QtNetwork/QTcpSocket>
#include <AudioClient.h>
// Manages a raw TCP session with the Limitless speech-to-text service: sends an
// auth header, streams microphone audio, and parses '<'...'>'-framed JSON
// transcription messages back into Qt signals.
class LimitlessConnection : public QObject {
    Q_OBJECT
public:
    LimitlessConnection();

    // Opens the socket and authenticates; Q_INVOKABLE so it can be queued
    // across threads via QMetaObject::invokeMethod.
    Q_INVOKABLE void startListening(QString authCode);
    // Emits the final transcription and closes the socket.
    Q_INVOKABLE void stopListening();

    // Set while audio is being streamed for transcription; atomic because it is
    // read/written from more than one thread (see the scripting interface).
    std::atomic<bool> _streamingAudioForTranscription;

signals:
    void onReceivedTranscription(QString speech);  // interim (partial) result
    void onFinishedSpeaking(QString speech);       // final result for the utterance

private:
    void transcriptionReceived();
    void audioInputReceived(const QByteArray& inputSamples);
    bool isConnected() const;

    std::unique_ptr<QTcpSocket> _transcribeServerSocket;
    QByteArray _serverDataBuffer;    // accumulates partial '<...>' frames
    QString _currentTranscription;
};
#endif //hifi_LimitlessConnection_h

View file

@ -1,66 +0,0 @@
//
// LimitlessVoiceRecognitionScriptingInterface.cpp
// interface/src/scripting
//
// Created by Trevor Berninger on 3/20/17.
// Copyright 2017 Limitless ltd.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "LimitlessVoiceRecognitionScriptingInterface.h"
#include <QtConcurrent/QtConcurrentRun>
#include <ThreadHelpers.h>
#include "InterfaceLogging.h"
#include "ui/AvatarInputs.h"
const float LimitlessVoiceRecognitionScriptingInterface::_audioLevelThreshold = 0.33f;
const int LimitlessVoiceRecognitionScriptingInterface::_voiceTimeoutDuration = 2000;
// Wires up the silence-timeout timer, re-emits the connection's transcription
// signals from this script-facing object, and moves the connection onto its
// own named thread (it performs blocking socket work).
LimitlessVoiceRecognitionScriptingInterface::LimitlessVoiceRecognitionScriptingInterface() :
    _shouldStartListeningForVoice(false)
{
    // Single-shot: armed when the mic falls silent, cancelled if speech resumes.
    _voiceTimer.setSingleShot(true);
    connect(&_voiceTimer, &QTimer::timeout, this, &LimitlessVoiceRecognitionScriptingInterface::voiceTimeout);
    // Forward the connection's signals to scripts listening on this interface.
    connect(&_connection, &LimitlessConnection::onReceivedTranscription, this, [this](QString transcription){emit onReceivedTranscription(transcription);});
    connect(&_connection, &LimitlessConnection::onFinishedSpeaking, this, [this](QString transcription){emit onFinishedSpeaking(transcription);});
    moveToNewNamedThread(&_connection, "Limitless Connection");
}
// Per-frame tick. Uses the current microphone level to decide when to start
// streaming audio for transcription and when to arm/disarm the silence timer.
void LimitlessVoiceRecognitionScriptingInterface::update() {
    const float audioLevel = AvatarInputs::getInstance()->loudnessToAudioLevel(DependencyManager::get<AudioClient>()->getAudioAverageInputLoudness());

    if (_shouldStartListeningForVoice) {
        if (_connection._streamingAudioForTranscription) {
            if (audioLevel > _audioLevelThreshold) {
                // Still speaking: cancel any pending silence timeout.
                if (_voiceTimer.isActive()) {
                    _voiceTimer.stop();
                }
            } else if (!_voiceTimer.isActive()){
                // Mic fell silent: stop listening after _voiceTimeoutDuration ms.
                _voiceTimer.start(_voiceTimeoutDuration);
            }
        } else if (audioLevel > _audioLevelThreshold) {
            // to make sure invoke doesn't get called twice before the method actually gets called
            _connection._streamingAudioForTranscription = true;
            // startListening executes on the connection's own thread.
            QMetaObject::invokeMethod(&_connection, "startListening", Q_ARG(QString, authCode));
        }
    }
}
// Enables/disables voice detection; when enabled, update() begins streaming
// once the microphone level exceeds the threshold.
void LimitlessVoiceRecognitionScriptingInterface::setListeningToVoice(bool listening) {
    _shouldStartListeningForVoice = listening;
}
// Stores the authorization token used for subsequent startListening requests.
void LimitlessVoiceRecognitionScriptingInterface::setAuthKey(QString key) {
    authCode = key;
}
// Silence timeout fired: end the active transcription session, if any
// (stopListening is queued onto the connection's thread).
void LimitlessVoiceRecognitionScriptingInterface::voiceTimeout() {
    if (_connection._streamingAudioForTranscription) {
        QMetaObject::invokeMethod(&_connection, "stopListening");
    }
}

View file

@ -1,49 +0,0 @@
//
// LimitlessVoiceRecognitionScriptingInterface.h
// interface/src/scripting
//
// Created by Trevor Berninger on 3/20/17.
// Copyright 2017 Limitless ltd.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_SpeechRecognitionScriptingInterface_h
#define hifi_SpeechRecognitionScriptingInterface_h
#include <AudioClient.h>
#include <QObject>
#include <QFuture>
#include "LimitlessConnection.h"
// Script-facing interface that drives a LimitlessConnection: watches microphone
// loudness each frame and starts/stops server-side transcription accordingly.
class LimitlessVoiceRecognitionScriptingInterface : public QObject, public Dependency {
    Q_OBJECT
public:
    LimitlessVoiceRecognitionScriptingInterface();

    // Per-frame tick; drives start-of-speech detection and the silence timeout.
    void update();

    QString authCode;  // auth token sent with each startListening request

public slots:
    void setListeningToVoice(bool listening);
    void setAuthKey(QString key);

signals:
    void onReceivedTranscription(QString speech);  // interim result
    void onFinishedSpeaking(QString speech);       // final result

private:
    bool _shouldStartListeningForVoice;
    static const float _audioLevelThreshold;   // mic level that counts as speech
    static const int _voiceTimeoutDuration;    // ms of silence before stopping

    QTimer _voiceTimer;
    LimitlessConnection _connection;

    void voiceTimeout();
};
#endif //hifi_SpeechRecognitionScriptingInterface_h

View file

@ -201,13 +201,12 @@ void Circle3DOverlay::render(RenderArgs* args) {
float tickMarkAngle = getMajorTickMarksAngle();
float angle = _startAt - fmodf(_startAt, tickMarkAngle) + tickMarkAngle;
float angleInRadians = glm::radians(angle);
float tickMarkLength = getMajorTickMarksLength();
float startRadius = (tickMarkLength > 0.0f) ? _innerRadius : _outerRadius;
float endRadius = startRadius + tickMarkLength;
while (angle <= _endAt) {
angleInRadians = glm::radians(angle);
float angleInRadians = glm::radians(angle);
glm::vec2 thisPointA(cosf(angleInRadians) * startRadius, sinf(angleInRadians) * startRadius);
glm::vec2 thisPointB(cosf(angleInRadians) * endRadius, sinf(angleInRadians) * endRadius);
@ -223,13 +222,12 @@ void Circle3DOverlay::render(RenderArgs* args) {
float tickMarkAngle = getMinorTickMarksAngle();
float angle = _startAt - fmodf(_startAt, tickMarkAngle) + tickMarkAngle;
float angleInRadians = glm::radians(angle);
float tickMarkLength = getMinorTickMarksLength();
float startRadius = (tickMarkLength > 0.0f) ? _innerRadius : _outerRadius;
float endRadius = startRadius + tickMarkLength;
while (angle <= _endAt) {
angleInRadians = glm::radians(angle);
float angleInRadians = glm::radians(angle);
glm::vec2 thisPointA(cosf(angleInRadians) * startRadius, sinf(angleInRadians) * startRadius);
glm::vec2 thisPointB(cosf(angleInRadians) * endRadius, sinf(angleInRadians) * endRadius);

View file

@ -105,8 +105,10 @@ QStringList Animation::getJointNames() const {
return result;
}
QStringList names;
foreach (const FBXJoint& joint, _geometry->joints) {
names.append(joint.name);
if (_geometry) {
foreach (const FBXJoint& joint, _geometry->joints) {
names.append(joint.name);
}
}
return names;
}
@ -114,11 +116,15 @@ QStringList Animation::getJointNames() const {
QVector<FBXAnimationFrame> Animation::getFrames() const {
if (QThread::currentThread() != thread()) {
QVector<FBXAnimationFrame> result;
BLOCKING_INVOKE_METHOD(const_cast<Animation*>(this), "getFrames",
BLOCKING_INVOKE_METHOD(const_cast<Animation*>(this), "getFrames",
Q_RETURN_ARG(QVector<FBXAnimationFrame>, result));
return result;
}
return _geometry->animationFrames;
if (_geometry) {
return _geometry->animationFrames;
} else {
return QVector<FBXAnimationFrame>();
}
}
const QVector<FBXAnimationFrame>& Animation::getFramesReference() const {

View file

@ -42,7 +42,6 @@
size_t std::hash<EntityItemID>::operator()(const EntityItemID& id) const { return qHash(id); }
std::function<bool()> EntityTreeRenderer::_entitiesShouldFadeFunction;
std::function<bool()> EntityTreeRenderer::_renderDebugHullsOperator = [] { return false; };
QString resolveScriptURL(const QString& scriptUrl) {
auto normalizedScriptUrl = DependencyManager::get<ResourceManager>()->normalizeURL(scriptUrl);

View file

@ -116,14 +116,10 @@ public:
EntityItemPointer getEntity(const EntityItemID& id);
void onEntityChanged(const EntityItemID& id);
static void setRenderDebugHullsOperator(std::function<bool()> renderDebugHullsOperator) { _renderDebugHullsOperator = renderDebugHullsOperator; }
static bool shouldRenderDebugHulls() { return _renderDebugHullsOperator(); }
signals:
void enterEntity(const EntityItemID& entityItemID);
void leaveEntity(const EntityItemID& entityItemID);
void collisionWithEntity(const EntityItemID& idA, const EntityItemID& idB, const Collision& collision);
void setRenderDebugHulls();
public slots:
void addingEntity(const EntityItemID& entityID);
@ -259,8 +255,6 @@ private:
static int _entitiesScriptEngineCount;
static CalculateEntityLoadingPriority _calculateEntityLoadingPriorityFunc;
static std::function<bool()> _entitiesShouldFadeFunction;
static std::function<bool()> _renderDebugHullsOperator;
};

View file

@ -23,7 +23,6 @@
#include <QtCore/QUrlQuery>
#include <AbstractViewStateInterface.h>
#include <CollisionRenderMeshCache.h>
#include <Model.h>
#include <PerfStat.h>
#include <render/Scene.h>
@ -35,8 +34,6 @@
#include "EntitiesRendererLogging.h"
static CollisionRenderMeshCache collisionMeshCache;
void ModelEntityWrapper::setModel(const ModelPointer& model) {
withWriteLock([&] {
if (_model != model) {
@ -1052,10 +1049,7 @@ using namespace render;
using namespace render::entities;
ModelEntityRenderer::ModelEntityRenderer(const EntityItemPointer& entity) : Parent(entity) {
connect(DependencyManager::get<EntityTreeRenderer>().data(), &EntityTreeRenderer::setRenderDebugHulls, this, [&] {
_needsCollisionGeometryUpdate = true;
emit requestRenderUpdate();
});
}
void ModelEntityRenderer::setKey(bool didVisualGeometryRequestSucceed) {
@ -1215,10 +1209,6 @@ bool ModelEntityRenderer::needsRenderUpdate() const {
if (model->getRenderItemsNeedUpdate()) {
return true;
}
if (_needsCollisionGeometryUpdate) {
return true;
}
}
return Parent::needsRenderUpdate();
}
@ -1285,12 +1275,7 @@ bool ModelEntityRenderer::needsRenderUpdateFromTypedEntity(const TypedEntityPoin
}
void ModelEntityRenderer::setCollisionMeshKey(const void*key) {
if (key != _collisionMeshKey) {
if (_collisionMeshKey) {
collisionMeshCache.releaseMesh(_collisionMeshKey);
}
_collisionMeshKey = key;
}
_collisionMeshKey = key;
}
void ModelEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) {
@ -1339,7 +1324,6 @@ void ModelEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& sce
_didLastVisualGeometryRequestSucceed = didVisualGeometryRequestSucceed;
});
connect(model.get(), &Model::requestRenderUpdate, this, &ModelEntityRenderer::requestRenderUpdate);
connect(entity.get(), &RenderableModelEntityItem::requestCollisionGeometryUpdate, this, &ModelEntityRenderer::flagForCollisionGeometryUpdate);
model->setLoadingPriority(EntityTreeRenderer::getEntityLoadingPriority(*entity));
entity->setModel(model);
withWriteLock([&] { _model = model; });
@ -1412,26 +1396,6 @@ void ModelEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& sce
model->setCanCastShadow(_canCastShadow, scene);
}
if (_needsCollisionGeometryUpdate) {
setCollisionMeshKey(entity->getCollisionMeshKey());
_needsCollisionGeometryUpdate = false;
ShapeType type = entity->getShapeType();
if (DependencyManager::get<EntityTreeRenderer>()->shouldRenderDebugHulls() && type != SHAPE_TYPE_STATIC_MESH && type != SHAPE_TYPE_NONE) {
// NOTE: it is OK if _collisionMeshKey is nullptr
graphics::MeshPointer mesh = collisionMeshCache.getMesh(_collisionMeshKey);
// NOTE: the model will render the collisionGeometry if it has one
_model->setCollisionMesh(mesh);
} else {
if (_collisionMeshKey) {
// release mesh
collisionMeshCache.releaseMesh(_collisionMeshKey);
}
// clear model's collision geometry
graphics::MeshPointer mesh = nullptr;
_model->setCollisionMesh(mesh);
}
}
{
DETAILED_PROFILE_RANGE(simulation_physics, "Fixup");
if (model->needsFixupInScene()) {
@ -1487,11 +1451,6 @@ void ModelEntityRenderer::setIsVisibleInSecondaryCamera(bool value) {
setKey(_didLastVisualGeometryRequestSucceed);
}
void ModelEntityRenderer::flagForCollisionGeometryUpdate() {
_needsCollisionGeometryUpdate = true;
emit requestRenderUpdate();
}
// NOTE: this only renders the "meta" portion of the Model, namely it renders debugging items
void ModelEntityRenderer::doRender(RenderArgs* args) {
DETAILED_PROFILE_RANGE(render_detail, "MetaModelRender");

View file

@ -161,7 +161,6 @@ protected:
virtual bool needsRenderUpdate() const override;
virtual void doRender(RenderArgs* args) override;
virtual void doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) override;
void flagForCollisionGeometryUpdate();
void setCollisionMeshKey(const void* key);
render::hifi::Tag getTagMask() const override;
@ -189,7 +188,6 @@ private:
#endif
bool _needsJointSimulation { false };
bool _needsCollisionGeometryUpdate { false };
const void* _collisionMeshKey { nullptr };
// used on client side

View file

@ -2423,9 +2423,7 @@ OctreeElement::AppendState EntityItemProperties::encodeEntityEditPacket(PacketTy
if (appendState != OctreeElement::COMPLETED) {
didntFitProperties = propertiesDidntFit;
}
}
if (success) {
packetData->endSubTree();
const char* finalizedData = reinterpret_cast<const char*>(packetData->getFinalizedData());
@ -2436,7 +2434,6 @@ OctreeElement::AppendState EntityItemProperties::encodeEntityEditPacket(PacketTy
buffer.resize(finalizedSize);
} else {
qCDebug(entities) << "ERROR - encoded edit message doesn't fit in output buffer.";
success = false;
appendState = OctreeElement::NONE; // if we got here, then we didn't include the item
// maybe we should assert!!!
}
@ -2444,7 +2441,6 @@ OctreeElement::AppendState EntityItemProperties::encodeEntityEditPacket(PacketTy
packetData->discardSubTree();
}
return appendState;
}
@ -2834,7 +2830,6 @@ bool EntityItemProperties::encodeEraseEntityMessage(const EntityItemID& entityIt
outputLength = sizeof(numberOfIds);
memcpy(copyAt, entityItemID.toRfc4122().constData(), NUM_BYTES_RFC4122_UUID);
copyAt += NUM_BYTES_RFC4122_UUID;
outputLength += NUM_BYTES_RFC4122_UUID;
buffer.resize(outputLength);
@ -2856,7 +2851,6 @@ bool EntityItemProperties::encodeCloneEntityMessage(const EntityItemID& entityID
outputLength += NUM_BYTES_RFC4122_UUID;
memcpy(copyAt, newEntityID.toRfc4122().constData(), NUM_BYTES_RFC4122_UUID);
copyAt += NUM_BYTES_RFC4122_UUID;
outputLength += NUM_BYTES_RFC4122_UUID;
buffer.resize(outputLength);

View file

@ -67,11 +67,12 @@
// when multiple participants (with variable ping-times to the server) bid simultaneously for a
// recently activated entity.
//
// (9) When a participant changes an entity's transform/velocity it will bid at priority = POKE (=127)
// (9) When a participant changes an entity's transform/velocity (via script) it will bid at
// priority = POKE (=127).
//
// (10) When an entity touches MyAvatar the participant it will bid at priority = POKE.
// (10) When an entity collides with MyAvatar the participant will bid at priority = POKE.
//
// (11) When a participant grabs an entity it will bid at priority = GRAB (=128).
// (11) When a participant grabs (via script) an entity it will bid at priority = GRAB (=128).
//
// (12) When entityA, locally owned at priority = N, collides with an unowned entityB the owner will
// also bid for entityB at priority = N-1 (or VOLUNTEER, whichever is larger).
@ -79,7 +80,7 @@
// (13) When an entity comes to rest and is deactivated in the physics simulation the owner will
// send an update to: clear their ownership, set priority to zero, and set the object's
// velocities to be zero. As per a normal bid, the owner does NOT assume that its ownership
// has been cleared until it hears from the entity-server. This, if the packet is lost the
// has been cleared until it hears from the entity-server. Thus, if the packet is lost the
// owner will re-send after some period.
//
// (14) When an entity's ownership priority drops below VOLUNTEER other participants may bid for it

View file

@ -63,13 +63,13 @@ Sysmem& Sysmem::operator=(const Sysmem& sysmem) {
Sysmem::~Sysmem() {
deallocateMemory( _data, _size );
_data = NULL;
_data = nullptr;
_size = 0;
}
Size Sysmem::allocate(Size size) {
if (size != _size) {
Byte* newData = NULL;
Byte* newData = nullptr;
Size newSize = 0;
if (size > 0) {
Size allocated = allocateMemory(&newData, size);
@ -90,7 +90,7 @@ Size Sysmem::allocate(Size size) {
Size Sysmem::resize(Size size) {
if (size != _size) {
Byte* newData = NULL;
Byte* newData = nullptr;
Size newSize = 0;
if (size > 0) {
Size allocated = allocateMemory(&newData, size);
@ -124,7 +124,7 @@ Size Sysmem::setData( Size size, const Byte* bytes ) {
}
Size Sysmem::setSubData( Size offset, Size size, const Byte* bytes) {
if (size && ((offset + size) <= getSize()) && bytes) {
if (_data && size && ((offset + size) <= getSize()) && bytes) {
memcpy( _data + offset, bytes, size );
return size;
}

View file

@ -140,7 +140,6 @@ struct IrradianceKTXPayload {
data += sizeof(Version);
memcpy(&_irradianceSH, data, sizeof(SphericalHarmonics));
data += sizeof(SphericalHarmonics);
return true;
}

View file

@ -267,8 +267,6 @@ bool PacketSender::nonThreadedProcess() {
// Keep average packets and time for "second half" of check interval
_lastPPSCheck += (elapsedSinceLastCheck / 2);
_packetsOverCheckInterval = (_packetsOverCheckInterval / 2);
elapsedSinceLastCheck = now - _lastPPSCheck;
}
}
@ -296,12 +294,10 @@ bool PacketSender::nonThreadedProcess() {
DependencyManager::get<NodeList>()->sendPacketList(std::move(packetPair.second.second), *packetPair.first);
}
packetsSentThisCall += packetCount;
_packetsOverCheckInterval += packetCount;
_totalPacketsSent += packetCount;
_totalBytesSent += packetSize;
emit packetSent(packetSize); // FIXME should include number of packets?
_lastSendTime = now;

View file

@ -401,7 +401,6 @@ int Octree::readElementData(const OctreeElementPointer& destinationElement, cons
// tell the element to read the subsequent data
int rootDataSize = _rootElement->readElementDataFromBuffer(nodeData + bytesRead, bytesLeftToRead, args);
bytesRead += rootDataSize;
bytesLeftToRead -= rootDataSize;
}
return bytesRead;

View file

@ -69,7 +69,7 @@ bool CharacterGhostObject::rayTest(const btVector3& start,
const btVector3& end,
CharacterRayResult& result) const {
if (_world && _inWorld) {
_world->rayTest(start, end, result);
btGhostObject::rayTest(start, end, result);
}
return result.hasHit();
}

View file

@ -23,7 +23,7 @@
class CharacterGhostShape;
class CharacterGhostObject : public btPairCachingGhostObject {
class CharacterGhostObject : public btGhostObject {
public:
CharacterGhostObject() { }
~CharacterGhostObject();

View file

@ -92,7 +92,7 @@ void ObjectMotionState::setMass(float mass) {
}
float ObjectMotionState::getMass() const {
if (_shape) {
if (_shape && _shape->getShapeType() != TRIANGLE_MESH_SHAPE_PROXYTYPE) {
// scale the density by the current Aabb volume to get mass
btTransform transform;
transform.setIdentity();
@ -348,8 +348,10 @@ void ObjectMotionState::updateLastKinematicStep() {
void ObjectMotionState::updateBodyMassProperties() {
float mass = getMass();
btVector3 inertia(0.0f, 0.0f, 0.0f);
_body->getCollisionShape()->calculateLocalInertia(mass, inertia);
btVector3 inertia(1.0f, 1.0f, 1.0f);
if (mass > 0.0f) {
_body->getCollisionShape()->calculateLocalInertia(mass, inertia);
}
_body->setMassProps(mass, inertia);
_body->updateInertiaTensor();
}

View file

@ -61,7 +61,6 @@ PointerFrameHeaderList parseFrameHeaders(uchar* const start, const size_t& size)
current += sizeof(FrameSize);
header.fileOffset = current - start;
if (end - current < header.size) {
current = end;
break;
}
current += header.size;

View file

@ -47,7 +47,7 @@ bool CauterizedModel::updateGeometry() {
return needsFullUpdate;
}
void CauterizedModel::createVisibleRenderItemSet() {
void CauterizedModel::createRenderItemSet() {
if (_isCauterized) {
assert(isLoaded());
const auto& meshes = _renderGeometry->getMeshes();
@ -94,15 +94,10 @@ void CauterizedModel::createVisibleRenderItemSet() {
}
}
} else {
Model::createVisibleRenderItemSet();
Model::createRenderItemSet();
}
}
void CauterizedModel::createCollisionRenderItemSet() {
// Temporary HACK: use base class method for now
Model::createCollisionRenderItemSet();
}
void CauterizedModel::updateClusterMatrices() {
PerformanceTimer perfTimer("CauterizedModel::updateClusterMatrices");
@ -186,12 +181,6 @@ void CauterizedModel::updateRenderItems() {
if (!_addedToScene) {
return;
}
glm::vec3 scale = getScale();
if (_collisionGeometry) {
// _collisionGeometry is already scaled
scale = glm::vec3(1.0f);
}
_needsUpdateClusterMatrices = true;
_renderItemsNeedUpdate = false;

View file

@ -31,9 +31,8 @@ public:
void deleteGeometry() override;
bool updateGeometry() override;
void createVisibleRenderItemSet() override;
void createCollisionRenderItemSet() override;
void createRenderItemSet() override;
virtual void updateClusterMatrices() override;
void updateRenderItems() override;

View file

@ -47,51 +47,9 @@ int vec3VectorTypeId = qRegisterMetaType<QVector<glm::vec3> >();
float Model::FAKE_DIMENSION_PLACEHOLDER = -1.0f;
#define HTTP_INVALID_COM "http://invalid.com"
const int NUM_COLLISION_HULL_COLORS = 24;
std::vector<graphics::MaterialPointer> _collisionMaterials;
void initCollisionMaterials() {
// generates bright colors in red, green, blue, yellow, magenta, and cyan spectrums
// (no browns, greys, or dark shades)
float component[NUM_COLLISION_HULL_COLORS] = {
0.0f, 0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, 0.0f,
0.2f, 0.4f, 0.6f, 0.8f,
1.0f, 1.0f, 1.0f, 1.0f,
1.0f, 1.0f, 1.0f, 1.0f,
0.8f, 0.6f, 0.4f, 0.2f
};
_collisionMaterials.reserve(NUM_COLLISION_HULL_COLORS);
// each component gets the same curve
// but offset by a multiple of one third the full width
int numComponents = 3;
int sectionWidth = NUM_COLLISION_HULL_COLORS / numComponents;
int greenPhase = sectionWidth;
int bluePhase = 2 * sectionWidth;
// we stride through the colors to scatter adjacent shades
// so they don't tend to group together for large models
for (int i = 0; i < sectionWidth; ++i) {
for (int j = 0; j < numComponents; ++j) {
graphics::MaterialPointer material;
material = std::make_shared<graphics::Material>();
int index = j * sectionWidth + i;
float red = component[index % NUM_COLLISION_HULL_COLORS];
float green = component[(index + greenPhase) % NUM_COLLISION_HULL_COLORS];
float blue = component[(index + bluePhase) % NUM_COLLISION_HULL_COLORS];
material->setAlbedo(glm::vec3(red, green, blue));
material->setMetallic(0.02f);
material->setRoughness(0.5f);
_collisionMaterials.push_back(material);
}
}
}
Model::Model(QObject* parent, SpatiallyNestable* spatiallyNestableOverride) :
QObject(parent),
_renderGeometry(),
_collisionGeometry(),
_renderWatcher(_renderGeometry),
_spatiallyNestableOverride(spatiallyNestableOverride),
_translation(0.0f),
@ -310,16 +268,6 @@ void Model::updateRenderItems() {
});
}
Transform collisionMeshOffset;
collisionMeshOffset.setIdentity();
foreach(auto itemID, self->_collisionRenderItemsMap.keys()) {
transaction.updateItem<MeshPartPayload>(itemID, [renderItemKeyGlobalFlags, modelTransform, collisionMeshOffset](MeshPartPayload& data) {
// update the model transform for this render item.
data.updateKey(renderItemKeyGlobalFlags);
data.updateTransform(modelTransform, collisionMeshOffset);
});
}
AbstractViewStateInterface::instance()->getMain3DScene()->enqueueTransaction(transaction);
});
}
@ -777,11 +725,6 @@ void Model::updateRenderItemsKey(const render::ScenePointer& scene) {
data.updateKey(renderItemsKey);
});
}
foreach(auto item, _collisionRenderItemsMap.keys()) {
transaction.updateItem<ModelMeshPartPayload>(item, [renderItemsKey](ModelMeshPartPayload& data) {
data.updateKey(renderItemsKey);
});
}
scene->enqueueTransaction(transaction);
}
@ -862,49 +805,37 @@ const render::ItemKey Model::getRenderItemKeyGlobalFlags() const {
bool Model::addToScene(const render::ScenePointer& scene,
render::Transaction& transaction,
render::Item::Status::Getters& statusGetters) {
bool readyToRender = _collisionGeometry || isLoaded();
if (!_addedToScene && readyToRender) {
createRenderItemSet();
if (!_addedToScene && isLoaded()) {
updateClusterMatrices();
if (_modelMeshRenderItems.empty()) {
createRenderItemSet();
}
}
bool somethingAdded = false;
if (_collisionGeometry) {
if (_collisionRenderItemsMap.empty()) {
foreach (auto renderItem, _collisionRenderItems) {
auto item = scene->allocateID();
auto renderPayload = std::make_shared<MeshPartPayload::Payload>(renderItem);
if (_collisionRenderItems.empty() && statusGetters.size()) {
renderPayload->addStatusGetters(statusGetters);
}
transaction.resetItem(item, renderPayload);
_collisionRenderItemsMap.insert(item, renderPayload);
if (_modelMeshRenderItemsMap.empty()) {
bool hasTransparent = false;
size_t verticesCount = 0;
foreach(auto renderItem, _modelMeshRenderItems) {
auto item = scene->allocateID();
auto renderPayload = std::make_shared<ModelMeshPartPayload::Payload>(renderItem);
if (_modelMeshRenderItemsMap.empty() && statusGetters.size()) {
renderPayload->addStatusGetters(statusGetters);
}
somethingAdded = !_collisionRenderItemsMap.empty();
transaction.resetItem(item, renderPayload);
hasTransparent = hasTransparent || renderItem.get()->getShapeKey().isTranslucent();
verticesCount += renderItem.get()->getVerticesCount();
_modelMeshRenderItemsMap.insert(item, renderPayload);
_modelMeshRenderItemIDs.emplace_back(item);
}
} else {
if (_modelMeshRenderItemsMap.empty()) {
somethingAdded = !_modelMeshRenderItemsMap.empty();
bool hasTransparent = false;
size_t verticesCount = 0;
foreach(auto renderItem, _modelMeshRenderItems) {
auto item = scene->allocateID();
auto renderPayload = std::make_shared<ModelMeshPartPayload::Payload>(renderItem);
if (_modelMeshRenderItemsMap.empty() && statusGetters.size()) {
renderPayload->addStatusGetters(statusGetters);
}
transaction.resetItem(item, renderPayload);
hasTransparent = hasTransparent || renderItem.get()->getShapeKey().isTranslucent();
verticesCount += renderItem.get()->getVerticesCount();
_modelMeshRenderItemsMap.insert(item, renderPayload);
_modelMeshRenderItemIDs.emplace_back(item);
}
somethingAdded = !_modelMeshRenderItemsMap.empty();
_renderInfoVertexCount = verticesCount;
_renderInfoDrawCalls = _modelMeshRenderItemsMap.count();
_renderInfoHasTransparent = hasTransparent;
}
_renderInfoVertexCount = verticesCount;
_renderInfoDrawCalls = _modelMeshRenderItemsMap.count();
_renderInfoHasTransparent = hasTransparent;
}
if (somethingAdded) {
@ -926,11 +857,6 @@ void Model::removeFromScene(const render::ScenePointer& scene, render::Transacti
_modelMeshMaterialNames.clear();
_modelMeshRenderItemShapes.clear();
foreach(auto item, _collisionRenderItemsMap.keys()) {
transaction.removeItem(item);
}
_collisionRenderItems.clear();
_collisionRenderItemsMap.clear();
_addedToScene = false;
_renderInfoVertexCount = 0;
@ -1504,7 +1430,6 @@ void Model::deleteGeometry() {
_rig.destroyAnimGraph();
_blendedBlendshapeCoefficients.clear();
_renderGeometry.reset();
_collisionGeometry.reset();
}
void Model::overrideModelTransformAndOffset(const Transform& transform, const glm::vec3& offset) {
@ -1533,19 +1458,6 @@ const render::ItemIDs& Model::fetchRenderItemIDs() const {
}
// Build the render item set appropriate for the current geometry.
// A collision geometry, when present, takes priority over the visible mesh
// geometry; in either case the corresponding item set is only (re)built when
// it is currently empty, so repeated calls are cheap no-ops.
void Model::createRenderItemSet() {
    updateClusterMatrices();
    if (_collisionGeometry) {
        if (_collisionRenderItems.empty()) {
            createCollisionRenderItemSet();
        }
    } else if (_modelMeshRenderItems.empty()) {
        // no collision geometry: build the normal visible render items
        createVisibleRenderItemSet();
    }
}
void Model::createVisibleRenderItemSet() {
assert(isLoaded());
const auto& meshes = _renderGeometry->getMeshes();
@ -1591,41 +1503,6 @@ void Model::createVisibleRenderItemSet() {
}
}
void Model::createCollisionRenderItemSet() {
assert((bool)_collisionGeometry);
if (_collisionMaterials.empty()) {
initCollisionMaterials();
}
const auto& meshes = _collisionGeometry->getMeshes();
// We should not have any existing renderItems if we enter this section of code
Q_ASSERT(_collisionRenderItems.isEmpty());
Transform identity;
identity.setIdentity();
Transform offset;
offset.postTranslate(_offset);
// Run through all of the meshes, and place them into their segregated, but unsorted buckets
uint32_t numMeshes = (uint32_t)meshes.size();
for (uint32_t i = 0; i < numMeshes; i++) {
const auto& mesh = meshes.at(i);
if (!mesh) {
continue;
}
// Create the render payloads
int numParts = (int)mesh->getNumParts();
for (int partIndex = 0; partIndex < numParts; partIndex++) {
graphics::MaterialPointer& material = _collisionMaterials[partIndex % NUM_COLLISION_HULL_COLORS];
auto payload = std::make_shared<MeshPartPayload>(mesh, partIndex, material);
payload->updateTransform(identity, offset);
_collisionRenderItems << payload;
}
}
}
// Renderable when mesh states exist, or when the geometry has loaded and
// happens to contain no meshes at all (an intentionally empty model).
bool Model::isRenderable() const {
    if (!_meshStates.empty()) {
        return true;
    }
    return isLoaded() && _renderGeometry->getMeshes().empty();
}
@ -1709,15 +1586,6 @@ public:
}
};
// Install (or clear, when mesh is null) the collision-hull geometry and
// flag the model so the scene representation is rebuilt on the next fixup.
void Model::setCollisionMesh(graphics::MeshPointer mesh) {
    _collisionGeometry = mesh ? std::make_shared<CollisionRenderGeometry>(mesh) : nullptr;
    _needsFixupInScene = true;
}
// ModelBlender starts with no blendshape computations queued.
ModelBlender::ModelBlender() :
    _pendingBlenders(0) {
}

View file

@ -153,8 +153,6 @@ public:
/// Returns a reference to the shared geometry.
const Geometry::Pointer& getGeometry() const { return _renderGeometry; }
/// Returns a reference to the shared collision geometry.
const Geometry::Pointer& getCollisionGeometry() const { return _collisionGeometry; }
const QVariantMap getTextures() const { assert(isLoaded()); return _renderGeometry->getTextures(); }
Q_INVOKABLE virtual void setTextures(const QVariantMap& textures);
@ -260,7 +258,6 @@ public:
// returns 'true' if needs fullUpdate after geometry change
virtual bool updateGeometry();
void setCollisionMesh(graphics::MeshPointer mesh);
void setLoadingPriority(float priority) { _loadingPriority = priority; }
@ -362,7 +359,6 @@ protected:
bool getJointPosition(int jointIndex, glm::vec3& position) const;
Geometry::Pointer _renderGeometry; // only ever set by its watcher
Geometry::Pointer _collisionGeometry;
GeometryResourceWatcher _renderWatcher;
@ -430,9 +426,7 @@ protected:
QVector<TriangleSet> _modelSpaceMeshTriangleSets; // model space triangles for all sub meshes
void createRenderItemSet();
virtual void createVisibleRenderItemSet();
virtual void createCollisionRenderItemSet();
virtual void createRenderItemSet();
bool _isWireframe;
bool _useDualQuaternionSkinning { false };
@ -443,9 +437,6 @@ protected:
static AbstractViewStateInterface* _viewState;
QVector<std::shared_ptr<MeshPartPayload>> _collisionRenderItems;
QMap<render::ItemID, render::PayloadPointer> _collisionRenderItemsMap;
QVector<std::shared_ptr<ModelMeshPartPayload>> _modelMeshRenderItems;
QMap<render::ItemID, render::PayloadPointer> _modelMeshRenderItemsMap;
render::ItemIDs _modelMeshRenderItemIDs;

View file

@ -432,10 +432,13 @@ bool ScriptEngines::stopScript(const QString& rawScriptURL, bool restart) {
ScriptEngine::Type type = scriptEngine->getType();
auto scriptCache = DependencyManager::get<ScriptCache>();
scriptCache->deleteScript(scriptURL);
connect(scriptEngine.data(), &ScriptEngine::finished,
this, [this, isUserLoaded, type](QString scriptName, ScriptEnginePointer engine) {
reloadScript(scriptName, isUserLoaded)->setType(type);
});
if (!scriptEngine->isStopping()) {
connect(scriptEngine.data(), &ScriptEngine::finished,
this, [this, isUserLoaded, type](QString scriptName, ScriptEnginePointer engine) {
reloadScript(scriptName, isUserLoaded)->setType(type);
});
}
}
scriptEngine->stop();
stoppedScript = true;
@ -594,7 +597,7 @@ void ScriptEngines::onScriptFinished(const QString& rawScriptURL, ScriptEnginePo
}
}
if (removed) {
if (removed && !_isReloading) {
// Update settings with removed script
saveScripts();
emit scriptCountChanged();

View file

@ -402,8 +402,10 @@ MenuWrapper* Menu::addMenu(const QString& menuName, const QString& grouping) {
// hook our show/hide for popup menus, so we can keep track of whether or not one
// of our submenus is currently showing.
connect(menu->_realMenu, &QMenu::aboutToShow, []() { _isSomeSubmenuShown = true; });
connect(menu->_realMenu, &QMenu::aboutToHide, []() { _isSomeSubmenuShown = false; });
if (menu && menu->_realMenu) {
connect(menu->_realMenu, &QMenu::aboutToShow, []() { _isSomeSubmenuShown = true; });
connect(menu->_realMenu, &QMenu::aboutToHide, []() { _isSomeSubmenuShown = false; });
}
return menu;
}

View file

@ -51,7 +51,7 @@ void OculusControllerManager::checkForConnectedDevices() {
unsigned int controllerConnected = ovr_GetConnectedControllerTypes(session);
if (!_remote && (controllerConnected & ovrControllerType_Remote) == ovrControllerType_Remote) {
if (OVR_SUCCESS(ovr_GetInputState(session, ovrControllerType_Remote, &_inputState))) {
if (OVR_SUCCESS(ovr_GetInputState(session, ovrControllerType_Remote, &_remoteInputState))) {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
_remote = std::make_shared<RemoteDevice>(*this);
userInputMapper->registerDevice(_remote);
@ -59,7 +59,7 @@ void OculusControllerManager::checkForConnectedDevices() {
}
if (!_touch && (controllerConnected & ovrControllerType_Touch) != 0) {
if (OVR_SUCCESS(ovr_GetInputState(session, ovrControllerType_Touch, &_inputState))) {
if (OVR_SUCCESS(ovr_GetInputState(session, ovrControllerType_Touch, &_touchInputState))) {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
_touch = std::make_shared<TouchDevice>(*this);
userInputMapper->registerDevice(_touch);
@ -90,13 +90,13 @@ void OculusControllerManager::pluginUpdate(float deltaTime, const controller::In
ovr::withSession([&](ovrSession session) {
if (_touch) {
updateTouch = OVR_SUCCESS(ovr_GetInputState(session, ovrControllerType_Touch, &_inputState));
updateTouch = OVR_SUCCESS(ovr_GetInputState(session, ovrControllerType_Touch, &_touchInputState));
if (!updateTouch) {
qCWarning(oculusLog) << "Unable to read Oculus touch input state" << ovr::getError();
}
}
if (_remote) {
updateRemote = OVR_SUCCESS(ovr_GetInputState(session, ovrControllerType_Remote, &_inputState));
updateRemote = OVR_SUCCESS(ovr_GetInputState(session, ovrControllerType_Remote, &_remoteInputState));
if (!updateRemote) {
qCWarning(oculusLog) << "Unable to read Oculus remote input state" << ovr::getError();
}
@ -194,7 +194,7 @@ QString OculusControllerManager::RemoteDevice::getDefaultMappingConfig() const {
void OculusControllerManager::RemoteDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
_buttonPressedMap.clear();
const auto& inputState = _parent._inputState;
const auto& inputState = _parent._remoteInputState;
for (const auto& pair : BUTTON_MAP) {
if (inputState.Buttons & pair.first) {
_buttonPressedMap.insert(pair.second);
@ -257,7 +257,7 @@ void OculusControllerManager::TouchDevice::update(float deltaTime,
using namespace controller;
// Axes
const auto& inputState = _parent._inputState;
const auto& inputState = _parent._touchInputState;
_axisStateMap[LX] = inputState.Thumbstick[ovrHand_Left].x;
_axisStateMap[LY] = inputState.Thumbstick[ovrHand_Left].y;
_axisStateMap[LT] = inputState.IndexTrigger[ovrHand_Left];

View file

@ -103,7 +103,8 @@ private:
void checkForConnectedDevices();
ovrInputState _inputState {};
ovrInputState _remoteInputState {};
ovrInputState _touchInputState {};
RemoteDevice::Pointer _remote;
TouchDevice::Pointer _touch;
static const char* NAME;

View file

@ -177,6 +177,10 @@ function goActive() {
UserActivityLogger.toggledAway(false);
MyAvatar.isAway = false;
if (!Window.hasFocus()) {
Window.setFocus();
}
}
MyAvatar.wentAway.connect(setAwayProperties);

View file

@ -101,12 +101,15 @@
this.isReady = function(controllerData, deltaTime) {
var now = Date.now();
this.triggersPressed(controllerData, now);
if ((HMD.active && !this.mouseActivity.expired(now)) && _this.handControllerActivity.expired()) {
Reticle.visible = true;
return ControllerDispatcherUtils.makeRunningValues(true, [], []);
}
if (HMD.active) {
Reticle.visible = false;
if (!this.mouseActivity.expired(now) && _this.handControllerActivity.expired()) {
Reticle.visible = true;
return ControllerDispatcherUtils.makeRunningValues(true, [], []);
} else {
Reticle.visible = false;
}
} else if (!Reticle.visible) {
Reticle.visible = true;
}
return ControllerDispatcherUtils.makeRunningValues(false, [], []);

View file

@ -1,309 +1,315 @@
print("Loading hfudt")
-- create the HFUDT protocol
p_hfudt = Proto("hfudt", "HFUDT Protocol")
-- create fields shared between packets in HFUDT
local f_data = ProtoField.string("hfudt.data", "Data")
-- create the fields for data packets in HFUDT
local f_length = ProtoField.uint16("hfudt.length", "Length", base.DEC)
local f_control_bit = ProtoField.uint8("hfudt.control", "Control Bit", base.DEC)
local f_reliable_bit = ProtoField.uint8("hfudt.reliable", "Reliability Bit", base.DEC)
local f_message_bit = ProtoField.uint8("hfudt.message", "Message Bit", base.DEC)
local f_obfuscation_level = ProtoField.uint8("hfudt.obfuscation_level", "Obfuscation Level", base.DEC)
local f_sequence_number = ProtoField.uint32("hfudt.sequence_number", "Sequence Number", base.DEC)
local f_message_position = ProtoField.uint8("hfudt.message_position", "Message Position", base.DEC)
local f_message_number = ProtoField.uint32("hfudt.message_number", "Message Number", base.DEC)
local f_message_part_number = ProtoField.uint32("hfudt.message_part_number", "Message Part Number", base.DEC)
local f_type = ProtoField.uint8("hfudt.type", "Type", base.DEC)
local f_version = ProtoField.uint8("hfudt.version", "Version", base.DEC)
local f_type_text = ProtoField.string("hfudt.type_text", "TypeText")
local f_sender_id = ProtoField.uint16("hfudt.sender_id", "Sender ID", base.DEC)
local f_hmac_hash = ProtoField.bytes("hfudt.hmac_hash", "HMAC Hash")
-- create the fields for control packets in HFUDT
local f_control_type = ProtoField.uint16("hfudt.control_type", "Control Type", base.DEC)
local f_control_type_text = ProtoField.string("hfudt.control_type_text", "Control Type Text", base.ASCII)
local f_ack_sequence_number = ProtoField.uint32("hfudt.ack_sequence_number", "ACKed Sequence Number", base.DEC)
local f_control_sub_sequence = ProtoField.uint32("hfudt.control_sub_sequence", "Control Sub-Sequence Number", base.DEC)
local f_nak_sequence_number = ProtoField.uint32("hfudt.nak_sequence_number", "NAKed Sequence Number", base.DEC)
local f_nak_range_end = ProtoField.uint32("hfudt.nak_range_end", "NAK Range End", base.DEC)
local SEQUENCE_NUMBER_MASK = 0x07FFFFFF
p_hfudt.fields = {
f_length,
f_control_bit, f_reliable_bit, f_message_bit, f_sequence_number, f_type, f_type_text, f_version,
f_sender_id, f_hmac_hash,
f_message_position, f_message_number, f_message_part_number, f_obfuscation_level,
f_control_type, f_control_type_text, f_control_sub_sequence, f_ack_sequence_number, f_nak_sequence_number, f_nak_range_end,
f_data
}
local control_types = {
[0] = { "ACK", "Acknowledgement" },
[1] = { "ACK2", "Acknowledgement of acknowledgement" },
[2] = { "LightACK", "Light Acknowledgement" },
[3] = { "NAK", "Loss report (NAK)" },
[4] = { "TimeoutNAK", "Loss report re-transmission (TimeoutNAK)" },
[5] = { "Handshake", "Handshake" },
[6] = { "HandshakeACK", "Acknowledgement of Handshake" },
[7] = { "ProbeTail", "Probe tail" },
[8] = { "HandshakeRequest", "Request a Handshake" }
}
local message_positions = {
[0] = "ONLY",
[1] = "LAST",
[2] = "FIRST",
[3] = "MIDDLE"
}
local packet_types = {
[0] = "Unknown",
[1] = "StunResponse",
[2] = "DomainList",
[3] = "Ping",
[4] = "PingReply",
[5] = "KillAvatar",
[6] = "AvatarData",
[7] = "InjectAudio",
[8] = "MixedAudio",
[9] = "MicrophoneAudioNoEcho",
[10] = "MicrophoneAudioWithEcho",
[11] = "BulkAvatarData",
[12] = "SilentAudioFrame",
[13] = "DomainListRequest",
[14] = "RequestAssignment",
[15] = "CreateAssignment",
[16] = "DomainConnectionDenied",
[17] = "MuteEnvironment",
[18] = "AudioStreamStats",
[19] = "DomainServerPathQuery",
[20] = "DomainServerPathResponse",
[21] = "DomainServerAddedNode",
[22] = "ICEServerPeerInformation",
[23] = "ICEServerQuery",
[24] = "OctreeStats",
[25] = "Jurisdiction",
[26] = "JurisdictionRequest",
[27] = "AssignmentClientStatus",
[28] = "NoisyMute",
[29] = "AvatarIdentity",
[30] = "AvatarBillboard",
[31] = "DomainConnectRequest",
[32] = "DomainServerRequireDTLS",
[33] = "NodeJsonStats",
[34] = "OctreeDataNack",
[35] = "StopNode",
[36] = "AudioEnvironment",
[37] = "EntityEditNack",
[38] = "ICEServerHeartbeat",
[39] = "ICEPing",
[40] = "ICEPingReply",
[41] = "EntityData",
[42] = "EntityQuery",
[43] = "EntityAdd",
[44] = "EntityErase",
[45] = "EntityEdit",
[46] = "DomainServerConnectionToken",
[47] = "DomainSettingsRequest",
[48] = "DomainSettings",
[49] = "AssetGet",
[50] = "AssetGetReply",
[51] = "AssetUpload",
[52] = "AssetUploadReply",
[53] = "AssetGetInfo",
[54] = "AssetGetInfoReply"
}
local unsourced_packet_types = {
["DomainList"] = true
}
function p_hfudt.dissector(buf, pinfo, tree)
-- make sure this isn't a STUN packet - those don't follow HFUDT format
if pinfo.dst == Address.ip("stun.highfidelity.io") then return end
-- validate that the packet length is at least the minimum control packet size
if buf:len() < 4 then return end
-- create a subtree for HFUDT
subtree = tree:add(p_hfudt, buf(0))
-- set the packet length
subtree:add(f_length, buf:len())
-- pull out the entire first word
local first_word = buf(0, 4):le_uint()
-- pull out the control bit and add it to the subtree
local control_bit = bit32.rshift(first_word, 31)
subtree:add(f_control_bit, control_bit)
local data_length = 0
if control_bit == 1 then
-- dissect the control packet
pinfo.cols.protocol = p_hfudt.name .. " Control"
-- remove the control bit and shift to the right to get the type value
local shifted_type = bit32.rshift(bit32.lshift(first_word, 1), 17)
local type = subtree:add(f_control_type, shifted_type)
if control_types[shifted_type] ~= nil then
-- if we know this type then add the name
type:append_text(" (".. control_types[shifted_type][1] .. ")")
subtree:add(f_control_type_text, control_types[shifted_type][1])
end
if shifted_type == 0 or shifted_type == 1 then
-- this has a sub-sequence number
local second_word = buf(4, 4):le_uint()
subtree:add(f_control_sub_sequence, bit32.band(second_word, SEQUENCE_NUMBER_MASK))
local data_index = 8
if shifted_type == 0 then
-- if this is an ACK let's read out the sequence number
local sequence_number = buf(8, 4):le_uint()
subtree:add(f_ack_sequence_number, bit32.band(sequence_number, SEQUENCE_NUMBER_MASK))
data_index = data_index + 4
end
data_length = buf:len() - data_index
-- set the data from whatever is left in the packet
subtree:add(f_data, buf(data_index, data_length))
elseif shifted_type == 2 then
-- this is a Light ACK let's read out the sequence number
local sequence_number = buf(4, 4):le_uint()
subtree:add(f_ack_sequence_number, bit32.band(sequence_number, SEQUENCE_NUMBER_MASK))
data_length = buf:len() - 4
-- set the data from whatever is left in the packet
subtree:add(f_data, buf(4, data_length))
elseif shifted_type == 3 or shifted_type == 4 then
if buf:len() <= 12 then
-- this is a NAK pull the sequence number or range
local sequence_number = buf(4, 4):le_uint()
subtree:add(f_nak_sequence_number, bit32.band(sequence_number, SEQUENCE_NUMBER_MASK))
data_length = buf:len() - 4
if buf:len() > 8 then
local range_end = buf(8, 4):le_uint()
subtree:add(f_nak_range_end, bit32.band(range_end, SEQUENCE_NUMBER_MASK))
data_length = data_length - 4
end
end
else
data_length = buf:len() - 4
-- no sub-sequence number, just read the data
subtree:add(f_data, buf(4, data_length))
end
else
-- dissect the data packet
pinfo.cols.protocol = p_hfudt.name
-- set the reliability bit
subtree:add(f_reliable_bit, bit32.rshift(first_word, 30))
local message_bit = bit32.band(0x01, bit32.rshift(first_word, 29))
-- set the message bit
subtree:add(f_message_bit, message_bit)
-- read the obfuscation level
local obfuscation_bits = bit32.band(0x03, bit32.rshift(first_word, 27))
subtree:add(f_obfuscation_level, obfuscation_bits)
-- read the sequence number
subtree:add(f_sequence_number, bit32.band(first_word, SEQUENCE_NUMBER_MASK))
local payload_offset = 4
-- if the message bit is set, handle the second word
if message_bit == 1 then
payload_offset = 12
local second_word = buf(4, 4):le_uint()
-- read message position from upper 2 bits
local message_position = bit32.rshift(second_word, 30)
local position = subtree:add(f_message_position, message_position)
if message_positions[message_position] ~= nil then
-- if we know this position then add the name
position:append_text(" (".. message_positions[message_position] .. ")")
end
-- read message number from lower 30 bits
subtree:add(f_message_number, bit32.band(second_word, 0x3FFFFFFF))
-- read the message part number
subtree:add(f_message_part_number, buf(8, 4):le_uint())
end
-- read the type
local packet_type = buf(payload_offset, 1):le_uint()
local ptype = subtree:add_le(f_type, buf(payload_offset, 1))
local packet_type_text = packet_types[packet_type]
if packet_type_text ~= nil then
subtree:add(f_type_text, packet_type_text)
-- if we know this packet type then add the name
ptype:append_text(" (".. packet_type_text .. ")")
end
-- read the version
subtree:add_le(f_version, buf(payload_offset + 1, 1))
local i = payload_offset + 2
if unsourced_packet_types[packet_type_text] == nil then
-- read node local ID
local sender_id = buf(payload_offset + 2, 2)
subtree:add_le(f_sender_id, sender_id)
i = i + 2
-- read HMAC MD5 hash
subtree:add(f_hmac_hash, buf(i, 16))
i = i + 16
end
-- Domain packets
if packet_type_text == "DomainList" then
Dissector.get("hf-domain"):call(buf(i):tvb(), pinfo, tree)
end
-- AvatarData or BulkAvatarDataPacket
if packet_type_text == "AvatarData" or packet_type_text == "BulkAvatarData" then
Dissector.get("hf-avatar"):call(buf(i):tvb(), pinfo, tree)
end
if packet_type_text == "EntityEdit" then
Dissector.get("hf-entity"):call(buf(i):tvb(), pinfo, tree)
end
end
-- return the size of the header
return buf:len()
end
function p_hfudt.init()
local udp_dissector_table = DissectorTable.get("udp.port")
for port=1000, 65000 do
udp_dissector_table:add(port, p_hfudt)
end
end
-- Wireshark Lua dissector declarations for HFUDT, High Fidelity's UDP-based
-- transport. The protocol object, field definitions, and lookup tables below
-- are shared by the p_hfudt.dissector function that follows.
print("Loading hfudt")

-- create the HFUDT protocol
p_hfudt = Proto("hfudt", "HFUDT Protocol")

-- create fields shared between packets in HFUDT
local f_data = ProtoField.string("hfudt.data", "Data")

-- create the fields for data packets in HFUDT
local f_length = ProtoField.uint16("hfudt.length", "Length", base.DEC)
local f_control_bit = ProtoField.uint8("hfudt.control", "Control Bit", base.DEC)
local f_reliable_bit = ProtoField.uint8("hfudt.reliable", "Reliability Bit", base.DEC)
local f_message_bit = ProtoField.uint8("hfudt.message", "Message Bit", base.DEC)
local f_obfuscation_level = ProtoField.uint8("hfudt.obfuscation_level", "Obfuscation Level", base.DEC)
local f_sequence_number = ProtoField.uint32("hfudt.sequence_number", "Sequence Number", base.DEC)
local f_message_position = ProtoField.uint8("hfudt.message_position", "Message Position", base.DEC)
local f_message_number = ProtoField.uint32("hfudt.message_number", "Message Number", base.DEC)
local f_message_part_number = ProtoField.uint32("hfudt.message_part_number", "Message Part Number", base.DEC)
local f_type = ProtoField.uint8("hfudt.type", "Type", base.DEC)
local f_version = ProtoField.uint8("hfudt.version", "Version", base.DEC)
local f_type_text = ProtoField.string("hfudt.type_text", "TypeText")
local f_sender_id = ProtoField.uint16("hfudt.sender_id", "Sender ID", base.DEC)
local f_hmac_hash = ProtoField.bytes("hfudt.hmac_hash", "HMAC Hash")

-- create the fields for control packets in HFUDT
local f_control_type = ProtoField.uint16("hfudt.control_type", "Control Type", base.DEC)
local f_control_type_text = ProtoField.string("hfudt.control_type_text", "Control Type Text", base.ASCII)
local f_ack_sequence_number = ProtoField.uint32("hfudt.ack_sequence_number", "ACKed Sequence Number", base.DEC)
local f_control_sub_sequence = ProtoField.uint32("hfudt.control_sub_sequence", "Control Sub-Sequence Number", base.DEC)
local f_nak_sequence_number = ProtoField.uint32("hfudt.nak_sequence_number", "NAKed Sequence Number", base.DEC)
local f_nak_range_end = ProtoField.uint32("hfudt.nak_range_end", "NAK Range End", base.DEC)

-- sequence numbers occupy only the low 27 bits of their 32-bit word
local SEQUENCE_NUMBER_MASK = 0x07FFFFFF

p_hfudt.fields = {
  f_length,
  f_control_bit, f_reliable_bit, f_message_bit, f_sequence_number, f_type, f_type_text, f_version,
  f_sender_id, f_hmac_hash,
  f_message_position, f_message_number, f_message_part_number, f_obfuscation_level,
  f_control_type, f_control_type_text, f_control_sub_sequence, f_ack_sequence_number, f_nak_sequence_number, f_nak_range_end,
  f_data
}

-- control packet type value -> { short name, description }
local control_types = {
  [0] = { "ACK", "Acknowledgement" },
  [1] = { "ACK2", "Acknowledgement of acknowledgement" },
  [2] = { "LightACK", "Light Acknowledgement" },
  [3] = { "NAK", "Loss report (NAK)" },
  [4] = { "TimeoutNAK", "Loss report re-transmission (TimeoutNAK)" },
  [5] = { "Handshake", "Handshake" },
  [6] = { "HandshakeACK", "Acknowledgement of Handshake" },
  [7] = { "ProbeTail", "Probe tail" },
  [8] = { "HandshakeRequest", "Request a Handshake" }
}

-- meaning of the 2-bit message-position field of multi-part messages
local message_positions = {
  [0] = "ONLY",
  [1] = "LAST",
  [2] = "FIRST",
  [3] = "MIDDLE"
}

-- data packet type value -> name; presumably mirrors the PacketType enum in
-- the C++ networking library -- verify against the source when adding entries
local packet_types = {
  [0] = "Unknown",
  [1] = "StunResponse",
  [2] = "DomainList",
  [3] = "Ping",
  [4] = "PingReply",
  [5] = "KillAvatar",
  [6] = "AvatarData",
  [7] = "InjectAudio",
  [8] = "MixedAudio",
  [9] = "MicrophoneAudioNoEcho",
  [10] = "MicrophoneAudioWithEcho",
  [11] = "BulkAvatarData",
  [12] = "SilentAudioFrame",
  [13] = "DomainListRequest",
  [14] = "RequestAssignment",
  [15] = "CreateAssignment",
  [16] = "DomainConnectionDenied",
  [17] = "MuteEnvironment",
  [18] = "AudioStreamStats",
  [19] = "DomainServerPathQuery",
  [20] = "DomainServerPathResponse",
  [21] = "DomainServerAddedNode",
  [22] = "ICEServerPeerInformation",
  [23] = "ICEServerQuery",
  [24] = "OctreeStats",
  [25] = "Jurisdiction",
  [26] = "JurisdictionRequest",
  [27] = "AssignmentClientStatus",
  [28] = "NoisyMute",
  [29] = "AvatarIdentity",
  [30] = "AvatarBillboard",
  [31] = "DomainConnectRequest",
  [32] = "DomainServerRequireDTLS",
  [33] = "NodeJsonStats",
  [34] = "OctreeDataNack",
  [35] = "StopNode",
  [36] = "AudioEnvironment",
  [37] = "EntityEditNack",
  [38] = "ICEServerHeartbeat",
  [39] = "ICEPing",
  [40] = "ICEPingReply",
  [41] = "EntityData",
  [42] = "EntityQuery",
  [43] = "EntityAdd",
  [44] = "EntityErase",
  [45] = "EntityEdit",
  [46] = "DomainServerConnectionToken",
  [47] = "DomainSettingsRequest",
  [48] = "DomainSettings",
  [49] = "AssetGet",
  [50] = "AssetGetReply",
  [51] = "AssetUpload",
  [52] = "AssetUploadReply",
  [53] = "AssetGetInfo",
  [54] = "AssetGetInfoReply"
}

-- packet types that carry no sender ID / HMAC (see the dissector's
-- unsourced branch)
local unsourced_packet_types = {
  ["DomainList"] = true
}
-- Dissect one HFUDT datagram. Bit 31 of the first little-endian word selects
-- control (1) vs data (0) packets; the two formats are handled in the two
-- branches below. Returns the number of bytes consumed (the whole buffer).
function p_hfudt.dissector(buf, pinfo, tree)

    -- make sure this isn't a STUN packet - those don't follow HFUDT format
    if pinfo.dst == Address.ip("stun.highfidelity.io") then return end

    -- validate that the packet length is at least the minimum control packet size
    if buf:len() < 4 then return end

    -- create a subtree for HFUDT ('local' so we don't leak a global)
    local subtree = tree:add(p_hfudt, buf(0))

    -- set the packet length
    subtree:add(f_length, buf:len())

    -- pull out the entire first word
    local first_word = buf(0, 4):le_uint()

    -- pull out the control bit and add it to the subtree
    local control_bit = bit32.rshift(first_word, 31)
    subtree:add(f_control_bit, control_bit)

    local data_length = 0

    if control_bit == 1 then
        -- dissect the control packet
        pinfo.cols.protocol = p_hfudt.name .. " Control"

        -- remove the control bit and shift to the right to get the type value
        local shifted_type = bit32.rshift(bit32.lshift(first_word, 1), 17)

        -- 'type_item' rather than 'type' to avoid shadowing the Lua builtin type()
        local type_item = subtree:add(f_control_type, shifted_type)

        if control_types[shifted_type] ~= nil then
            -- if we know this type then add the name
            type_item:append_text(" (".. control_types[shifted_type][1] .. ")")

            subtree:add(f_control_type_text, control_types[shifted_type][1])
        end

        if shifted_type == 0 or shifted_type == 1 then
            -- ACK/ACK2 carry a sub-sequence number
            local second_word = buf(4, 4):le_uint()
            subtree:add(f_control_sub_sequence, bit32.band(second_word, SEQUENCE_NUMBER_MASK))

            local data_index = 8

            if shifted_type == 0 then
                -- if this is an ACK let's read out the sequence number
                local sequence_number = buf(8, 4):le_uint()
                subtree:add(f_ack_sequence_number, bit32.band(sequence_number, SEQUENCE_NUMBER_MASK))

                data_index = data_index + 4
            end

            data_length = buf:len() - data_index

            -- set the data from whatever is left in the packet
            subtree:add(f_data, buf(data_index, data_length))

        elseif shifted_type == 2 then
            -- this is a Light ACK let's read out the sequence number
            local sequence_number = buf(4, 4):le_uint()
            subtree:add(f_ack_sequence_number, bit32.band(sequence_number, SEQUENCE_NUMBER_MASK))

            data_length = buf:len() - 4

            -- set the data from whatever is left in the packet
            subtree:add(f_data, buf(4, data_length))
        elseif shifted_type == 3 or shifted_type == 4 then
            if buf:len() <= 12 then
                -- this is a NAK pull the sequence number or range
                local sequence_number = buf(4, 4):le_uint()
                subtree:add(f_nak_sequence_number, bit32.band(sequence_number, SEQUENCE_NUMBER_MASK))

                data_length = buf:len() - 4

                if buf:len() > 8 then
                    local range_end = buf(8, 4):le_uint()
                    subtree:add(f_nak_range_end, bit32.band(range_end, SEQUENCE_NUMBER_MASK))

                    data_length = data_length - 4
                end
            end
        else
            data_length = buf:len() - 4

            -- no sub-sequence number, just read the data
            subtree:add(f_data, buf(4, data_length))
        end
    else
        -- dissect the data packet
        pinfo.cols.protocol = p_hfudt.name

        -- set the reliability bit
        subtree:add(f_reliable_bit, bit32.rshift(first_word, 30))

        local message_bit = bit32.band(0x01, bit32.rshift(first_word, 29))

        -- set the message bit
        subtree:add(f_message_bit, message_bit)

        -- read the obfuscation level
        local obfuscation_bits = bit32.band(0x03, bit32.rshift(first_word, 27))
        subtree:add(f_obfuscation_level, obfuscation_bits)

        -- read the sequence number (low 27 bits)
        subtree:add(f_sequence_number, bit32.band(first_word, SEQUENCE_NUMBER_MASK))

        local payload_offset = 4

        -- if the message bit is set, handle the second word
        if message_bit == 1 then
            payload_offset = 12

            local second_word = buf(4, 4):le_uint()

            -- read message position from upper 2 bits
            local message_position = bit32.rshift(second_word, 30)
            local position = subtree:add(f_message_position, message_position)

            if message_positions[message_position] ~= nil then
                -- if we know this position then add the name
                position:append_text(" (".. message_positions[message_position] .. ")")
            end

            -- read message number from lower 30 bits
            subtree:add(f_message_number, bit32.band(second_word, 0x3FFFFFFF))

            -- read the message part number
            subtree:add(f_message_part_number, buf(8, 4):le_uint())
        end

        -- read the type
        local packet_type = buf(payload_offset, 1):le_uint()
        local ptype = subtree:add_le(f_type, buf(payload_offset, 1))
        local packet_type_text = packet_types[packet_type]
        if packet_type_text ~= nil then
            subtree:add(f_type_text, packet_type_text)
            -- if we know this packet type then add the name
            ptype:append_text(" (".. packet_type_text .. ")")
        end

        -- read the version
        subtree:add_le(f_version, buf(payload_offset + 1, 1))

        local i = payload_offset + 2

        if unsourced_packet_types[packet_type_text] == nil then
            -- read node local ID
            local sender_id = buf(payload_offset + 2, 2)
            subtree:add_le(f_sender_id, sender_id)
            i = i + 2

            -- read HMAC MD5 hash
            subtree:add(f_hmac_hash, buf(i, 16))
            i = i + 16
        end

        -- Domain packets
        if packet_type_text == "DomainList" then
            Dissector.get("hf-domain"):call(buf(i):tvb(), pinfo, tree)
        end

        -- AvatarData or BulkAvatarDataPacket
        if packet_type_text == "AvatarData" or packet_type_text == "BulkAvatarData" then
            Dissector.get("hf-avatar"):call(buf(i):tvb(), pinfo, tree)
        end

        if packet_type_text == "EntityEdit" then
            Dissector.get("hf-entity"):call(buf(i):tvb(), pinfo, tree)
        end

        -- audio packets; reuse packet_type_text for consistency with the
        -- dispatches above instead of re-indexing packet_types
        if packet_type_text == "MicrophoneAudioNoEcho" or
            packet_type_text == "MicrophoneAudioWithEcho" or
            packet_type_text == "SilentAudioFrame" then
            Dissector.get("hf-audio"):call(buf(i):tvb(), pinfo, tree)
        end
    end

    -- return the size of the header
    return buf:len()
end
-- Registers the HFUDT dissector on every UDP port in the wide range that
-- High Fidelity servers and clients may negotiate (1000-65000 inclusive).
function p_hfudt.init()
    local udp_table = DissectorTable.get("udp.port")
    local port = 1000
    while port <= 65000 do
        udp_table:add(port, p_hfudt)
        port = port + 1
    end
end

View file

@ -0,0 +1,46 @@
print("Loading hf-audio")
-- create the audio protocol
p_hf_audio = Proto("hf-audio", "HF Audio Protocol")
-- audio packet fields, in the order they appear in the payload:
-- sequence number, codec-name size, codec name, then either the stereo
-- flag or (for SilentAudioFrame packets) the silent-sample count
local f_audio_sequence_number = ProtoField.uint16("hf_audio.sequence_number", "Sequence Number")
local f_audio_codec_size = ProtoField.uint32("hf_audio.codec_size", "Codec Size")
local f_audio_codec = ProtoField.string("hf_audio.codec", "Codec")
local f_audio_is_stereo = ProtoField.bool("hf_audio.is_stereo", "Is Stereo")
local f_audio_num_silent_samples = ProtoField.uint16("hf_audio.num_silent_samples", "Num Silent Samples")
p_hf_audio.fields = {
  f_audio_sequence_number, f_audio_codec_size, f_audio_codec,
  f_audio_is_stereo, f_audio_num_silent_samples
}
-- extractor for the packet-type name added by the parent hfudt dissector;
-- used below to distinguish SilentAudioFrame from normal audio packets
local packet_type_extractor = Field.new('hfudt.type_text')
-- Dissects a High Fidelity audio packet payload.
-- Layout: sequence number (2 bytes LE), codec-name length (4 bytes LE),
-- codec name (variable), then either the silent-sample count (for
-- SilentAudioFrame packets) or the stereo flag (all other audio packets).
function p_hf_audio.dissector(buf, pinfo, tree)
  pinfo.cols.protocol = p_hf_audio.name

  -- BUG FIX: declare the subtree local so each dissection does not
  -- overwrite a global variable
  local audio_subtree = tree:add(p_hf_audio, buf())

  local i = 0

  audio_subtree:add_le(f_audio_sequence_number, buf(i, 2))
  i = i + 2

  -- figure out the number of bytes the codec name takes
  local codec_name_bytes = buf(i, 4):le_uint()

  audio_subtree:add_le(f_audio_codec_size, buf(i, 4))
  i = i + 4

  audio_subtree:add(f_audio_codec, buf(i, codec_name_bytes))
  i = i + codec_name_bytes

  -- packet type name was recorded by the parent hfudt dissector
  local packet_type = packet_type_extractor().value
  if packet_type == "SilentAudioFrame" then
    audio_subtree:add_le(f_audio_num_silent_samples, buf(i, 2))
    i = i + 2
  else
    audio_subtree:add_le(f_audio_is_stereo, buf(i, 1))
    i = i + 1
  end
end

View file

@ -1,3 +1,5 @@
print("Loading hf-avatar")
-- create the avatar protocol
-- (field definitions and the dissector body follow later in this file)
p_hf_avatar = Proto("hf-avatar", "HF Avatar Protocol")

View file

@ -1,3 +1,5 @@
print("Loading hf-entity")
-- create the entity protocol
-- (field definitions and the dissector body follow later in this file)
p_hf_entity = Proto("hf-entity", "HF Entity Protocol")

View file

@ -1,179 +0,0 @@
//
// Interaction.js
// scripts/interaction
//
// Created by Trevor Berninger on 3/20/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function(){
    print("loading interaction script");
    // NOTE(review): this shadows the global Avatar interface inside the
    // entity script; only AvatarList/MyAvatar are used below, so it appears
    // intentional -- confirm before relying on Avatar in this scope.
    var Avatar = false;
    var NPC = false;               // session UUID of the focused NPC, or false
    var previousNPC = false;       // last NPC we were focused on
    var hasCenteredOnNPC = false;  // unused in this file
    var distance = 10;             // max interaction distance (meters)
    var r = 8;                     // unused in this file
    var player = false;            // true while an avatar is inside this entity
    var baselineX = 0;             // camera pitch captured when focus was gained
    var baselineY = 0;             // camera yaw captured when focus was gained
    var nodRange = 20;             // degrees of pitch change that count as a nod
    var shakeRange = 20;           // degrees of yaw change that count as a shake
    var ticker = false;            // interval handle for tick()
    var heartbeatTimer = false;    // interval handle for heartbeat()

    // Sends `message` to the focused NPC (or, after focus is lost, to the
    // previously focused one) over the shared "interactionComs" channel.
    function callOnNPC(message) {
        if(NPC)
            Messages.sendMessage("interactionComs", NPC + ":" + message);
        else
            Messages.sendMessage("interactionComs", previousNPC + ":" + message);
    }

    // Forward completed speech transcriptions to the NPC as voice data.
    LimitlessSpeechRecognition.onFinishedSpeaking.connect(function(speech) {
        print("Got: " + speech);
        callOnNPC("voiceData:" + speech);
    });

    // Partial transcriptions only tell the NPC that the player is talking.
    LimitlessSpeechRecognition.onReceivedTranscription.connect(function(speech) {
        callOnNPC("speaking");
    });

    // Records the camera orientation that nods/shakes are measured against.
    function setBaselineRotations(rot) {
        baselineX = rot.x;
        baselineY = rot.y;
    }

    // Ray-picks along the camera direction and returns the session UUID of a
    // nearby avatar whose display name contains "NPC", or false if none.
    function findLookedAtNPC() {
        var intersection = AvatarList.findRayIntersection({origin: MyAvatar.position, direction: Quat.getFront(Camera.getOrientation())}, true);
        if (intersection.intersects && intersection.distance <= distance){
            var npcAvatar = AvatarList.getAvatar(intersection.avatarID);
            if (npcAvatar.displayName.search("NPC") != -1) {
                setBaselineRotations(Quat.safeEulerAngles(Camera.getOrientation()));
                return intersection.avatarID;
            }
        }
        return false;
    }

    // True while the focused NPC is still close enough and roughly in view.
    function isStillFocusedNPC() {
        var avatar = AvatarList.getAvatar(NPC);
        if (avatar) {
            var avatarPosition = avatar.position;
            return Vec3.distance(MyAvatar.position, avatarPosition) <= distance && Math.abs(Quat.dot(Camera.getOrientation(), Quat.lookAtSimple(MyAvatar.position, avatarPosition))) > 0.6;
        }
        return false; // NPC reference died. Maybe it crashed or we teleported to a new world?
    }

    function onWeLostFocus() {
        print("lost NPC: " + NPC);
        callOnNPC("onLostFocused");
        // BUG FIX: these were declared with `var`, creating locals that
        // shadowed the script-level baselines so they were never reset.
        baselineX = 0;
        baselineY = 0;
    }

    function onWeGainedFocus() {
        print("found NPC: " + NPC);
        callOnNPC("onFocused");
        var rotation = Quat.safeEulerAngles(Camera.getOrientation());
        baselineX = rotation.x;
        baselineY = rotation.y;
        LimitlessSpeechRecognition.setListeningToVoice(true);
    }

    // Tracks which NPC (if any) the player is looking at and fires the
    // focus gained/lost handlers on transitions.
    function checkFocus() {
        var newNPC = findLookedAtNPC();
        if (NPC && newNPC != NPC && !isStillFocusedNPC()) {
            onWeLostFocus();
            previousNPC = NPC;
            NPC = false;
        }
        if (!NPC && newNPC != false) {
            NPC = newNPC;
            onWeGainedFocus();
        }
    }

    // Compares current camera angles against the baseline and reports a nod
    // (pitch change) or a head shake (yaw change) to the NPC.
    function checkGesture() {
        var rotation = Quat.safeEulerAngles(Camera.getOrientation());
        var deltaX = Math.abs(rotation.x - baselineX);
        // BUG FIX: Euler angles are in [-180, 180], so a raw difference above
        // 180 means we wrapped around and the true separation is 360 - delta.
        // The original subtracted 180, overstating the movement.
        if (deltaX > 180) {
            deltaX = 360 - deltaX;
        }
        var deltaY = Math.abs(rotation.y - baselineY);
        if (deltaY > 180) {
            deltaY = 360 - deltaY;
        }
        if (deltaX >= nodRange && deltaY <= shakeRange) {
            callOnNPC("onNodReceived");
        } else if (deltaY >= shakeRange && deltaX <= nodRange) {
            callOnNPC("onShakeReceived");
        }
    }

    // Periodic work: refresh focus, then look for nod/shake gestures.
    function tick() {
        checkFocus();
        if (NPC) {
            checkGesture();
        }
    }

    // Keep-alive so the NPC can detect when the player disappears.
    function heartbeat() {
        callOnNPC("beat");
    }

    Messages.subscribe("interactionComs");
    // NPCs can push JavaScript back to this client via "clientexec:<code>".
    // NOTE(review): evaluating remote content is a security risk; confirm the
    // channel is trusted before shipping.
    Messages.messageReceived.connect(function (channel, message, sender) {
        if(channel === "interactionComs" && player) {
            var codeIndex = message.search('clientexec');
            if(codeIndex != -1) {
                var code = message.substr(codeIndex+11);
                Script.evaluate(code, '');
            }
        }
    });

    // Entity callback: an avatar entered this entity's volume.
    this.enterEntity = function(id) {
        player = true;
        print("Something entered me: " + id);
        LimitlessSpeechRecognition.setAuthKey("testKey");
        if (!ticker) {
            ticker = Script.setInterval(tick, 333);
        }
        if(!heartbeatTimer) {
            heartbeatTimer = Script.setInterval(heartbeat, 1000);
        }
    };

    // Entity callback: the avatar left; stop listening and notify the NPC.
    this.leaveEntity = function(id) {
        LimitlessSpeechRecognition.setListeningToVoice(false);
        player = false;
        print("Something left me: " + id);
        if (previousNPC)
            Messages.sendMessage("interactionComs", previousNPC + ":leftArea");
        if (ticker) {
            ticker.stop();
            ticker = false;
        }
        if (heartbeatTimer) {
            heartbeatTimer.stop();
            heartbeatTimer = false;
        }
    };

    // Script teardown: stop the periodic ticker.
    this.unload = function() {
        print("Okay. I'm Unloading!");
        if (ticker) {
            ticker.stop();
            ticker = false;
        }
    };

    print("finished loading interaction script");
});

View file

@ -1,179 +0,0 @@
//
// NPCHelpers.js
// scripts/interaction
//
// Created by Trevor Berninger on 3/20/17.
// Copyright 2017 High Fidelity Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Shared mutable state used by the NPC helper functions below.
var audioInjector = false;        // current audio injector, or false when idle
var blocked = false;              // true while a blocking response is playing
var playingResponseAnim = false;  // true while a response animation is running
var storyURL = "";                // story JSON URL; set by the including script
var _qid = "start";               // current story/question node id
print("TESTTEST");  // NOTE(review): leftover debug print -- consider removing
// Returns true when `sub` occurs literally inside `str`.
// BUG FIX: the original used String.search(), which treats `sub` as a
// regular expression -- e.g. strContains("abc", "a.c") was wrongly true,
// and a sub containing regex metacharacters could throw.
function strContains(str, sub) {
    return str.indexOf(sub) !== -1;
}
// Polls conditionFunc every `ms` milliseconds and invokes `callback` as soon
// as it returns truthy. Gives up (with a log line) after 10 attempts.
// `count` is internal recursion bookkeeping; callers omit it.
function callbackOnCondition(conditionFunc, ms, callback, count) {
    var attempt = (typeof count !== 'undefined') ? count : 0;
    if (conditionFunc()) {
        callback();
        return;
    }
    if (attempt >= 10) {
        print("callbackOnCondition timeout");
        return;
    }
    Script.setTimeout(function() {
        callbackOnCondition(conditionFunc, ms, callback, attempt + 1);
    }, ms);
}
// Plays `animURL` on the NPC avatar. Starts the animation immediately to
// force it to load, polls until the engine reports it is actually running,
// then restarts it with proper frame bounds and (optionally) schedules
// `onFinished` for when the clip's duration elapses.
// NOTE(review): pushes the timeout handle onto `timers`, which is declared by
// the including script (NPC_AC.js), not in this file -- confirm all includers
// define it.
function playAnim(animURL, looping, onFinished) {
    print("got anim: " + animURL);
    print("looping: " + looping);
    // Start caching the animation if not already cached.
    AnimationCache.getAnimation(animURL);
    // Tell the avatar to animate so that we can tell if the animation is ready without crashing
    Avatar.startAnimation(animURL, 30, 1, false, false, 0, 1);
    // Continually check if the animation is ready
    callbackOnCondition(function(){
        var details = Avatar.getAnimationDetails();
        // if we are running the request animation and are past the first frame, the anim is loaded properly
        // (URLs are compared with spaces percent-encoded so both sides match)
        print("running: " + details.running);
        print("url and animURL: " + details.url.trim().replace(/ /g, "%20") + " | " + animURL.trim().replace(/ /g, "%20"));
        print("currentFrame: " + details.currentFrame);
        return details.running && details.url.trim().replace(/ /g, "%20") == animURL.trim().replace(/ /g, "%20") && details.currentFrame > 0;
    }, 250, function(){
        // clip duration at 30 fps, in milliseconds, plus a small buffer
        var timeOfAnim = ((AnimationCache.getAnimation(animURL).frames.length / 30) * 1000) + 100; // frames to miliseconds plus a small buffer
        print("animation loaded. length: " + timeOfAnim);
        // Start the animation again but this time with frame information
        Avatar.startAnimation(animURL, 30, 1, looping, true, 0, AnimationCache.getAnimation(animURL).frames.length);
        if (typeof onFinished !== 'undefined') {
            print("onFinished defined. setting the timeout with timeOfAnim");
            timers.push(Script.setTimeout(onFinished, timeOfAnim));
        }
    });
}
// Plays `soundURL` at the NPC's position once the sound has downloaded.
// Any injector that is still playing is stopped first; the new injector is
// stored in the shared `audioInjector`. If `onFinished` is supplied it is
// connected to the injector's finished signal.
function playSound(soundURL, onFinished) {
    var isDownloaded = function() {
        return SoundCache.getSound(soundURL).downloaded;
    };
    callbackOnCondition(isDownloaded, 250, function() {
        if (audioInjector) {
            audioInjector.stop();
        }
        var sound = SoundCache.getSound(soundURL);
        audioInjector = Audio.playSound(sound, {position: Avatar.position, volume: 1.0});
        if (typeof onFinished !== 'undefined') {
            audioInjector.finished.connect(onFinished);
        }
    });
}
// Plays a sound and/or an animation on the NPC and calls `onFinished` when
// the *last* of the two completes. Either URL may be '' or undefined to skip
// that channel. Coordination happens through the shared globals
// `audioInjector` and `playingResponseAnim`.
function npcRespond(soundURL, animURL, onFinished) {
    if (typeof soundURL !== 'undefined' && soundURL != '') {
        print("npcRespond got soundURL!");
        playSound(soundURL, function(){
            print("sound finished");
            var animDetails = Avatar.getAnimationDetails();
            print("animDetails.lastFrame: " + animDetails.lastFrame);
            print("animDetails.currentFrame: " + animDetails.currentFrame);
            // fire onFinished only if the response animation is already done
            // (or was never started); otherwise the anim callback fires it
            if (animDetails.lastFrame < animDetails.currentFrame + 1 || !playingResponseAnim) {
                onFinished();
            }
            audioInjector = false;
        });
    }
    if (typeof animURL !== 'undefined' && animURL != '') {
        print("npcRespond got animURL!");
        playingResponseAnim = true;
        playAnim(animURL, false, function() {
            print("anim finished");
            playingResponseAnim = false;
            print("injector: " + audioInjector);
            // fire onFinished only if audio is not still playing; otherwise
            // the sound callback above fires it
            if (!audioInjector || !audioInjector.isPlaying()) {
                print("resetting Timer");
                print("about to call onFinished");
                onFinished();
            }
        });
    }
}
// Like npcRespond, but drops the request entirely if another blocking
// response is still in flight; clears the block once the response finishes.
function npcRespondBlocking(soundURL, animURL, onFinished) {
    print("blocking response requested");
    if (blocked) {
        return;
    }
    print("not already blocked");
    blocked = true;
    npcRespond(soundURL, animURL, function() {
        if (onFinished) {
            onFinished();
        }
        blocked = false;
    });
}
// Plays a blocking response and then advances the story to `nextID`
// (or replays the current node when nextID is falsy).
function npcContinueStory(soundURL, animURL, nextID, onFinished) {
    var targetID = nextID ? nextID : _qid;
    npcRespondBlocking(soundURL, animURL, function() {
        if (onFinished) {
            onFinished();
        }
        setQid(targetID);
    });
}
// Moves the story to node `qid` and immediately asks the server to run that
// node's init action. The prints trace the transition for debugging.
function setQid(qid) {
    print("setting quid");
    print("_qid: " + _qid);
    _qid = qid;
    print("_qid: " + _qid);
    doActionFromServer("init");
}
// Broadcasts JavaScript for the interacting client to execute.
// The client-side script watches "interactionComs" for "clientexec:" prefixes.
function runOnClient(code) {
    var payload = "clientexec:" + code;
    Messages.sendMessage("interactionComs", payload);
}
// POSTs an action for the current story node (_qid) to the Limitless story
// server and evaluates whatever JavaScript the server returns.
//   action: e.g. "start", "init", "words"
//   data: optional payload (e.g. the recognized words)
//   useServerCache: pass false to append nocache=true
// NOTE(review): Script.evaluate of the HTTP response executes remote code;
// confirm the endpoint is trusted.
function doActionFromServer(action, data, useServerCache) {
    if (action == "start") {
        // NOTE(review): ignoreCount is not declared in this file; this
        // creates/overwrites a global -- confirm the including script uses it.
        ignoreCount = 0;
        _qid = "start";
    }
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "http://gserv_devel.studiolimitless.com/story", true);
    xhr.onreadystatechange = function(){
        if (xhr.readyState == 4){
            if (xhr.status == 200) {
                print("200!");
                print("evaluating: " + xhr.responseText);
                Script.evaluate(xhr.responseText, "");
            } else if (xhr.status == 444) {
                // 444 is the server's application-level API error code
                print("Limitless Serv 444: API error: " + xhr.responseText);
            } else {
                print("HTTP Code: " + xhr.status + ": " + xhr.responseText);
            }
        }
    };
    xhr.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
    // NOTE(review): values are not URL-encoded, so data containing '&' or '='
    // would corrupt the form body -- confirm inputs are constrained.
    var postData = "url=" + storyURL + "&action=" + action + "&qid=" + _qid;
    if (typeof data !== 'undefined' && data != '') {
        postData += "&data=" + data;
    }
    if (typeof useServerCache !== 'undefined' && !useServerCache) {
        postData += "&nocache=true";
    }
    print("Sending: " + postData);
    xhr.send(postData);
}

View file

@ -1,102 +0,0 @@
//
// NPC_AC.js
// scripts/interaction
//
// Created by Trevor Berninger on 3/20/17.
// Copyright 2017 High Fidelity Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Shared state for the sphinx NPC assignment-client script.
var currentlyUsedIndices = [];  // riddle indices already handed out this session
var timers = [];                // pending timeout handles (also pushed to by NPCHelpers.playAnim)
var currentlyEngaged = false;   // true while a player is interacting with us
var questionNumber = 0;         // riddle progress counter (read/written by the story JSON actions)
var heartbeatTimeout = false;   // watchdog that ends the interaction when client heartbeats stop
// Picks a random riddle index in [1, 15] that has not been handed out yet
// this session, records it in currentlyUsedIndices, and returns it as a
// string (story node QIDs are strings).
// BUG FIX: the original tested `randIndex in currentlyUsedIndices`, which
// checks array *keys* (0..length-1), not values -- so already-asked riddles
// could repeat while low indices were wrongly treated as used.
// NOTE(review): loops forever once all 15 riddles are used, same as the
// original; callers reset currentlyUsedIndices before each session.
function getRandomRiddle() {
    var randIndex = null;
    do {
        randIndex = Math.floor(Math.random() * 15) + 1;
    } while (currentlyUsedIndices.indexOf(randIndex) !== -1);
    currentlyUsedIndices.push(randIndex);
    return randIndex.toString();
}
// Pull in the shared NPC helper functions, then start main() once loaded.
Script.include("https://raw.githubusercontent.com/Delamare2112/hifi/Interaction/unpublishedScripts/interaction/NPCHelpers.js", function(){
    print("NPCHelpers included.");main();
});
// NOTE(review): idleAnim is not referenced elsewhere in this file.
var idleAnim = "https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/idle.fbx";
var FST = "https://s3.amazonaws.com/hifi-public/tony/fixed-sphinx/sphinx.fst";
// Configure this assignment client as a visible NPC avatar named "NPC"
// (the client-side interaction script matches on that display name).
Agent.isAvatar = true;
Avatar.skeletonModelURL = FST;
Avatar.displayName = "NPC";
Avatar.position = {x: 0.3, y: -23.4, z: 8.0};
Avatar.orientation = {x: 0, y: 1, z: 0, w: 0};
// Avatar.position = {x: 1340.3555, y: 4.078, z: -420.1562};
// Avatar.orientation = {x: 0, y: -0.707, z: 0, w: 0.707};
Avatar.scale = 2;
Messages.subscribe("interactionComs");
// Called when the player's heartbeat stops arriving (see main's watchdog):
// resets interaction state, stops any playing audio, cancels pending timers,
// and -- unless we are already mid-restart -- plays a random early-exit
// response before returning to the idle entrance animation.
function endInteraction() {
    print("ending interaction");
    blocked = false;
    currentlyEngaged = false;
    if(audioInjector)
        audioInjector.stop();
    for (var t in timers) {
        Script.clearTimeout(timers[t]);
    }
    if(_qid != "Restarting") {
        npcRespondBlocking(
            'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/EarlyExit_0' + (Math.floor(Math.random() * 2) + 1).toString() + '.wav',
            'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/reversedSphinx.fbx',
            function(){
                Avatar.startAnimation('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Hifi_Sphinx_Anim_Entrance_Kneel_Combined_with_Intro.fbx', 0);
            }
        );
    }
}
// Entry point, invoked once NPCHelpers.js has been included: sets the story
// URL, wires up the interaction message handler, and starts the entrance
// animation.
function main() {
    storyURL = "https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Sphinx.json";
    Messages.messageReceived.connect(function (channel, message, sender) {
        // heartbeats are too chatty to log
        if(!strContains(message, 'beat'))
            print(sender + " -> NPC @" + Agent.sessionUUID + ": " + message);
        // only handle messages addressed to this agent's session UUID
        if (channel === "interactionComs" && strContains(message, Agent.sessionUUID)) {
            if (strContains(message, 'beat')) {
                // reset the watchdog; if beats stop for 1.5s the player is gone
                if(heartbeatTimeout) {
                    Script.clearTimeout(heartbeatTimeout);
                    heartbeatTimeout = false;
                }
                heartbeatTimeout = Script.setTimeout(endInteraction, 1500);
            }
            else if (strContains(message, "onFocused") && !currentlyEngaged) {
                // a player looked at us: reset state and start the story
                blocked = false;
                currentlyEngaged = true;
                currentlyUsedIndices = [];
                doActionFromServer("start");
            } else if (strContains(message, "leftArea")) {
            } else if (strContains(message, "speaking")) {
            } else {
                var voiceDataIndex = message.search("voiceData");
                if (voiceDataIndex != -1) {
                    var words = message.substr(voiceDataIndex+10);
                    // during a riddle (numeric qid), asking to "repeat" or
                    // "say ... again" replays the current prompt
                    if (!isNaN(_qid) && (strContains(words, "repeat") || (strContains(words, "say") && strContains(words, "again")))) {
                        doActionFromServer("init");
                    } else {
                        doActionFromServer("words", words);
                    }
                }
            }
        }
    });
    // Script.update.connect(updateGem);
    Avatar.startAnimation("https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Hifi_Sphinx_Anim_Entrance_Kneel_Combined_with_Intro.fbx", 0);
}

View file

@ -1,159 +0,0 @@
{
"Name": "10 Questions",
"Defaults":
{
"Actions":
{
"positive": "var x=function(){if(questionNumber>=2){setQid('Finished');return;}var suffix=['A', 'B'][questionNumber++] + '_0' + (Math.floor(Math.random() * 2) + 2).toString() + '.wav';npcContinueStory('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/RightAnswer'+suffix, 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/RightAnswerB_02.fbx', getRandomRiddle());};x();",
"unknown": "var suffix=(Math.floor(Math.random() * 3) + 1).toString();npcContinueStory('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/WrongAnswer_0' + suffix + '.wav','https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/WrongAnswer_0' + suffix + '.fbx', getRandomRiddle());",
"hint": "var suffix=(Math.floor(Math.random() * 2) + 1).toString();npcContinueStory('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Hint_0' + suffix + '.wav','https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Hint_0' + suffix + '.fbx')"
},
"Responses":
{
"positive": ["yes","yup","yeah","yahoo","sure","affirmative","okay","aye","right","exactly","course","naturally","unquestionably","positively","yep","definitely","certainly","fine","absolutely","positive","love","fantastic"],
"thinking": ["oh", "think about", "i know", "what was", "well", "not sure", "one before", "hold", "one moment", "one second", "1 second", "1 sec", "one sec"],
"hint": ["hint", "heads"]
}
},
"Story":
[
{
"QID": "start",
"init": "questionNumber=0;npcContinueStory('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/HiFi_Sphinx_Anim_Combined_Entrance_Audio.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Hifi_Sphinx_Anim_Entrance_Kneel_Combined_with_Intro.fbx', getRandomRiddle());"
},
{
"QID": "1",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Blackboard.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Blackboard.fbx');",
"responses":
{
"positive": ["blackboard", "chalkboard", "chalk board", "slate"]
}
},
{
"QID": "2",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Breath.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Breath.fbx');",
"responses":
{
"positive": ["breath", "death"]
}
},
{
"QID": "3",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Clock.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Clock.fbx');",
"responses":
{
"positive": ["clock", "cock"]
}
},
{
"QID": "4",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Coffin.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Coffin.fbx');",
"responses":
{
"positive": ["coffin", "casket", "possum"]
}
},
{
"QID": "5",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Coin.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Coin.fbx');",
"responses":
{
"positive": ["coin", "boing", "coinage", "coin piece", "change", "join"]
}
},
{
"QID": "6",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Corn.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Corn.fbx');",
"responses":
{
"positive": ["corn", "born", "maize", "maze", "means", "torn", "horn", "worn", "porn"]
}
},
{
"QID": "7",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Darkness.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Darkness.fbx');",
"responses":
{
"positive": ["darkness", "dark", "blackness"]
}
},
{
"QID": "8",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Gloves.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Gloves.fbx');",
"responses":
{
"positive": ["gloves", "love"]
}
},
{
"QID": "9",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Gold.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Gold.fbx');",
"responses":
{
"positive": ["gold", "old", "bold", "cold", "told"]
}
},
{
"QID": "10",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_River.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_River.fbx');",
"responses":
{
"positive": ["river", "bigger", "stream", "creek", "brook"]
}
},
{
"QID": "11",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Secret.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Secret.fbx');",
"responses":
{
"positive": ["secret"]
}
},
{
"QID": "12",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Shadow.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Shadow.fbx');",
"responses":
{
"positive": ["shadow"]
}
},
{
"QID": "13",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Silence.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Silence.fbx');",
"responses":
{
"positive": ["silence", "lance", "quiet"]
}
},
{
"QID": "14",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Stairs.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Stairs.fbx');",
"responses":
{
"positive": ["stairs", "steps", "stair", "stairwell", "there's", "stairway"]
}
},
{
"QID": "15",
"init": "npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/Riddle_Umbrella.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Riddle_Umbrella.fbx');",
"responses":
{
"positive": ["umbrella"]
}
},
{
"QID": "Finished",
"init": "Script.clearTimeout(heartbeatTimeout);heartbeatTimeout = false;npcRespondBlocking('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/ScratchDialogue/ConclusionRight_02.wav', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/ConclusionRight_02.fbx', function(){runOnClient('MyAvatar.goToLocation({x: 5, y: -29, z: -63}, true, true);');setQid('Restarting');});",
"positive": "",
"negative": "",
"unknown": ""
},
{
"QID": "Restarting",
"init": "npcRespondBlocking('', 'https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/reversedSphinx.fbx', function(){Avatar.startAnimation('https://storage.googleapis.com/limitlessserv-144100.appspot.com/hifi%20assets/Animation/Hifi_Sphinx_Anim_Entrance_Kneel_Combined_with_Intro.fbx', 0);_qid='';});",
"positive": "",
"negative": "",
"unknown": ""
}
]
}