Mirror of https://github.com/lubosz/overte.git, synced 2025-04-09 05:03:52 +02:00

Merge branch 'master' of https://github.com/highfidelity/hifi into proto-received-message

Commit dac491ef41: 144 changed files with 4750 additions and 1640 deletions
@@ -202,6 +202,8 @@ if (NOT ANDROID)
  set_target_properties(ice-server PROPERTIES FOLDER "Apps")
  add_subdirectory(interface)
  set_target_properties(interface PROPERTIES FOLDER "Apps")
  add_subdirectory(stack-manager)
  set_target_properties(stack-manager PROPERTIES FOLDER "Apps")
  add_subdirectory(tests)
  add_subdirectory(plugins)
  add_subdirectory(tools)
@@ -10,5 +10,4 @@ link_hifi_libraries(
)

include_application_version()

copy_dlls_beside_windows_executable()
package_libraries_for_deployment()
@@ -17,11 +17,13 @@
#include <QtNetwork/QNetworkReply>

#include <AvatarHashMap.h>
#include <AssetClient.h>
#include <MessagesClient.h>
#include <NetworkAccessManager.h>
#include <NodeList.h>
#include <udt/PacketHeaders.h>
#include <ResourceCache.h>
#include <ScriptCache.h>
#include <SoundCache.h>
#include <UUID.h>
@@ -50,6 +52,14 @@ Agent::Agent(ReceivedMessage& message) :
{
    DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);

    auto assetClient = DependencyManager::set<AssetClient>();

    QThread* assetThread = new QThread;
    assetThread->setObjectName("Asset Thread");
    assetClient->moveToThread(assetThread);
    connect(assetThread, &QThread::started, assetClient.data(), &AssetClient::init);
    assetThread->start();

    DependencyManager::set<ResourceCacheSharedItems>();
    DependencyManager::set<SoundCache>();
    DependencyManager::set<recording::Deck>();
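The constructor above parks the AssetClient on a dedicated thread and defers its init() until that thread has started. A minimal, self-contained sketch of the same Qt worker-thread hand-off, using a plain QObject and a lambda in place of the hifi classes (all names below are illustrative, not from the codebase):

    #include <QCoreApplication>
    #include <QObject>
    #include <QThread>
    #include <QDebug>

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);

        // Stand-in for the AssetClient; any QObject works.
        QObject* worker = new QObject;

        QThread* workerThread = new QThread;
        workerThread->setObjectName("Asset Thread");
        worker->moveToThread(workerThread);

        // With 'worker' as the context object, the lambda runs on workerThread,
        // mirroring connect(assetThread, &QThread::started, assetClient.data(), &AssetClient::init).
        QObject::connect(workerThread, &QThread::started, worker, [] {
            qDebug() << "initialized on" << QThread::currentThread();
        });

        workerThread->start();
        return app.exec();
    }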
@@ -116,6 +126,11 @@ void Agent::handleAudioPacket(QSharedPointer<ReceivedMessage> message) {
const QString AGENT_LOGGING_NAME = "agent";

void Agent::run() {

    // make sure we request our script once the agent connects to the domain
    auto nodeList = DependencyManager::get<NodeList>();
    connect(&nodeList->getDomainHandler(), &DomainHandler::connectedToDomain, this, &Agent::requestScript);

    ThreadedAssignment::commonInit(AGENT_LOGGING_NAME, NodeType::Agent);

    // Setup MessagesClient
@@ -125,72 +140,98 @@ void Agent::run() {
    messagesClient->moveToThread(messagesThread);
    connect(messagesThread, &QThread::started, messagesClient.data(), &MessagesClient::init);
    messagesThread->start();

    nodeList->addSetOfNodeTypesToNodeInterestSet({
        NodeType::AudioMixer, NodeType::AvatarMixer, NodeType::EntityServer, NodeType::MessagesMixer, NodeType::AssetServer
    });
}

void Agent::requestScript() {
    auto nodeList = DependencyManager::get<NodeList>();
    nodeList->addSetOfNodeTypesToNodeInterestSet(NodeSet()
        << NodeType::AudioMixer
        << NodeType::AvatarMixer
        << NodeType::EntityServer
        << NodeType::MessagesMixer
    );

    disconnect(&nodeList->getDomainHandler(), &DomainHandler::connectedToDomain, this, &Agent::requestScript);

    // figure out the URL for the script for this agent assignment
    QUrl scriptURL;
    if (_payload.isEmpty()) {
        scriptURL = QUrl(QString("http://%1:%2/assignment/%3")
            .arg(DependencyManager::get<NodeList>()->getDomainHandler().getIP().toString())
            .arg(DOMAIN_SERVER_HTTP_PORT)
            .arg(uuidStringWithoutCurlyBraces(_uuid)));
        scriptURL = QUrl(QString("http://%1:%2/assignment/%3/")
            .arg(nodeList->getDomainHandler().getIP().toString())
            .arg(DOMAIN_SERVER_HTTP_PORT)
            .arg(uuidStringWithoutCurlyBraces(nodeList->getSessionUUID())));
    } else {
        scriptURL = QUrl(_payload);
    }

    // setup a network access manager and
    QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
    QNetworkRequest networkRequest = QNetworkRequest(scriptURL);
    networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
    QNetworkReply* reply = networkAccessManager.get(networkRequest);

    QNetworkDiskCache* cache = new QNetworkDiskCache();
    QString cachePath = QStandardPaths::writableLocation(QStandardPaths::DataLocation);
    cache->setCacheDirectory(!cachePath.isEmpty() ? cachePath : "agentCache");
    networkAccessManager.setCache(cache);

    QNetworkRequest networkRequest = QNetworkRequest(scriptURL);
    networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);

    // setup a timeout for script request
    static const int SCRIPT_TIMEOUT_MS = 10000;
    _scriptRequestTimeout = new QTimer(this);
    connect(_scriptRequestTimeout, &QTimer::timeout, this, &Agent::scriptRequestFinished);
    _scriptRequestTimeout->start(SCRIPT_TIMEOUT_MS);

    qDebug() << "Downloading script at" << scriptURL.toString();
    QNetworkReply* reply = networkAccessManager.get(networkRequest);
    connect(reply, &QNetworkReply::finished, this, &Agent::scriptRequestFinished);
}
    QEventLoop loop;
    QObject::connect(reply, SIGNAL(finished()), &loop, SLOT(quit()));

void Agent::scriptRequestFinished() {
    auto reply = qobject_cast<QNetworkReply*>(sender());

    loop.exec();
    _scriptRequestTimeout->stop();

    if (reply && reply->error() == QNetworkReply::NoError) {
        _scriptContents = reply->readAll();
        qDebug() << "Downloaded script:" << _scriptContents;

        // we could just call executeScript directly - we use a QueuedConnection to allow scriptRequestFinished
        // to return before calling executeScript
        QMetaObject::invokeMethod(this, "executeScript", Qt::QueuedConnection);
    } else {
        if (reply) {
            qDebug() << "Failed to download script at" << reply->url().toString() << " - bailing on assignment.";
            qDebug() << "QNetworkReply error was" << reply->errorString();
        } else {
            qDebug() << "Failed to download script - request timed out. Bailing on assignment.";
        }

        setFinished(true);
    }

    reply->deleteLater();
}

    QString scriptContents(reply->readAll());
    delete reply;

    qDebug() << "Downloaded script:" << scriptContents;

    _scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(scriptContents, _payload));
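scriptRequestFinished() above is reached from two places, the reply's finished signal and the request timeout, and has to cope with whichever arrives first. A small stand-alone sketch of that one-shot "finished or timed out" pattern with plain Qt networking; the URL, timeout value, and the handled guard are illustrative, and the real slot distinguishes the two paths via sender() instead:

    #include <QCoreApplication>
    #include <QNetworkAccessManager>
    #include <QNetworkReply>
    #include <QNetworkRequest>
    #include <QTimer>
    #include <QUrl>
    #include <QDebug>

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);

        QNetworkAccessManager manager;
        QNetworkReply* reply = manager.get(QNetworkRequest(QUrl("http://example.com/"))); // placeholder URL

        QTimer timeout;
        timeout.setSingleShot(true);

        // Whichever fires first wins; the guard keeps the cleanup from running twice.
        bool handled = false;
        auto finish = [&](bool timedOut) {
            if (handled) { return; }
            handled = true;
            if (!timedOut && reply->error() == QNetworkReply::NoError) {
                qDebug() << "downloaded" << reply->readAll().size() << "bytes";
            } else {
                qDebug() << (timedOut ? QString("request timed out") : reply->errorString());
            }
            reply->deleteLater();
            app.quit();
        };
        QObject::connect(reply, &QNetworkReply::finished, [&] { finish(false); });
        QObject::connect(&timeout, &QTimer::timeout, [&] { finish(true); });
        timeout.start(10000); // stands in for SCRIPT_TIMEOUT_MS above

        return app.exec();
    }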
void Agent::executeScript() {
    _scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(_scriptContents, _payload));
    _scriptEngine->setParent(this); // be the parent of the script engine so it gets moved when we do

    // setup an Avatar for the script to use
    auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
    connect(_scriptEngine.get(), SIGNAL(update(float)), scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
    scriptedAvatar->setForceFaceTrackerConnected(true);

    // call model URL setters with empty URLs so our avatar, if user, will have the default models
    scriptedAvatar->setFaceModelURL(QUrl());
    scriptedAvatar->setSkeletonModelURL(QUrl());
    // give this AvatarData object to the script engine
    _scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());

    using namespace recording;
    static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
    // FIXME how to deal with driving multiple avatars locally?
    Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this, scriptedAvatar](Frame::ConstPointer frame) {
        AvatarData::fromFrame(frame->data, *scriptedAvatar);
    });

    using namespace recording;
    static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::AUDIO_FRAME_NAME);
    Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
@@ -201,32 +242,30 @@ void Agent::run() {
        audioTransform.setRotation(scriptedAvatar->getOrientation());
        AbstractAudioInterface::emitAudioPacket(audio.data(), audio.size(), audioSequenceNumber, audioTransform, PacketType::MicrophoneAudioNoEcho);
    });

    auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
    _scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());

    auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
    packetReceiver.registerListener(PacketType::BulkAvatarData, avatarHashMap.data(), "processAvatarDataPacket");
    packetReceiver.registerListener(PacketType::KillAvatar, avatarHashMap.data(), "processKillAvatar");
    packetReceiver.registerListener(PacketType::AvatarIdentity, avatarHashMap.data(), "processAvatarIdentityPacket");
    packetReceiver.registerListener(PacketType::AvatarBillboard, avatarHashMap.data(), "processAvatarBillboardPacket");

    // register ourselves to the script engine
    _scriptEngine->registerGlobalObject("Agent", this);

    // FIXME -we shouldn't be calling this directly, it's normally called by run(), not sure why
    // viewers would need this called.
    //_scriptEngine->init(); // must be done before we set up the viewers

    _scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCache>().data());

    QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
    _scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);

    auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();

    _scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);

    // we need to make sure that init has been called for our EntityScriptingInterface
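The WebSocketServer registration earlier in this hunk uses the QtScript pattern of wrapping a native constructor with newFunction() and publishing it on the engine's global object. A minimal sketch of that mechanism with a made-up Thing constructor (nothing here comes from the hifi classes; it only shows the registration shape):

    #include <QCoreApplication>
    #include <QtScript/QScriptEngine>
    #include <QtScript/QScriptContext>
    #include <QDebug>

    // Illustrative native constructor; stands in for WebSocketServerClass::constructor.
    static QScriptValue makeThing(QScriptContext* context, QScriptEngine* engine) {
        QScriptValue object = engine->newObject();
        object.setProperty("name", context->argument(0));
        return object;
    }

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);
        QScriptEngine engine;

        // Same registration shape as the WebSocketServer lines above.
        QScriptValue thingConstructor = engine.newFunction(makeThing);
        engine.globalObject().setProperty("Thing", thingConstructor);

        qDebug() << engine.evaluate("Thing('demo').name").toString();   // "demo"
        return 0;
    }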
@@ -237,18 +276,23 @@ void Agent::run() {
    _entityViewer.init();

    entityScriptingInterface->setEntityTree(_entityViewer.getTree());

    // wire up our additional agent related processing to the update signal
    QObject::connect(_scriptEngine.get(), &ScriptEngine::update, this, &Agent::processAgentAvatarAndAudio);

    _scriptEngine->run();

    Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
    Frame::clearFrameHandler(AVATAR_FRAME_TYPE);

    setFinished(true);
}

QUuid Agent::getSessionUUID() const {
    return DependencyManager::get<NodeList>()->getSessionUUID();
}

void Agent::setIsAvatar(bool isAvatar) {
    _isAvatar = isAvatar;

@@ -413,4 +457,10 @@ void Agent::aboutToFinish() {

    // our entity tree is going to go away so tell that to the EntityScriptingInterface
    DependencyManager::get<EntityScriptingInterface>()->setEntityTree(NULL);

    // cleanup the AssetClient thread
    QThread* assetThread = DependencyManager::get<AssetClient>()->thread();
    DependencyManager::destroy<AssetClient>();
    assetThread->quit();
    assetThread->wait();
}
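aboutToFinish() above tears the asset thread down by grabbing the thread pointer, destroying the client that lives on it, and then quitting and joining the thread. A compact sketch of that quit-then-wait shutdown order with a plain QObject standing in for the AssetClient:

    #include <QCoreApplication>
    #include <QThread>
    #include <QDebug>

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);

        QObject worker;                 // stands in for the AssetClient above
        QThread workerThread;
        worker.moveToThread(&workerThread);
        workerThread.start();

        // Same teardown order as the new aboutToFinish(): remember the thread,
        // release the object living on it, then quit and join the thread.
        QThread* thread = worker.thread();
        // (in the Agent this step is DependencyManager::destroy<AssetClient>())
        thread->quit();
        thread->wait();
        qDebug() << "worker thread joined cleanly";
        return 0;
    }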
@@ -18,6 +18,7 @@
#include <QtScript/QScriptEngine>
#include <QtCore/QObject>
#include <QtCore/QUrl>
#include <QUuid>

#include <EntityEditPacketSender.h>
#include <EntityTree.h>

@@ -35,6 +36,8 @@ class Agent : public ThreadedAssignment {
    Q_PROPERTY(bool isPlayingAvatarSound READ isPlayingAvatarSound)
    Q_PROPERTY(bool isListeningToAudioStream READ isListeningToAudioStream WRITE setIsListeningToAudioStream)
    Q_PROPERTY(float lastReceivedAudioLoudness READ getLastReceivedAudioLoudness)
    Q_PROPERTY(QUuid sessionUUID READ getSessionUUID)

public:
    Agent(ReceivedMessage& message);

@@ -47,6 +50,7 @@ public:
    void setIsListeningToAudioStream(bool isListeningToAudioStream) { _isListeningToAudioStream = isListeningToAudioStream; }

    float getLastReceivedAudioLoudness() const { return _lastReceivedAudioLoudness; }
    QUuid getSessionUUID() const;

    virtual void aboutToFinish();

@@ -55,9 +59,14 @@ public slots:
    void playAvatarSound(Sound* avatarSound) { setAvatarSound(avatarSound); }

private slots:
    void requestScript();
    void scriptRequestFinished();
    void executeScript();

    void handleAudioPacket(QSharedPointer<ReceivedMessage> packet);
    void handleOctreePacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer senderNode);
    void handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);

    void processAgentAvatarAndAudio(float deltaTime);

private:

@@ -73,6 +82,8 @@ private:
    void sendAvatarIdentityPacket();
    void sendAvatarBillboardPacket();

    QString _scriptContents;
    QTimer* _scriptRequestTimeout { nullptr };
    bool _isListeningToAudioStream = false;
    Sound* _avatarSound = nullptr;
    int _numAvatarSoundSentBytes = 0;
|
@ -9,6 +9,7 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <memory>
|
||||
#include <signal.h>
|
||||
|
||||
#include <AddressManager.h>
|
||||
|
@@ -227,8 +228,9 @@ void AssignmentClientMonitor::handleChildStatusPacket(QSharedPointer<ReceivedMes
        matchingNode = DependencyManager::get<LimitedNodeList>()->addOrUpdateNode
            (senderID, NodeType::Unassigned, senderSockAddr, senderSockAddr, false, false);

        childData = new AssignmentClientChildData(Assignment::Type::AllTypes);
        matchingNode->setLinkedData(childData);
        auto childData = std::unique_ptr<AssignmentClientChildData>
            { new AssignmentClientChildData(Assignment::Type::AllTypes) };
        matchingNode->setLinkedData(std::move(childData));
    } else {
        // tell unknown assignment-client child to exit.
        qDebug() << "Asking unknown child at" << senderSockAddr << "to exit.";
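Several call sites in this commit switch setLinkedData() from a raw new to an explicit std::unique_ptr handed over with std::move. A self-contained sketch of that ownership-transfer shape; LinkedData, ChildData, and Node below are illustrative stand-ins, not the real networking classes:

    #include <iostream>
    #include <memory>
    #include <utility>

    // Minimal stand-ins for the linked-data hierarchy; only the ownership
    // transfer matters here.
    struct LinkedData {
        virtual ~LinkedData() = default;
    };

    struct ChildData : LinkedData {
        int type = 0;
        explicit ChildData(int t) : type(t) {}
    };

    class Node {
    public:
        // Taking std::unique_ptr by value makes the transfer of ownership explicit
        // at every call site, which is what the setLinkedData() changes are about.
        void setLinkedData(std::unique_ptr<LinkedData> data) { _linkedData = std::move(data); }
        LinkedData* getLinkedData() const { return _linkedData.get(); }
    private:
        std::unique_ptr<LinkedData> _linkedData;
    };

    int main() {
        Node node;
        auto childData = std::unique_ptr<ChildData> { new ChildData(42) };
        node.setLinkedData(std::move(childData));        // childData is now empty
        std::cout << (node.getLinkedData() != nullptr) << "\n";   // prints 1
    }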
@ -14,6 +14,7 @@
|
|||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <math.h>
|
||||
#include <memory>
|
||||
#include <signal.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
@@ -661,7 +662,7 @@ void AudioMixer::domainSettingsRequestComplete() {
    nodeList->addNodeTypeToInterestSet(NodeType::Agent);

    nodeList->linkedDataCreateCallback = [](Node* node) {
        node->setLinkedData(new AudioMixerClientData());
        node->setLinkedData(std::unique_ptr<AudioMixerClientData> { new AudioMixerClientData });
    };

    DomainHandler& domainHandler = nodeList->getDomainHandler();
|
@ -11,6 +11,7 @@
|
|||
|
||||
#include <cfloat>
|
||||
#include <random>
|
||||
#include <memory>
|
||||
|
||||
#include <QtCore/QCoreApplication>
|
||||
#include <QtCore/QDateTime>
|
||||
|
@@ -536,7 +537,7 @@ void AvatarMixer::domainSettingsRequestComplete() {
    nodeList->addNodeTypeToInterestSet(NodeType::Agent);

    nodeList->linkedDataCreateCallback = [] (Node* node) {
        node->setLinkedData(new AvatarMixerClientData());
        node->setLinkedData(std::unique_ptr<AvatarMixerClientData> { new AvatarMixerClientData });
    };

    // parse the settings to pull out the values we need
@@ -46,8 +46,8 @@ void EntityServer::handleEntityPacket(QSharedPointer<ReceivedMessage> message, S
    }
}

OctreeQueryNode* EntityServer::createOctreeQueryNode() {
    return new EntityNodeData();
std::unique_ptr<OctreeQueryNode> EntityServer::createOctreeQueryNode() {
    return std::unique_ptr<OctreeQueryNode> { new EntityNodeData() };
}

OctreePointer EntityServer::createTree() {
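createOctreeQueryNode() now returns std::unique_ptr<OctreeQueryNode> instead of a raw pointer. One consequence worth noting: smart-pointer return types are not covariant, so every override has to keep the base unique_ptr type, as in this illustrative sketch (QueryNode, Server, and the derived names are stand-ins, not the octree classes):

    #include <iostream>
    #include <memory>

    struct QueryNode {
        virtual ~QueryNode() = default;
        virtual const char* name() const { return "base"; }
    };
    struct EntityQueryNode : QueryNode {
        const char* name() const override { return "entity"; }
    };

    struct Server {
        // Unlike raw pointers, unique_ptr return types are not covariant, so the
        // override below must keep the base unique_ptr type, matching the
        // createOctreeQueryNode() signature change in this diff.
        virtual std::unique_ptr<QueryNode> createQueryNode() = 0;
        virtual ~Server() = default;
    };

    struct EntityServerLike : Server {
        std::unique_ptr<QueryNode> createQueryNode() override {
            return std::unique_ptr<QueryNode> { new EntityQueryNode() };
        }
    };

    int main() {
        EntityServerLike server;
        auto node = server.createQueryNode();
        std::cout << node->name() << "\n";   // "entity"
    }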
@ -14,6 +14,8 @@
|
|||
|
||||
#include "../octree/OctreeServer.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "EntityItem.h"
|
||||
#include "EntityServerConsts.h"
|
||||
#include "EntityTree.h"
|
||||
|
@ -26,7 +28,7 @@ public:
|
|||
~EntityServer();
|
||||
|
||||
// Subclasses must implement these methods
|
||||
virtual OctreeQueryNode* createOctreeQueryNode() override ;
|
||||
virtual std::unique_ptr<OctreeQueryNode> createOctreeQueryNode() override ;
|
||||
virtual char getMyNodeType() const override { return NodeType::EntityServer; }
|
||||
virtual PacketType getMyQueryMessageType() const override { return PacketType::EntityQuery; }
|
||||
virtual const char* getMyServerName() const override { return MODEL_SERVER_NAME; }
|
||||
|
|
|
@ -12,123 +12,76 @@
|
|||
#include <QtCore/QCoreApplication>
|
||||
#include <QtCore/QJsonObject>
|
||||
#include <QBuffer>
|
||||
|
||||
#include <LogHandler.h>
|
||||
#include <MessagesClient.h>
|
||||
#include <NodeList.h>
|
||||
#include <udt/PacketHeaders.h>
|
||||
|
||||
#include "MessagesMixer.h"
|
||||
|
||||
const QString MESSAGES_MIXER_LOGGING_NAME = "messages-mixer";
|
||||
|
||||
MessagesMixer::MessagesMixer(ReceivedMessage& message) :
|
||||
ThreadedAssignment(message)
|
||||
MessagesMixer::MessagesMixer(ReceivedMessage& message) : ThreadedAssignment(message)
|
||||
{
|
||||
// make sure we hear about node kills so we can tell the other nodes
|
||||
connect(DependencyManager::get<NodeList>().data(), &NodeList::nodeKilled, this, &MessagesMixer::nodeKilled);
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
packetReceiver.registerListener(PacketType::MessagesData, this, "handleMessages");
|
||||
packetReceiver.registerListener(PacketType::MessagesSubscribe, this, "handleMessagesSubscribe");
|
||||
packetReceiver.registerListener(PacketType::MessagesUnsubscribe, this, "handleMessagesUnsubscribe");
|
||||
}
|
||||
|
||||
MessagesMixer::~MessagesMixer() {
|
||||
}
|
||||
|
||||
void MessagesMixer::nodeKilled(SharedNodePointer killedNode) {
|
||||
for (auto& channel : _channelSubscribers) {
|
||||
channel.remove(killedNode->getUUID());
|
||||
}
|
||||
}
|
||||
|
||||
void MessagesMixer::handleMessages(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
Q_ASSERT(message->getType() == PacketType::MessagesData);
|
||||
void MessagesMixer::handleMessages(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode) {
|
||||
QString channel, message;
|
||||
QUuid senderID;
|
||||
MessagesClient::decodeMessagesPacket(receivedMessage, channel, message, senderID);
|
||||
|
||||
QByteArray packetData = message->getMessage();
|
||||
QBuffer packet{ &packetData };
|
||||
packet.open(QIODevice::ReadOnly);
|
||||
|
||||
quint16 channelLength;
|
||||
packet.read(reinterpret_cast<char*>(&channelLength), sizeof(channelLength));
|
||||
auto channelData = packet.read(channelLength);
|
||||
QString channel = QString::fromUtf8(channelData);
|
||||
|
||||
quint16 messageLength;
|
||||
packet.read(reinterpret_cast<char*>(&messageLength), sizeof(messageLength));
|
||||
auto messageData = packet.read(messageLength);
|
||||
QString messageRaw = QString::fromUtf8(messageData);
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
nodeList->eachMatchingNode(
|
||||
[&](const SharedNodePointer& node)->bool {
|
||||
|
||||
return node->getType() == NodeType::Agent && node->getActiveSocket() &&
|
||||
_channelSubscribers[channel].contains(node->getUUID());
|
||||
},
|
||||
[&](const SharedNodePointer& node) {
|
||||
|
||||
auto packetList = NLPacketList::create(PacketType::MessagesData, QByteArray(), true, true);
|
||||
|
||||
auto channelUtf8 = channel.toUtf8();
|
||||
quint16 channelLength = channelUtf8.length();
|
||||
packetList->writePrimitive(channelLength);
|
||||
packetList->write(channelUtf8);
|
||||
|
||||
auto messageUtf8 = messageRaw.toUtf8();
|
||||
quint16 messageLength = messageUtf8.length();
|
||||
packetList->writePrimitive(messageLength);
|
||||
packetList->write(messageUtf8);
|
||||
|
||||
auto packetList = MessagesClient::encodeMessagesPacket(channel, message, senderID);
|
||||
nodeList->sendPacketList(std::move(packetList), *node);
|
||||
});
|
||||
}
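handleMessages() above now delegates framing to MessagesClient::decodeMessagesPacket()/encodeMessagesPacket() instead of parsing the packet inline. The removed inline code shows the layout being round-tripped: a quint16 channel length, the channel as UTF-8, a quint16 message length, then the message as UTF-8. A stand-alone sketch of that framing follows; the real helpers also carry the sender UUID and may differ in detail, so treat this only as an illustration of the old layout:

    #include <QBuffer>
    #include <QByteArray>
    #include <QString>
    #include <QDebug>

    // Round-trips the wire layout the removed inline code parsed:
    //   [quint16 channelLength][channel UTF-8][quint16 messageLength][message UTF-8]
    static QByteArray encode(const QString& channel, const QString& message) {
        QByteArray out;
        QBuffer buffer(&out);
        buffer.open(QIODevice::WriteOnly);

        QByteArray channelUtf8 = channel.toUtf8();
        quint16 channelLength = channelUtf8.length();
        buffer.write(reinterpret_cast<const char*>(&channelLength), sizeof(channelLength));
        buffer.write(channelUtf8);

        QByteArray messageUtf8 = message.toUtf8();
        quint16 messageLength = messageUtf8.length();
        buffer.write(reinterpret_cast<const char*>(&messageLength), sizeof(messageLength));
        buffer.write(messageUtf8);
        return out;
    }

    static void decode(QByteArray packetData, QString& channel, QString& message) {
        QBuffer packet(&packetData);
        packet.open(QIODevice::ReadOnly);

        quint16 channelLength;
        packet.read(reinterpret_cast<char*>(&channelLength), sizeof(channelLength));
        channel = QString::fromUtf8(packet.read(channelLength));

        quint16 messageLength;
        packet.read(reinterpret_cast<char*>(&messageLength), sizeof(messageLength));
        message = QString::fromUtf8(packet.read(messageLength));
    }

    int main() {
        QString channel, message;
        decode(encode("chat", "hello mixer"), channel, message);
        qDebug() << channel << message;   // "chat" "hello mixer"
    }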
|
||||
|
||||
void MessagesMixer::handleMessagesSubscribe(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
Q_ASSERT(message->getType() == PacketType::MessagesSubscribe);
|
||||
QString channel = QString::fromUtf8(message->getMessage());
|
||||
qDebug() << "Node [" << senderNode->getUUID() << "] subscribed to channel:" << channel;
|
||||
_channelSubscribers[channel] << senderNode->getUUID();
|
||||
}
|
||||
|
||||
void MessagesMixer::handleMessagesUnsubscribe(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
Q_ASSERT(message->getType() == PacketType::MessagesUnsubscribe);
|
||||
QString channel = QString::fromUtf8(message->getMessage());
|
||||
qDebug() << "Node [" << senderNode->getUUID() << "] unsubscribed from channel:" << channel;
|
||||
|
||||
if (_channelSubscribers.contains(channel)) {
|
||||
_channelSubscribers[channel].remove(senderNode->getUUID());
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME - make these stats relevant
|
||||
void MessagesMixer::sendStatsPacket() {
|
||||
QJsonObject statsObject;
|
||||
QJsonObject messagesObject;
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
QJsonObject statsObject, messagesMixerObject;
|
||||
|
||||
// add stats for each listerner
|
||||
nodeList->eachNode([&](const SharedNodePointer& node) {
|
||||
QJsonObject messagesStats;
|
||||
|
||||
// add the key to ask the domain-server for a username replacement, if it has it
|
||||
messagesStats[USERNAME_UUID_REPLACEMENT_STATS_KEY] = uuidStringWithoutCurlyBraces(node->getUUID());
|
||||
messagesStats["outbound_kbps"] = node->getOutboundBandwidth();
|
||||
messagesStats["inbound_kbps"] = node->getInboundBandwidth();
|
||||
|
||||
messagesObject[uuidStringWithoutCurlyBraces(node->getUUID())] = messagesStats;
|
||||
DependencyManager::get<NodeList>()->eachNode([&](const SharedNodePointer& node) {
|
||||
QJsonObject clientStats;
|
||||
clientStats[USERNAME_UUID_REPLACEMENT_STATS_KEY] = uuidStringWithoutCurlyBraces(node->getUUID());
|
||||
clientStats["outbound_kbps"] = node->getOutboundBandwidth();
|
||||
clientStats["inbound_kbps"] = node->getInboundBandwidth();
|
||||
messagesMixerObject[uuidStringWithoutCurlyBraces(node->getUUID())] = clientStats;
|
||||
});
|
||||
|
||||
statsObject["messages"] = messagesObject;
|
||||
statsObject["messages"] = messagesMixerObject;
|
||||
ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
|
||||
}
|
||||
|
||||
void MessagesMixer::run() {
|
||||
ThreadedAssignment::commonInit(MESSAGES_MIXER_LOGGING_NAME, NodeType::MessagesMixer);
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
// The messages-mixer currently does currently have any domain settings. If it did, they would be
|
||||
// synchronously grabbed here.
|
||||
}
|
||||
DependencyManager::get<NodeList>()->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
}
|
|
@ -22,7 +22,6 @@ class MessagesMixer : public ThreadedAssignment {
|
|||
Q_OBJECT
|
||||
public:
|
||||
MessagesMixer(ReceivedMessage& message);
|
||||
~MessagesMixer();
|
||||
|
||||
public slots:
|
||||
void run();
|
||||
|
|
|
@ -1113,9 +1113,9 @@ void OctreeServer::domainSettingsRequestComplete() {
|
|||
#endif
|
||||
|
||||
nodeList->linkedDataCreateCallback = [] (Node* node) {
|
||||
OctreeQueryNode* newQueryNodeData = _instance->createOctreeQueryNode();
|
||||
newQueryNodeData->init();
|
||||
node->setLinkedData(newQueryNodeData);
|
||||
auto queryNodeData = _instance->createOctreeQueryNode();
|
||||
queryNodeData->init();
|
||||
node->setLinkedData(std::move(queryNodeData));
|
||||
};
|
||||
|
||||
srand((unsigned)time(0));
|
||||
|
|
|
@ -12,6 +12,8 @@
|
|||
#ifndef hifi_OctreeServer_h
|
||||
#define hifi_OctreeServer_h
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QStringList>
|
||||
#include <QDateTime>
|
||||
#include <QtCore/QCoreApplication>
|
||||
|
@ -64,7 +66,7 @@ public:
|
|||
QByteArray getPersistFileContents() const { return (_persistThread) ? _persistThread->getPersistFileContents() : QByteArray(); }
|
||||
|
||||
// Subclasses must implement these methods
|
||||
virtual OctreeQueryNode* createOctreeQueryNode() = 0;
|
||||
virtual std::unique_ptr<OctreeQueryNode> createOctreeQueryNode() = 0;
|
||||
virtual char getMyNodeType() const = 0;
|
||||
virtual PacketType getMyQueryMessageType() const = 0;
|
||||
virtual const char* getMyServerName() const = 0;
|
||||
|
|
cmake/externals/quazip/CMakeLists.txt (vendored, new file, 43 lines)
@ -0,0 +1,43 @@
|
|||
set(EXTERNAL_NAME quazip)
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
cmake_policy(SET CMP0046 OLD)
|
||||
|
||||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL http://s3-us-west-1.amazonaws.com/hifi-production/dependencies/quazip-0.6.2.zip
|
||||
URL_MD5 514851970f1a14d815bdc3ad6267af4d
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DCMAKE_PREFIX_PATH=$ENV{QT_CMAKE_PREFIX_PATH} -DCMAKE_INSTALL_NAME_DIR:PATH=<INSTALL_DIR>/lib -DZLIB_ROOT=${ZLIB_ROOT}
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
add_dependencies(quazip zlib)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES
|
||||
FOLDER "hidden/externals"
|
||||
INSTALL_NAME_DIR ${INSTALL_DIR}/lib
|
||||
BUILD_WITH_INSTALL_RPATH True)
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include CACHE PATH "List of QuaZip include directories")
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${${EXTERNAL_NAME_UPPER}_INCLUDE_DIR} CACHE PATH "List of QuaZip include directories")
|
||||
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${INSTALL_DIR}/lib CACHE FILEPATH "Location of QuaZip DLL")
|
||||
|
||||
if (APPLE)
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
|
||||
elseif (WIN32)
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/quazip.lib CACHE FILEPATH "Location of QuaZip release library")
|
||||
else ()
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip.so CACHE FILEPATH "Location of QuaZip release library")
|
||||
endif ()
|
||||
|
||||
include(SelectLibraryConfigurations)
|
||||
select_library_configurations(${EXTERNAL_NAME_UPPER})
|
||||
|
||||
# Force selected libraries into the cache
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARY ${${EXTERNAL_NAME_UPPER}_LIBRARY} CACHE FILEPATH "Location of QuaZip libraries")
|
||||
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Location of QuaZip libraries")
|
cmake/externals/zlib/CMakeLists.txt (vendored, 15 lines changed)
@ -4,19 +4,20 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
|||
include(ExternalProject)
|
||||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL http://zlib.net/zlib128.zip
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
${EXTERNAL_NAME}
|
||||
URL http://zlib.net/zlib128.zip
|
||||
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
|
||||
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
|
||||
LOG_DOWNLOAD 1
|
||||
LOG_CONFIGURE 1
|
||||
LOG_BUILD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
set(${EXTERNAL_NAME_UPPER}_ROOT ${INSTALL_DIR} CACHE PATH "Path for Zlib install root")
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include CACHE PATH "List of zlib include directories")
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${${EXTERNAL_NAME_UPPER}_INCLUDE_DIR} CACHE PATH "List of zlib include directories")
|
||||
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${INSTALL_DIR}/bin CACHE FILEPATH "Location of ZLib DLL")
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
#
|
||||
|
||||
macro(COPY_DLLS_BESIDE_WINDOWS_EXECUTABLE)
|
||||
macro(PACKAGE_LIBRARIES_FOR_DEPLOYMENT)
|
||||
|
||||
if (WIN32)
|
||||
configure_file(
|
||||
|
@ -18,11 +18,7 @@ macro(COPY_DLLS_BESIDE_WINDOWS_EXECUTABLE)
|
|||
@ONLY
|
||||
)
|
||||
|
||||
if (APPLE)
|
||||
set(PLUGIN_PATH "interface.app/Contents/MacOS/plugins")
|
||||
else()
|
||||
set(PLUGIN_PATH "plugins")
|
||||
endif()
|
||||
set(PLUGIN_PATH "plugins")
|
||||
|
||||
# add a post-build command to copy DLLs beside the executable
|
||||
add_custom_command(
|
||||
|
@ -46,5 +42,18 @@ macro(COPY_DLLS_BESIDE_WINDOWS_EXECUTABLE)
|
|||
POST_BUILD
|
||||
COMMAND CMD /C "SET PATH=%PATH%;${QT_DIR}/bin && ${WINDEPLOYQT_COMMAND} $<$<OR:$<CONFIG:Release>,$<CONFIG:MinSizeRel>,$<CONFIG:RelWithDebInfo>>:--release> $<TARGET_FILE:${TARGET_NAME}>"
|
||||
)
|
||||
elseif (DEFINED BUILD_BUNDLE AND BUILD_BUNDLE AND APPLE)
|
||||
find_program(MACDEPLOYQT_COMMAND macdeployqt PATHS ${QT_DIR}/bin NO_DEFAULT_PATH)
|
||||
|
||||
if (NOT MACDEPLOYQT_COMMAND)
|
||||
message(FATAL_ERROR "Could not find macdeployqt at ${QT_DIR}/bin. macdeployqt is required.")
|
||||
endif ()
|
||||
|
||||
# add a post-build command to call macdeployqt to copy Qt plugins
|
||||
add_custom_command(
|
||||
TARGET ${TARGET_NAME}
|
||||
POST_BUILD
|
||||
COMMAND ${MACDEPLOYQT_COMMAND} ${CMAKE_CURRENT_BINARY_DIR}/\${CONFIGURATION}/${TARGET_NAME}.app -verbose 0
|
||||
)
|
||||
endif ()
|
||||
endmacro()
|
||||
endmacro()
|
||||
|
|
|
@ -22,8 +22,11 @@ macro(SETUP_HIFI_PROJECT)
|
|||
endif ()
|
||||
endforeach()
|
||||
|
||||
# add the executable, include additional optional sources
|
||||
add_executable(${TARGET_NAME} ${TARGET_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
|
||||
if (DEFINED BUILD_BUNDLE AND BUILD_BUNDLE AND APPLE)
|
||||
add_executable(${TARGET_NAME} MACOSX_BUNDLE ${TARGET_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
|
||||
else ()
|
||||
add_executable(${TARGET_NAME} ${TARGET_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
|
||||
endif()
|
||||
|
||||
set(${TARGET_NAME}_DEPENDENCY_QT_MODULES ${ARGN})
|
||||
list(APPEND ${TARGET_NAME}_DEPENDENCY_QT_MODULES Core)
|
||||
|
|
cmake/macros/TargetQuazip.cmake (new file, 16 lines)
@ -0,0 +1,16 @@
|
|||
#
|
||||
# Copyright 2015 High Fidelity, Inc.
|
||||
# Created by Leonardo Murillo on 2015/11/20
|
||||
#
|
||||
# Distributed under the Apache License, Version 2.0.
|
||||
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
#
|
||||
macro(TARGET_QUAZIP)
|
||||
add_dependency_external_projects(quazip)
|
||||
find_package(QuaZip REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PUBLIC ${QUAZIP_INCLUDE_DIRS})
|
||||
target_link_libraries(${TARGET_NAME} ${QUAZIP_LIBRARIES})
|
||||
if (WIN32)
|
||||
add_paths_to_fixup_libs(${QUAZIP_DLL_PATH})
|
||||
endif ()
|
||||
endmacro()
|
cmake/modules/FindQuaZip.cmake (new file, 29 lines)
@ -0,0 +1,29 @@
|
|||
#
|
||||
# FindQuaZip.h
|
||||
# StackManagerQt/cmake/modules
|
||||
#
|
||||
# Created by Mohammed Nafees.
|
||||
# Copyright (c) 2014 High Fidelity. All rights reserved.
|
||||
#
|
||||
|
||||
# QUAZIP_FOUND - QuaZip library was found
|
||||
# QUAZIP_INCLUDE_DIR - Path to QuaZip include dir
|
||||
# QUAZIP_INCLUDE_DIRS - Path to QuaZip and zlib include dir (combined from QUAZIP_INCLUDE_DIR + ZLIB_INCLUDE_DIR)
|
||||
# QUAZIP_LIBRARIES - List of QuaZip libraries
|
||||
# QUAZIP_ZLIB_INCLUDE_DIR - The include dir of zlib headers
|
||||
|
||||
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
|
||||
hifi_library_search_hints("quazip")
|
||||
|
||||
if (WIN32)
|
||||
find_path(QUAZIP_INCLUDE_DIRS quazip.h PATH_SUFFIXES include/quazip HINTS ${QUAZIP_SEARCH_DIRS})
|
||||
elseif (APPLE)
|
||||
find_path(QUAZIP_INCLUDE_DIRS quazip.h PATH_SUFFIXES include/quazip HINTS ${QUAZIP_SEARCH_DIRS})
|
||||
else ()
|
||||
find_path(QUAZIP_INCLUDE_DIRS quazip.h PATH_SUFFIXES quazip HINTS ${QUAZIP_SEARCH_DIRS})
|
||||
endif ()
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(QUAZIP DEFAULT_MSG QUAZIP_INCLUDE_DIRS)
|
||||
|
||||
mark_as_advanced(QUAZIP_INCLUDE_DIRS QUAZIP_SEARCH_DIRS)
|
|
@ -37,4 +37,4 @@ if (UNIX)
|
|||
endif (UNIX)
|
||||
|
||||
include_application_version()
|
||||
copy_dlls_beside_windows_executable()
|
||||
package_libraries_for_deployment()
|
||||
|
|
|
@ -11,6 +11,8 @@
|
|||
|
||||
#include "DomainServer.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QDir>
|
||||
#include <QJsonDocument>
|
||||
#include <QJsonObject>
|
||||
|
@ -1097,29 +1099,37 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
|
||||
if (connection->requestOperation() == QNetworkAccessManager::GetOperation
|
||||
&& assignmentRegex.indexIn(url.path()) != -1) {
|
||||
QUuid matchingUUID = QUuid(assignmentRegex.cap(1));
|
||||
|
||||
SharedAssignmentPointer matchingAssignment = _allAssignments.value(matchingUUID);
|
||||
if (!matchingAssignment) {
|
||||
// check if we have a pending assignment that matches this temp UUID, and it is a scripted assignment
|
||||
QUuid assignmentUUID = _gatekeeper.assignmentUUIDForPendingAssignment(matchingUUID);
|
||||
if (!assignmentUUID.isNull()) {
|
||||
matchingAssignment = _allAssignments.value(assignmentUUID);
|
||||
|
||||
if (matchingAssignment && matchingAssignment->getType() == Assignment::AgentType) {
|
||||
// we have a matching assignment and it is for the right type, have the HTTP manager handle it
|
||||
// via correct URL for the script so the client can download
|
||||
|
||||
QUrl scriptURL = url;
|
||||
scriptURL.setPath(URI_ASSIGNMENT + "/scripts/"
|
||||
+ uuidStringWithoutCurlyBraces(assignmentUUID));
|
||||
|
||||
// have the HTTPManager serve the appropriate script file
|
||||
return _httpManager.handleHTTPRequest(connection, scriptURL, true);
|
||||
}
|
||||
}
|
||||
QUuid nodeUUID = QUuid(assignmentRegex.cap(1));
|
||||
|
||||
auto matchingNode = nodeList->nodeWithUUID(nodeUUID);
|
||||
|
||||
// don't handle if we don't have a matching node
|
||||
if (!matchingNode) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
auto nodeData = dynamic_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
|
||||
|
||||
// don't handle if we don't have node data for this node
|
||||
if (!nodeData) {
|
||||
return false;
|
||||
}
|
||||
|
||||
SharedAssignmentPointer matchingAssignment = _allAssignments.value(nodeData->getAssignmentUUID());
|
||||
|
||||
// check if we have an assignment that matches this temp UUID, and it is a scripted assignment
|
||||
if (matchingAssignment && matchingAssignment->getType() == Assignment::AgentType) {
|
||||
// we have a matching assignment and it is for the right type, have the HTTP manager handle it
|
||||
// via correct URL for the script so the client can download
|
||||
|
||||
QUrl scriptURL = url;
|
||||
scriptURL.setPath(URI_ASSIGNMENT + "/scripts/"
|
||||
+ uuidStringWithoutCurlyBraces(matchingAssignment->getUUID()));
|
||||
|
||||
// have the HTTPManager serve the appropriate script file
|
||||
return _httpManager.handleHTTPRequest(connection, scriptURL, true);
|
||||
}
|
||||
|
||||
// request not handled
|
||||
return false;
|
||||
}
|
||||
|
@ -1640,7 +1650,7 @@ void DomainServer::refreshStaticAssignmentAndAddToQueue(SharedAssignmentPointer&
|
|||
|
||||
void DomainServer::nodeAdded(SharedNodePointer node) {
|
||||
// we don't use updateNodeWithData, so add the DomainServerNodeData to the node here
|
||||
node->setLinkedData(new DomainServerNodeData());
|
||||
node->setLinkedData(std::unique_ptr<DomainServerNodeData> { new DomainServerNodeData() });
|
||||
}
|
||||
|
||||
void DomainServer::nodeKilled(SharedNodePointer node) {
|
||||
|
@ -1835,14 +1845,25 @@ void DomainServer::processNodeDisconnectRequestPacket(QSharedPointer<ReceivedMes
|
|||
|
||||
qDebug() << "Received a disconnect request from node with UUID" << nodeUUID;
|
||||
|
||||
if (limitedNodeList->killNodeWithUUID(nodeUUID)) {
|
||||
// we want to check what type this node was before going to kill it so that we can avoid sending the RemovedNode
|
||||
// packet to nodes that don't care about this type
|
||||
auto nodeToKill = limitedNodeList->nodeWithUUID(nodeUUID);
|
||||
|
||||
if (nodeToKill) {
|
||||
auto nodeType = nodeToKill->getType();
|
||||
limitedNodeList->killNodeWithUUID(nodeUUID);
|
||||
|
||||
static auto removedNodePacket = NLPacket::create(PacketType::DomainServerRemovedNode, NUM_BYTES_RFC4122_UUID);
|
||||
|
||||
removedNodePacket->reset();
|
||||
removedNodePacket->write(nodeUUID.toRfc4122());
|
||||
|
||||
// broadcast out the DomainServerRemovedNode message
|
||||
limitedNodeList->eachNode([&limitedNodeList](const SharedNodePointer& otherNode){
|
||||
limitedNodeList->eachMatchingNode([&nodeType](const SharedNodePointer& otherNode) -> bool {
|
||||
// only send the removed node packet to nodes that care about the type of node this was
|
||||
auto nodeLinkedData = dynamic_cast<DomainServerNodeData*>(otherNode->getLinkedData());
|
||||
return (nodeLinkedData != nullptr) && nodeLinkedData->getNodeInterestSet().contains(nodeType);
|
||||
}, [&limitedNodeList](const SharedNodePointer& otherNode){
|
||||
limitedNodeList->sendUnreliablePacket(*removedNodePacket, *otherNode);
|
||||
});
|
||||
}
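The disconnect handling above swaps eachNode() for eachMatchingNode(), which takes a predicate to select nodes and a functor to act on the matches. A minimal sketch of that predicate/functor iteration with plain standard-library types (Node and NodeType here are illustrative, not the real networking classes):

    #include <functional>
    #include <iostream>
    #include <memory>
    #include <vector>

    enum class NodeType { Agent, AudioMixer, EntityServer };

    struct Node {
        NodeType type;
        bool interestedInAgents;
    };

    // Minimal stand-in for the node list's eachMatchingNode(): visit only the
    // nodes the predicate selects.
    void eachMatchingNode(const std::vector<std::shared_ptr<Node>>& nodes,
                          const std::function<bool(const std::shared_ptr<Node>&)>& predicate,
                          const std::function<void(const std::shared_ptr<Node>&)>& functor) {
        for (const auto& node : nodes) {
            if (predicate(node)) {
                functor(node);
            }
        }
    }

    int main() {
        std::vector<std::shared_ptr<Node>> nodes {
            std::make_shared<Node>(Node{ NodeType::AudioMixer, true }),
            std::make_shared<Node>(Node{ NodeType::EntityServer, false })
        };

        // Like the DomainServerRemovedNode broadcast above: only notify nodes
        // whose interest set contains the killed node's type.
        eachMatchingNode(nodes,
            [](const std::shared_ptr<Node>& node) { return node->interestedInAgents; },
            [](const std::shared_ptr<Node>& node) {
                std::cout << "sending removed-node packet to node of type "
                          << static_cast<int>(node->type) << "\n";
            });
    }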
|
||||
|
|
|
@ -271,7 +271,7 @@ function loadBirds(howMany) {
|
|||
dimensions: { x: BIRD_SIZE, y: BIRD_SIZE, z: BIRD_SIZE },
|
||||
gravity: { x: 0, y: BIRD_GRAVITY, z: 0 },
|
||||
velocity: { x: 0, y: -0.1, z: 0 },
|
||||
linearDamping: LINEAR_DAMPING,
|
||||
damping: LINEAR_DAMPING,
|
||||
collisionsWillMove: true,
|
||||
lifetime: STARTING_LIFETIME,
|
||||
color: colors[whichBird]
|
||||
|
|
|
@ -1,673 +0,0 @@
|
|||
//
|
||||
// bot_procedural.js
|
||||
// hifi
|
||||
//
|
||||
// Created by Ben Arnold on 7/29/2013
|
||||
//
|
||||
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
|
||||
//
|
||||
// This is an example script that demonstrates an NPC avatar.
|
||||
//
|
||||
//
|
||||
|
||||
//For procedural walk animation
|
||||
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
|
||||
Script.include(HIFI_PUBLIC_BUCKET + "scripts/acScripts/proceduralAnimationAPI.js");
|
||||
|
||||
var procAnimAPI = new ProcAnimAPI();
|
||||
|
||||
function getRandomFloat(min, max) {
|
||||
return Math.random() * (max - min) + min;
|
||||
}
|
||||
|
||||
function getRandomInt (min, max) {
|
||||
return Math.floor(Math.random() * (max - min + 1)) + min;
|
||||
}
|
||||
|
||||
function printVector(string, vector) {
|
||||
print(string + " " + vector.x + ", " + vector.y + ", " + vector.z);
|
||||
}
|
||||
|
||||
var CHANCE_OF_MOVING = 0.005;
|
||||
var CHANCE_OF_SOUND = 0.005;
|
||||
var CHANCE_OF_HEAD_TURNING = 0.01;
|
||||
var CHANCE_OF_BIG_MOVE = 1.0;
|
||||
|
||||
var isMoving = false;
|
||||
var isTurningHead = false;
|
||||
var isPlayingAudio = false;
|
||||
|
||||
var X_MIN = 0.50;
|
||||
var X_MAX = 15.60;
|
||||
var Z_MIN = 0.50;
|
||||
var Z_MAX = 15.10;
|
||||
var Y_FEET = 0.0;
|
||||
var AVATAR_PELVIS_HEIGHT = 0.84;
|
||||
var Y_PELVIS = Y_FEET + AVATAR_PELVIS_HEIGHT;
|
||||
var MAX_PELVIS_DELTA = 2.5;
|
||||
|
||||
var MOVE_RANGE_SMALL = 3.0;
|
||||
var MOVE_RANGE_BIG = 10.0;
|
||||
var TURN_RANGE = 70.0;
|
||||
var STOP_TOLERANCE = 0.05;
|
||||
var MOVE_RATE = 0.05;
|
||||
var TURN_RATE = 0.2;
|
||||
var HEAD_TURN_RATE = 0.05;
|
||||
var PITCH_RANGE = 15.0;
|
||||
var YAW_RANGE = 35.0;
|
||||
|
||||
var firstPosition = { x: getRandomFloat(X_MIN, X_MAX), y: Y_PELVIS, z: getRandomFloat(Z_MIN, Z_MAX) };
|
||||
var targetPosition = { x: 0, y: 0, z: 0 };
|
||||
var targetOrientation = { x: 0, y: 0, z: 0, w: 0 };
|
||||
var currentOrientation = { x: 0, y: 0, z: 0, w: 0 };
|
||||
var targetHeadPitch = 0.0;
|
||||
var targetHeadYaw = 0.0;
|
||||
|
||||
var basePelvisHeight = 0.0;
|
||||
var pelvisOscillatorPosition = 0.0;
|
||||
var pelvisOscillatorVelocity = 0.0;
|
||||
|
||||
function clamp(val, min, max){
|
||||
return Math.max(min, Math.min(max, val))
|
||||
}
|
||||
|
||||
//Array of all valid bot numbers
|
||||
var validBotNumbers = [];
|
||||
|
||||
// right now we only use bot 63, since many other bots have messed up skeletons and LOD issues
|
||||
var botNumber = 63;//getRandomInt(0, 99);
|
||||
|
||||
var newFaceFilePrefix = "ron";
|
||||
|
||||
var newBodyFilePrefix = "bot" + botNumber;
|
||||
|
||||
// set the face model fst using the bot number
|
||||
// there is no need to change the body model - we're using the default
|
||||
Avatar.faceModelURL = HIFI_PUBLIC_BUCKET + "meshes/" + newFaceFilePrefix + ".fst";
|
||||
Avatar.skeletonModelURL = HIFI_PUBLIC_BUCKET + "meshes/" + newBodyFilePrefix + "_a.fst";
|
||||
Avatar.billboardURL = HIFI_PUBLIC_BUCKET + "meshes/billboards/bot" + botNumber + ".png";
|
||||
|
||||
Agent.isAvatar = true;
|
||||
Agent.isListeningToAudioStream = true;
|
||||
|
||||
// change the avatar's position to the random one
|
||||
Avatar.position = firstPosition;
|
||||
basePelvisHeight = firstPosition.y;
|
||||
printVector("New dancer, position = ", Avatar.position);
|
||||
|
||||
function loadSounds() {
|
||||
var sound_filenames = ["AB1.raw", "Anchorman2.raw", "B1.raw", "B1.raw", "Bale1.raw", "Bandcamp.raw",
|
||||
"Big1.raw", "Big2.raw", "Brian1.raw", "Buster1.raw", "CES1.raw", "CES2.raw", "CES3.raw", "CES4.raw",
|
||||
"Carrie1.raw", "Carrie3.raw", "Charlotte1.raw", "EN1.raw", "EN2.raw", "EN3.raw", "Eugene1.raw", "Francesco1.raw",
|
||||
"Italian1.raw", "Japanese1.raw", "Leigh1.raw", "Lucille1.raw", "Lucille2.raw", "MeanGirls.raw", "Murray2.raw",
|
||||
"Nigel1.raw", "PennyLane.raw", "Pitt1.raw", "Ricardo.raw", "SN.raw", "Sake1.raw", "Samantha1.raw", "Samantha2.raw",
|
||||
"Spicoli1.raw", "Supernatural.raw", "Swearengen1.raw", "TheDude.raw", "Tony.raw", "Triumph1.raw", "Uma1.raw",
|
||||
"Walken1.raw", "Walken2.raw", "Z1.raw", "Z2.raw"
|
||||
];
|
||||
|
||||
var footstep_filenames = ["FootstepW2Left-12db.wav", "FootstepW2Right-12db.wav", "FootstepW3Left-12db.wav", "FootstepW3Right-12db.wav",
|
||||
"FootstepW5Left-12db.wav", "FootstepW5Right-12db.wav"];
|
||||
|
||||
var SOUND_BASE_URL = HIFI_PUBLIC_BUCKET + "sounds/Cocktail+Party+Snippets/Raws/";
|
||||
|
||||
var FOOTSTEP_BASE_URL = HIFI_PUBLIC_BUCKET + "sounds/Footsteps/";
|
||||
|
||||
for (var i = 0; i < sound_filenames.length; i++) {
|
||||
sounds.push(SoundCache.getSound(SOUND_BASE_URL + sound_filenames[i]));
|
||||
}
|
||||
|
||||
for (var i = 0; i < footstep_filenames.length; i++) {
|
||||
footstepSounds.push(SoundCache.getSound(FOOTSTEP_BASE_URL + footstep_filenames[i]));
|
||||
}
|
||||
}
|
||||
|
||||
var sounds = [];
|
||||
var footstepSounds = [];
|
||||
loadSounds();
|
||||
|
||||
|
||||
function playRandomSound() {
|
||||
if (!Agent.isPlayingAvatarSound) {
|
||||
var whichSound = Math.floor((Math.random() * sounds.length));
|
||||
Agent.playAvatarSound(sounds[whichSound]);
|
||||
}
|
||||
}
|
||||
|
||||
function playRandomFootstepSound() {
|
||||
var whichSound = Math.floor((Math.random() * footstepSounds.length));
|
||||
Audio.playSound(footstepSounds[whichSound], {
|
||||
position: Avatar.position,
|
||||
volume: 1.0
|
||||
});
|
||||
}
|
||||
|
||||
// ************************************ Facial Animation **********************************
|
||||
var allBlendShapes = [];
|
||||
var targetBlendCoefficient = [];
|
||||
var currentBlendCoefficient = [];
|
||||
|
||||
//Blendshape constructor
|
||||
function addBlendshapeToPose(pose, shapeIndex, val) {
|
||||
var index = pose.blendShapes.length;
|
||||
pose.blendShapes[index] = {shapeIndex: shapeIndex, val: val };
|
||||
}
|
||||
//The mood of the avatar, determines face. 0 = happy, 1 = angry, 2 = sad.
|
||||
|
||||
//Randomly pick avatar mood. 80% happy, 10% mad 10% sad
|
||||
var randMood = Math.floor(Math.random() * 11);
|
||||
var avatarMood;
|
||||
if (randMood == 0) {
|
||||
avatarMood = 1;
|
||||
} else if (randMood == 2) {
|
||||
avatarMood = 2;
|
||||
} else {
|
||||
avatarMood = 0;
|
||||
}
|
||||
|
||||
var currentExpression = -1;
|
||||
//Face pose constructor
|
||||
var happyPoses = [];
|
||||
|
||||
happyPoses[0] = {blendShapes: []};
|
||||
addBlendshapeToPose(happyPoses[0], 28, 0.7); //MouthSmile_L
|
||||
addBlendshapeToPose(happyPoses[0], 29, 0.7); //MouthSmile_R
|
||||
|
||||
happyPoses[1] = {blendShapes: []};
|
||||
addBlendshapeToPose(happyPoses[1], 28, 1.0); //MouthSmile_L
|
||||
addBlendshapeToPose(happyPoses[1], 29, 1.0); //MouthSmile_R
|
||||
addBlendshapeToPose(happyPoses[1], 21, 0.2); //JawOpen
|
||||
|
||||
happyPoses[2] = {blendShapes: []};
|
||||
addBlendshapeToPose(happyPoses[2], 28, 1.0); //MouthSmile_L
|
||||
addBlendshapeToPose(happyPoses[2], 29, 1.0); //MouthSmile_R
|
||||
addBlendshapeToPose(happyPoses[2], 21, 0.5); //JawOpen
|
||||
addBlendshapeToPose(happyPoses[2], 46, 1.0); //CheekSquint_L
|
||||
addBlendshapeToPose(happyPoses[2], 47, 1.0); //CheekSquint_R
|
||||
addBlendshapeToPose(happyPoses[2], 17, 1.0); //BrowsU_L
|
||||
addBlendshapeToPose(happyPoses[2], 18, 1.0); //BrowsU_R
|
||||
|
||||
var angryPoses = [];
|
||||
|
||||
angryPoses[0] = {blendShapes: []};
|
||||
addBlendshapeToPose(angryPoses[0], 26, 0.6); //MouthFrown_L
|
||||
addBlendshapeToPose(angryPoses[0], 27, 0.6); //MouthFrown_R
|
||||
addBlendshapeToPose(angryPoses[0], 14, 0.6); //BrowsD_L
|
||||
addBlendshapeToPose(angryPoses[0], 15, 0.6); //BrowsD_R
|
||||
|
||||
angryPoses[1] = {blendShapes: []};
|
||||
addBlendshapeToPose(angryPoses[1], 26, 0.9); //MouthFrown_L
|
||||
addBlendshapeToPose(angryPoses[1], 27, 0.9); //MouthFrown_R
|
||||
addBlendshapeToPose(angryPoses[1], 14, 0.9); //BrowsD_L
|
||||
addBlendshapeToPose(angryPoses[1], 15, 0.9); //BrowsD_R
|
||||
|
||||
angryPoses[2] = {blendShapes: []};
|
||||
addBlendshapeToPose(angryPoses[2], 26, 1.0); //MouthFrown_L
|
||||
addBlendshapeToPose(angryPoses[2], 27, 1.0); //MouthFrown_R
|
||||
addBlendshapeToPose(angryPoses[2], 14, 1.0); //BrowsD_L
|
||||
addBlendshapeToPose(angryPoses[2], 15, 1.0); //BrowsD_R
|
||||
addBlendshapeToPose(angryPoses[2], 21, 0.5); //JawOpen
|
||||
addBlendshapeToPose(angryPoses[2], 46, 1.0); //CheekSquint_L
|
||||
addBlendshapeToPose(angryPoses[2], 47, 1.0); //CheekSquint_R
|
||||
|
||||
var sadPoses = [];
|
||||
|
||||
sadPoses[0] = {blendShapes: []};
|
||||
addBlendshapeToPose(sadPoses[0], 26, 0.6); //MouthFrown_L
|
||||
addBlendshapeToPose(sadPoses[0], 27, 0.6); //MouthFrown_R
|
||||
addBlendshapeToPose(sadPoses[0], 16, 0.2); //BrowsU_C
|
||||
addBlendshapeToPose(sadPoses[0], 2, 0.6); //EyeSquint_L
|
||||
addBlendshapeToPose(sadPoses[0], 3, 0.6); //EyeSquint_R
|
||||
|
||||
sadPoses[1] = {blendShapes: []};
|
||||
addBlendshapeToPose(sadPoses[1], 26, 0.9); //MouthFrown_L
|
||||
addBlendshapeToPose(sadPoses[1], 27, 0.9); //MouthFrown_R
|
||||
addBlendshapeToPose(sadPoses[1], 16, 0.6); //BrowsU_C
|
||||
addBlendshapeToPose(sadPoses[1], 2, 0.9); //EyeSquint_L
|
||||
addBlendshapeToPose(sadPoses[1], 3, 0.9); //EyeSquint_R
|
||||
|
||||
sadPoses[2] = {blendShapes: []};
|
||||
addBlendshapeToPose(sadPoses[2], 26, 1.0); //MouthFrown_L
|
||||
addBlendshapeToPose(sadPoses[2], 27, 1.0); //MouthFrown_R
|
||||
addBlendshapeToPose(sadPoses[2], 16, 0.1); //BrowsU_C
|
||||
addBlendshapeToPose(sadPoses[2], 2, 1.0); //EyeSquint_L
|
||||
addBlendshapeToPose(sadPoses[2], 3, 1.0); //EyeSquint_R
|
||||
addBlendshapeToPose(sadPoses[2], 21, 0.3); //JawOpen
|
||||
|
||||
var facePoses = [];
|
||||
facePoses[0] = happyPoses;
|
||||
facePoses[1] = angryPoses;
|
||||
facePoses[2] = sadPoses;
|
||||
|
||||
|
||||
function addBlendShape(s) {
|
||||
allBlendShapes[allBlendShapes.length] = s;
|
||||
}
|
||||
|
||||
//It is imperative that the following blendshapes are all present and are in the correct order
|
||||
addBlendShape("EyeBlink_L"); //0
|
||||
addBlendShape("EyeBlink_R"); //1
|
||||
addBlendShape("EyeSquint_L"); //2
|
||||
addBlendShape("EyeSquint_R"); //3
|
||||
addBlendShape("EyeDown_L"); //4
|
||||
addBlendShape("EyeDown_R"); //5
|
||||
addBlendShape("EyeIn_L"); //6
|
||||
addBlendShape("EyeIn_R"); //7
|
||||
addBlendShape("EyeOpen_L"); //8
|
||||
addBlendShape("EyeOpen_R"); //9
|
||||
addBlendShape("EyeOut_L"); //10
|
||||
addBlendShape("EyeOut_R"); //11
|
||||
addBlendShape("EyeUp_L"); //12
|
||||
addBlendShape("EyeUp_R"); //13
|
||||
addBlendShape("BrowsD_L"); //14
|
||||
addBlendShape("BrowsD_R"); //15
|
||||
addBlendShape("BrowsU_C"); //16
|
||||
addBlendShape("BrowsU_L"); //17
|
||||
addBlendShape("BrowsU_R"); //18
|
||||
addBlendShape("JawFwd"); //19
|
||||
addBlendShape("JawLeft"); //20
|
||||
addBlendShape("JawOpen"); //21
|
||||
addBlendShape("JawChew"); //22
|
||||
addBlendShape("JawRight"); //23
|
||||
addBlendShape("MouthLeft"); //24
|
||||
addBlendShape("MouthRight"); //25
|
||||
addBlendShape("MouthFrown_L"); //26
|
||||
addBlendShape("MouthFrown_R"); //27
|
||||
addBlendShape("MouthSmile_L"); //28
|
||||
addBlendShape("MouthSmile_R"); //29
|
||||
addBlendShape("MouthDimple_L"); //30
|
||||
addBlendShape("MouthDimple_R"); //31
|
||||
addBlendShape("LipsStretch_L"); //32
|
||||
addBlendShape("LipsStretch_R"); //33
|
||||
addBlendShape("LipsUpperClose"); //34
|
||||
addBlendShape("LipsLowerClose"); //35
|
||||
addBlendShape("LipsUpperUp"); //36
|
||||
addBlendShape("LipsLowerDown"); //37
|
||||
addBlendShape("LipsUpperOpen"); //38
|
||||
addBlendShape("LipsLowerOpen"); //39
|
||||
addBlendShape("LipsFunnel"); //40
|
||||
addBlendShape("LipsPucker"); //41
|
||||
addBlendShape("ChinLowerRaise"); //42
|
||||
addBlendShape("ChinUpperRaise"); //43
|
||||
addBlendShape("Sneer"); //44
|
||||
addBlendShape("Puff"); //45
|
||||
addBlendShape("CheekSquint_L"); //46
|
||||
addBlendShape("CheekSquint_R"); //47
|
||||
|
||||
for (var i = 0; i < allBlendShapes.length; i++) {
|
||||
targetBlendCoefficient[i] = 0;
|
||||
currentBlendCoefficient[i] = 0;
|
||||
}
|
||||
|
||||
function setRandomExpression() {
|
||||
|
||||
//Clear all expression data for current expression
|
||||
if (currentExpression != -1) {
|
||||
var expression = facePoses[avatarMood][currentExpression];
|
||||
for (var i = 0; i < expression.blendShapes.length; i++) {
|
||||
targetBlendCoefficient[expression.blendShapes[i].shapeIndex] = 0.0;
|
||||
}
|
||||
}
|
||||
//Get a new current expression
|
||||
currentExpression = Math.floor(Math.random() * facePoses[avatarMood].length);
|
||||
var expression = facePoses[avatarMood][currentExpression];
|
||||
for (var i = 0; i < expression.blendShapes.length; i++) {
|
||||
targetBlendCoefficient[expression.blendShapes[i].shapeIndex] = expression.blendShapes[i].val;
|
||||
}
|
||||
}
|
||||
|
||||
var expressionChangeSpeed = 0.1;
|
||||
function updateBlendShapes(deltaTime) {
|
||||
|
||||
for (var i = 0; i < allBlendShapes.length; i++) {
|
||||
currentBlendCoefficient[i] += (targetBlendCoefficient[i] - currentBlendCoefficient[i]) * expressionChangeSpeed;
|
||||
Avatar.setBlendshape(allBlendShapes[i], currentBlendCoefficient[i]);
|
||||
}
|
||||
}
|
||||
|
||||
var BLINK_SPEED = 0.15;
|
||||
var CHANCE_TO_BLINK = 0.0025;
|
||||
var MAX_BLINK = 0.85;
|
||||
var blink = 0.0;
|
||||
var isBlinking = false;
|
||||
function updateBlinking(deltaTime) {
|
||||
if (isBlinking == false) {
|
||||
if (Math.random() < CHANCE_TO_BLINK) {
|
||||
isBlinking = true;
|
||||
} else {
|
||||
blink -= BLINK_SPEED;
|
||||
if (blink < 0.0) blink = 0.0;
|
||||
}
|
||||
} else {
|
||||
blink += BLINK_SPEED;
|
||||
if (blink > MAX_BLINK) {
|
||||
blink = MAX_BLINK;
|
||||
isBlinking = false;
|
||||
}
|
||||
}
|
||||
|
||||
currentBlendCoefficient[0] = blink;
|
||||
currentBlendCoefficient[1] = blink;
|
||||
targetBlendCoefficient[0] = blink;
|
||||
targetBlendCoefficient[1] = blink;
|
||||
}
|
||||
|
||||
// *************************************************************************************
|
||||
|
||||
//Procedural walk animation using two keyframes
|
||||
//We use a separate array for front and back joints
|
||||
//Pitch, yaw, and roll for the joints
|
||||
var rightAngles = [];
|
||||
var leftAngles = [];
|
||||
//for non mirrored joints such as the spine
|
||||
var middleAngles = [];
|
||||
|
||||
//Actual joint mappings
|
||||
var SHOULDER_JOINT_NUMBER = 15;
|
||||
var ELBOW_JOINT_NUMBER = 16;
|
||||
var JOINT_R_HIP = 1;
|
||||
var JOINT_R_KNEE = 2;
|
||||
var JOINT_L_HIP = 6;
|
||||
var JOINT_L_KNEE = 7;
|
||||
var JOINT_R_ARM = 15;
|
||||
var JOINT_R_FOREARM = 16;
|
||||
var JOINT_L_ARM = 39;
|
||||
var JOINT_L_FOREARM = 40;
|
||||
var JOINT_SPINE = 11;
|
||||
var JOINT_R_FOOT = 3;
|
||||
var JOINT_L_FOOT = 8;
|
||||
var JOINT_R_TOE = 4;
|
||||
var JOINT_L_TOE = 9;
|
||||
|
||||
// ******************************* Animation Is Defined Below *************************************
|
||||
|
||||
var NUM_FRAMES = 2;
|
||||
for (var i = 0; i < NUM_FRAMES; i++) {
|
||||
rightAngles[i] = [];
|
||||
leftAngles[i] = [];
|
||||
middleAngles[i] = [];
|
||||
}
|
||||
//Joint order for actual joint mappings, should be interleaved R,L,R,L,...S,S,S for R = right, L = left, S = single
|
||||
var JOINT_ORDER = [];
|
||||
//*** right / left joints ***
|
||||
var HIP = 0;
|
||||
JOINT_ORDER.push(JOINT_R_HIP);
|
||||
JOINT_ORDER.push(JOINT_L_HIP);
|
||||
var KNEE = 1;
|
||||
JOINT_ORDER.push(JOINT_R_KNEE);
|
||||
JOINT_ORDER.push(JOINT_L_KNEE);
|
||||
var ARM = 2;
|
||||
JOINT_ORDER.push(JOINT_R_ARM);
|
||||
JOINT_ORDER.push(JOINT_L_ARM);
|
||||
var FOREARM = 3;
|
||||
JOINT_ORDER.push(JOINT_R_FOREARM);
|
||||
JOINT_ORDER.push(JOINT_L_FOREARM);
|
||||
var FOOT = 4;
|
||||
JOINT_ORDER.push(JOINT_R_FOOT);
|
||||
JOINT_ORDER.push(JOINT_L_FOOT);
|
||||
var TOE = 5;
|
||||
JOINT_ORDER.push(JOINT_R_TOE);
|
||||
JOINT_ORDER.push(JOINT_L_TOE);
|
||||
//*** middle joints ***
|
||||
var SPINE = 0;
|
||||
JOINT_ORDER.push(JOINT_SPINE);
|
||||
|
||||
//We have to store the angles so we can invert yaw and roll when making the animation
|
||||
//symmetrical
|
||||
|
||||
//Front refers to leg, not arm.
|
||||
//Legs Extending
|
||||
rightAngles[0][HIP] = [30.0, 0.0, 8.0];
|
||||
rightAngles[0][KNEE] = [-15.0, 0.0, 0.0];
|
||||
rightAngles[0][ARM] = [85.0, -25.0, 0.0];
|
||||
rightAngles[0][FOREARM] = [0.0, 0.0, -15.0];
|
||||
rightAngles[0][FOOT] = [0.0, 0.0, 0.0];
|
||||
rightAngles[0][TOE] = [0.0, 0.0, 0.0];
|
||||
|
||||
leftAngles[0][HIP] = [-15, 0.0, 8.0];
|
||||
leftAngles[0][KNEE] = [-26, 0.0, 0.0];
|
||||
leftAngles[0][ARM] = [85.0, 20.0, 0.0];
|
||||
leftAngles[0][FOREARM] = [10.0, 0.0, -25.0];
|
||||
leftAngles[0][FOOT] = [-13.0, 0.0, 0.0];
|
||||
leftAngles[0][TOE] = [34.0, 0.0, 0.0];
|
||||
|
||||
middleAngles[0][SPINE] = [0.0, -15.0, 5.0];
|
||||
|
||||
//Legs Passing
|
||||
rightAngles[1][HIP] = [6.0, 0.0, 8.0];
|
||||
rightAngles[1][KNEE] = [-12.0, 0.0, 0.0];
|
||||
rightAngles[1][ARM] = [85.0, 0.0, 0.0];
|
||||
rightAngles[1][FOREARM] = [0.0, 0.0, -15.0];
|
||||
rightAngles[1][FOOT] = [6.0, -8.0, 0.0];
|
||||
rightAngles[1][TOE] = [0.0, 0.0, 0.0];
|
||||
|
||||
leftAngles[1][HIP] = [10.0, 0.0, 8.0];
|
||||
leftAngles[1][KNEE] = [-60.0, 0.0, 0.0];
|
||||
leftAngles[1][ARM] = [85.0, 0.0, 0.0];
|
||||
leftAngles[1][FOREARM] = [0.0, 0.0, -15.0];
|
||||
leftAngles[1][FOOT] = [0.0, 0.0, 0.0];
|
||||
leftAngles[1][TOE] = [0.0, 0.0, 0.0];
|
||||
|
||||
middleAngles[1][SPINE] = [0.0, 0.0, 0.0];
|
||||
|
||||
//Actual keyframes for the animation
|
||||
var walkKeyFrames = procAnimAPI.generateKeyframes(rightAngles, leftAngles, middleAngles, NUM_FRAMES);
|
||||
|
||||
// ******************************* Animation Is Defined Above *************************************

// ********************************** Standing Key Frame ******************************************
//We don't have to do any mirroring or anything, since this is just a single pose.
var rightQuats = [];
var leftQuats = [];
var middleQuats = [];

rightQuats[HIP] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 7.0);
rightQuats[KNEE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
rightQuats[ARM] = Quat.fromPitchYawRollDegrees(85.0, 0.0, 0.0);
rightQuats[FOREARM] = Quat.fromPitchYawRollDegrees(0.0, 0.0, -10.0);
rightQuats[FOOT] = Quat.fromPitchYawRollDegrees(0.0, -8.0, 0.0);
rightQuats[TOE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);

leftQuats[HIP] = Quat.fromPitchYawRollDegrees(0, 0.0, -7.0);
leftQuats[KNEE] = Quat.fromPitchYawRollDegrees(0, 0.0, 0.0);
leftQuats[ARM] = Quat.fromPitchYawRollDegrees(85.0, 0.0, 0.0);
leftQuats[FOREARM] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 10.0);
leftQuats[FOOT] = Quat.fromPitchYawRollDegrees(0.0, 8.0, 0.0);
leftQuats[TOE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);

middleQuats[SPINE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);

var standingKeyFrame = new procAnimAPI.KeyFrame(rightQuats, leftQuats, middleQuats);

// ************************************************************************************************

var currentFrame = 0;

var walkTime = 0.0;

var walkWheelRadius = 0.5;
var walkWheelRate = 2.0 * 3.141592 * walkWheelRadius / 8.0;

var avatarAcceleration = 0.75;
var avatarVelocity = 0.0;
var avatarMaxVelocity = 1.4;

function handleAnimation(deltaTime) {

    updateBlinking(deltaTime);
    updateBlendShapes(deltaTime);

    if (Math.random() < 0.01) {
        setRandomExpression();
    }

    if (avatarVelocity == 0.0) {
        walkTime = 0.0;
        currentFrame = 0;
    } else {
        walkTime += avatarVelocity * deltaTime;
        if (walkTime > walkWheelRate) {
            walkTime = 0.0;
            currentFrame++;
            if (currentFrame % 2 == 1) {
                playRandomFootstepSound();
            }
            if (currentFrame > 3) {
                currentFrame = 0;
            }
        }
    }

    var frame = walkKeyFrames[currentFrame];

    var walkInterp = walkTime / walkWheelRate;
    var animInterp = avatarVelocity / (avatarMaxVelocity / 1.3);
    if (animInterp > 1.0) animInterp = 1.0;

    for (var i = 0; i < JOINT_ORDER.length; i++) {
        var walkJoint = procAnimAPI.deCasteljau(frame.rotations[i], frame.nextFrame.rotations[i], frame.controlPoints[i][0], frame.controlPoints[i][1], walkInterp);
        var standJoint = standingKeyFrame.rotations[i];
        var finalJoint = Quat.mix(standJoint, walkJoint, animInterp);
        Avatar.setJointData(JOINT_ORDER[i], finalJoint);
    }
}
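A hedged sketch of the curve evaluation that procAnimAPI.deCasteljau is expected to perform (the real implementation lives in the procedural animation library and is not part of this diff): the current and next keyframe rotations act as the endpoints of a cubic Bezier, the two stored control points shape it, and De Casteljau's algorithm reduces to nested Quat.mix calls at parameter walkInterp.

// illustrative only, assuming nothing beyond Quat.mix from the scripting API
function deCasteljauSketch(startRot, control0, control1, endRot, t) {
    // first level: interpolate between adjacent control rotations
    var a = Quat.mix(startRot, control0, t);
    var b = Quat.mix(control0, control1, t);
    var c = Quat.mix(control1, endRot, t);
    // second level
    var d = Quat.mix(a, b, t);
    var e = Quat.mix(b, c, t);
    // final point on the curve at parameter t
    return Quat.mix(d, e, t);
}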

function jumpWithLoudness(deltaTime) {
    // potentially change pelvis height depending on trailing average loudness

    pelvisOscillatorVelocity += deltaTime * Agent.lastReceivedAudioLoudness * 700.0;

    pelvisOscillatorVelocity -= pelvisOscillatorPosition * 0.75;
    pelvisOscillatorVelocity *= 0.97;
    pelvisOscillatorPosition += deltaTime * pelvisOscillatorVelocity;
    Avatar.headPitch = pelvisOscillatorPosition * 60.0;

    var pelvisPosition = Avatar.position;
    pelvisPosition.y = (Y_PELVIS - 0.35) + pelvisOscillatorPosition;

    if (pelvisPosition.y < Y_PELVIS) {
        pelvisPosition.y = Y_PELVIS;
    } else if (pelvisPosition.y > Y_PELVIS + 1.0) {
        pelvisPosition.y = Y_PELVIS + 1.0;
    }

    Avatar.position = pelvisPosition;
}

var forcedMove = false;

var wasMovingLastFrame = false;

function handleHeadTurn() {
    if (!isTurningHead && (Math.random() < CHANCE_OF_HEAD_TURNING)) {
        targetHeadPitch = getRandomFloat(-PITCH_RANGE, PITCH_RANGE);
        targetHeadYaw = getRandomFloat(-YAW_RANGE, YAW_RANGE);
        isTurningHead = true;
    } else {
        Avatar.headPitch = Avatar.headPitch + (targetHeadPitch - Avatar.headPitch) * HEAD_TURN_RATE;
        Avatar.headYaw = Avatar.headYaw + (targetHeadYaw - Avatar.headYaw) * HEAD_TURN_RATE;
        if (Math.abs(Avatar.headPitch - targetHeadPitch) < STOP_TOLERANCE &&
            Math.abs(Avatar.headYaw - targetHeadYaw) < STOP_TOLERANCE) {
            isTurningHead = false;
        }
    }
}

function stopWalking() {
    avatarVelocity = 0.0;
    isMoving = false;
}
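HEAD_TURN_RATE acts as a per-frame exponential ease: each update closes a fixed fraction of the remaining pitch/yaw error, so after n frames the error is roughly

error_n = (1 - HEAD_TURN_RATE)^n * error_0

and the head coasts into the target until the error falls below STOP_TOLERANCE.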

var MAX_ATTEMPTS = 40;
function handleWalking(deltaTime) {

    if (forcedMove || (!isMoving && Math.random() < CHANCE_OF_MOVING)) {
        // Set new target location

        var moveRange;
        if (Math.random() < CHANCE_OF_BIG_MOVE) {
            moveRange = MOVE_RANGE_BIG;
        } else {
            moveRange = MOVE_RANGE_SMALL;
        }

        //Keep trying new orientations if the desired target location is out of bounds
        var attempts = 0;
        do {
            targetOrientation = Quat.multiply(Avatar.orientation, Quat.angleAxis(getRandomFloat(-TURN_RANGE, TURN_RANGE), { x: 0, y: 1, z: 0 }));
            var front = Quat.getFront(targetOrientation);

            targetPosition = Vec3.sum(Avatar.position, Vec3.multiply(front, getRandomFloat(0.0, moveRange)));
            attempts++; // count the attempt so the loop can give up after MAX_ATTEMPTS
        }
        while ((targetPosition.x < X_MIN || targetPosition.x > X_MAX || targetPosition.z < Z_MIN || targetPosition.z > Z_MAX)
               && attempts < MAX_ATTEMPTS);

        targetPosition.x = clamp(targetPosition.x, X_MIN, X_MAX);
        targetPosition.z = clamp(targetPosition.z, Z_MIN, Z_MAX);
        targetPosition.y = Y_PELVIS;

        wasMovingLastFrame = true;
        isMoving = true;
        forcedMove = false;
    } else if (isMoving) {

        var targetVector = Vec3.subtract(targetPosition, Avatar.position);
        var distance = Vec3.length(targetVector);
        if (distance <= avatarVelocity * deltaTime) {
            Avatar.position = targetPosition;
            stopWalking();
        } else {
            var direction = Vec3.normalize(targetVector);
            //Figure out if we should be slowing down
            var t = avatarVelocity / avatarAcceleration;
            var d = (avatarVelocity / 2.0) * t;
            if (distance < d) {
                avatarVelocity -= avatarAcceleration * deltaTime;
                if (avatarVelocity <= 0) {
                    stopWalking();
                }
            } else {
                avatarVelocity += avatarAcceleration * deltaTime;
                if (avatarVelocity > avatarMaxVelocity) {
                    avatarVelocity = avatarMaxVelocity;
                }
            }
            Avatar.position = Vec3.sum(Avatar.position, Vec3.multiply(direction, avatarVelocity * deltaTime));
            Avatar.orientation = Quat.mix(Avatar.orientation, targetOrientation, TURN_RATE);

            wasMovingLastFrame = true;
        }
    }
}
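The slow-down branch above is the usual constant-deceleration stopping check: at speed v with deceleration a, stopping takes t = v / a seconds and covers d = (v / 2) * t = v^2 / (2 * a) meters, so the bot begins braking once the remaining distance to the target drops below v^2 / (2 * a).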

function handleTalking() {
    if (Math.random() < CHANCE_OF_SOUND) {
        playRandomSound();
    }
}

function changePelvisHeight(newHeight) {
    var newPosition = Avatar.position;
    newPosition.y = newHeight;
    Avatar.position = newPosition;
}

function updateBehavior(deltaTime) {

    if (AvatarList.containsAvatarWithDisplayName("mrdj")) {
        if (wasMovingLastFrame) {
            isMoving = false;
        }

        // we have a DJ, shouldn't we be dancing?
        jumpWithLoudness(deltaTime);
    } else {

        // no DJ, let's just chill on the dancefloor - randomly walking and talking
        handleHeadTurn();
        handleAnimation(deltaTime);
        handleWalking(deltaTime);
        handleTalking();
    }
}

Script.update.connect(updateBehavior);

examples/acScripts/playbackAgents.js (new file, 164 lines)
@@ -0,0 +1,164 @@
//
// playbackAgents.js
// acScripts
//
// Created by Edgar Pironti on 11/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// Set the following variables to the values needed
var commandChannel = "com.highfidelity.PlaybackChannel1";
var clip_url = null;
var playFromCurrentLocation = true;
var useDisplayName = true;
var useAttachments = true;
var useAvatarModel = true;

// ID of the agent. Two agents can't have the same ID.
var announceIDChannel = "com.highfidelity.playbackAgent.announceID";
var UNKNOWN_AGENT_ID = -2;
var id = UNKNOWN_AGENT_ID; // unknown until acknowledged

// Set position/orientation/scale here if playFromCurrentLocation is true
Avatar.position = { x: 0, y: 0, z: 0 };
Avatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
Avatar.scale = 1.0;

var totalTime = 0;
var subscribed = false;
var WAIT_FOR_AUDIO_MIXER = 1;

// Script. DO NOT MODIFY BEYOND THIS LINE.
var DO_NOTHING = 0;
var PLAY = 1;
var PLAY_LOOP = 2;
var STOP = 3;
var SHOW = 4;
var HIDE = 5;
var LOAD = 6;

Recording.setPlayFromCurrentLocation(playFromCurrentLocation);
Recording.setPlayerUseDisplayName(useDisplayName);
Recording.setPlayerUseAttachments(useAttachments);
Recording.setPlayerUseHeadModel(false);
Recording.setPlayerUseSkeletonModel(useAvatarModel);

function getAction(channel, message, senderID) {
    if (subscribed) {
        var command = JSON.parse(message);
        print("I'm the agent " + id + " and I received this: ID: " + command.id_key + " Action: " + command.action_key + " URL: " + command.clip_url_key);

        if (command.id_key == id || command.id_key == -1) {
            if (command.action_key === 6) {
                clip_url = command.clip_url_key;
            }

            action = command.action_key;
            print("That command was for me!");
            print("My clip is: " + clip_url);
        } else {
            action = DO_NOTHING;
        }

        switch (action) {
            case PLAY:
                print("Play");
                if (!Agent.isAvatar) {
                    Agent.isAvatar = true;
                }
                if (!Recording.isPlaying()) {
                    Recording.startPlaying();
                }
                Recording.setPlayerLoop(false);
                break;
            case PLAY_LOOP:
                print("Play loop");
                if (!Agent.isAvatar) {
                    Agent.isAvatar = true;
                }
                if (!Recording.isPlaying()) {
                    Recording.startPlaying();
                }
                Recording.setPlayerLoop(true);
                break;
            case STOP:
                print("Stop");
                if (Recording.isPlaying()) {
                    Recording.stopPlaying();
                }
                break;
            case SHOW:
                print("Show");
                if (!Agent.isAvatar) {
                    Agent.isAvatar = true;
                }
                break;
            case HIDE:
                print("Hide");
                if (Recording.isPlaying()) {
                    Recording.stopPlaying();
                }
                Agent.isAvatar = false;
                break;
            case LOAD:
                print("Load");
                if (clip_url !== null) {
                    Recording.loadRecording(clip_url);
                }
                break;
            case DO_NOTHING:
                break;
            default:
                print("Unknown action: " + action);
                break;
        }

        if (Recording.isPlaying()) {
            Recording.play();
        }
    }
}

function update(deltaTime) {

    totalTime += deltaTime;

    if (totalTime > WAIT_FOR_AUDIO_MIXER) {
        if (!subscribed) {
            Messages.subscribe(commandChannel); // command channel
            Messages.subscribe(announceIDChannel); // id announce channel
            subscribed = true;
            print("I'm the agent and I am ready to receive!");
        }
        if (subscribed && id == UNKNOWN_AGENT_ID) {
            print("sending ready, id:" + id);
            Messages.sendMessage(announceIDChannel, "ready");
        }
    }

}

Messages.messageReceived.connect(function (channel, message, senderID) {
    if (channel == announceIDChannel && message != "ready") {
        // If I don't yet know if my ID has been received, then check to see if the master has acknowledged me
        if (id == UNKNOWN_AGENT_ID) {
            var parts = message.split(".");
            var agentID = parts[0];
            var agentIndex = parts[1];
            if (agentID == Agent.sessionUUID) {
                id = agentIndex;
                Messages.unsubscribe(announceIDChannel); // id announce channel
            }
        }
    }
    if (channel == commandChannel) {
        getAction(channel, message, senderID);
    }
});

Script.update.connect(update);
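For a quick manual test of the agent above, a hypothetical sender snippet (not part of this commit) that publishes one broadcast command on the same channel; the clip URL is only a placeholder, and the real controller is playbackMaster.js below.

// hypothetical test sender for the agent's command protocol
var testCommand = {
    id_key: -1,                       // -1 means every agent should act on it
    action_key: LOAD,                 // 6, per the action table above
    clip_url_key: "atp:/example.hfr"  // placeholder clip URL
};
Messages.sendMessage(commandChannel, JSON.stringify(testCommand));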

examples/acScripts/playbackMaster.js (new file, 284 lines)
@@ -0,0 +1,284 @@
//
|
||||
// playbackMaster.js
|
||||
// acScripts
|
||||
//
|
||||
// Created by Edgar Pironti on 11/17/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
|
||||
|
||||
|
||||
var ac_number = 1; // This is the default number of ACs. Their ID need to be unique and between 0 (included) and ac_number (excluded)
|
||||
var names = new Array(); // It is possible to specify the name of the ACs in this array. ACs names ordered by IDs (Default name is "ACx", x = ID + 1))
|
||||
var channel = "com.highfidelity.PlaybackChannel1";
|
||||
var subscribed = false;
|
||||
var clip_url = null;
|
||||
var input_text = null;
|
||||
|
||||
var knownAgents = new Array; // We will add our known agents here when we discover them
|
||||
|
||||
// available playbackAgents will announce their sessionID here.
|
||||
var announceIDChannel = "com.highfidelity.playbackAgent.announceID";
|
||||
|
||||
// Script. DO NOT MODIFY BEYOND THIS LINE.
|
||||
Script.include("../libraries/toolBars.js");
|
||||
|
||||
var DO_NOTHING = 0;
|
||||
var PLAY = 1;
|
||||
var PLAY_LOOP = 2;
|
||||
var STOP = 3;
|
||||
var SHOW = 4;
|
||||
var HIDE = 5;
|
||||
var LOAD = 6;
|
||||
|
||||
var windowDimensions = Controller.getViewportDimensions();
|
||||
var TOOL_ICON_URL = HIFI_PUBLIC_BUCKET + "images/tools/";
|
||||
var ALPHA_ON = 1.0;
|
||||
var ALPHA_OFF = 0.7;
|
||||
var COLOR_TOOL_BAR = { red: 0, green: 0, blue: 0 };
|
||||
var COLOR_MASTER = { red: 0, green: 0, blue: 0 };
|
||||
var TEXT_HEIGHT = 12;
|
||||
var TEXT_MARGIN = 3;
|
||||
|
||||
var toolBars = new Array();
|
||||
var nameOverlays = new Array();
|
||||
var onOffIcon = new Array();
|
||||
var playIcon = new Array();
|
||||
var playLoopIcon = new Array();
|
||||
var stopIcon = new Array();
|
||||
var loadIcon = new Array();
|
||||
|
||||
setupPlayback();
|
||||
|
||||
function setupPlayback() {
|
||||
ac_number = Window.prompt("Insert number of agents: ","1");
|
||||
if (ac_number === "" || ac_number === null) {
|
||||
ac_number = 1;
|
||||
}
|
||||
Messages.subscribe(channel);
|
||||
subscribed = true;
|
||||
setupToolBars();
|
||||
}
|
||||
|
||||
function setupToolBars() {
|
||||
if (toolBars.length > 0) {
|
||||
print("Multiple calls to Recorder.js:setupToolBars()");
|
||||
return;
|
||||
}
|
||||
Tool.IMAGE_HEIGHT /= 2;
|
||||
Tool.IMAGE_WIDTH /= 2;
|
||||
|
||||
for (i = 0; i <= ac_number; i++) {
|
||||
toolBars.push(new ToolBar(0, 0, ToolBar.HORIZONTAL));
|
||||
toolBars[i].setBack((i == ac_number) ? COLOR_MASTER : COLOR_TOOL_BAR, ALPHA_OFF);
|
||||
|
||||
onOffIcon.push(toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "ac-on-off.svg",
|
||||
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
|
||||
x: 0, y: 0,
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_ON,
|
||||
visible: true
|
||||
}, true, true));
|
||||
|
||||
playIcon[i] = toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "play.svg",
|
||||
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}, false);
|
||||
|
||||
var playLoopWidthFactor = 1.65;
|
||||
playLoopIcon[i] = toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "play-and-loop.svg",
|
||||
subImage: { x: 0, y: 0, width: playLoopWidthFactor * Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
|
||||
width: playLoopWidthFactor * Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}, false);
|
||||
|
||||
stopIcon[i] = toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "recording-stop.svg",
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}, false);
|
||||
|
||||
loadIcon[i] = toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "recording-upload.svg",
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}, false);
|
||||
|
||||
nameOverlays.push(Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 0, green: 0, blue: 0 },
|
||||
font: { size: TEXT_HEIGHT },
|
||||
text: (i == ac_number) ? "Master" : i + ". " +
|
||||
((i < names.length) ? names[i] :
|
||||
"AC" + i),
|
||||
x: 0, y: 0,
|
||||
width: toolBars[i].width + ToolBar.SPACING,
|
||||
height: TEXT_HEIGHT + TEXT_MARGIN,
|
||||
leftMargin: TEXT_MARGIN,
|
||||
topMargin: TEXT_MARGIN,
|
||||
alpha: ALPHA_OFF,
|
||||
backgroundAlpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
function sendCommand(id, action) {
|
||||
if (action === SHOW) {
|
||||
toolBars[id].selectTool(onOffIcon[id], false);
|
||||
toolBars[id].setAlpha(ALPHA_ON, playIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_ON, playLoopIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_ON, stopIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_ON, loadIcon[id]);
|
||||
} else if (action === HIDE) {
|
||||
toolBars[id].selectTool(onOffIcon[id], true);
|
||||
toolBars[id].setAlpha(ALPHA_OFF, playIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_OFF, playLoopIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_OFF, stopIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_OFF, loadIcon[id]);
|
||||
} else if (toolBars[id].toolSelected(onOffIcon[id])) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (id == (toolBars.length - 1)) {
|
||||
id = -1; // Master command becomes broadcast.
|
||||
}
|
||||
|
||||
var message = {
|
||||
id_key: id,
|
||||
action_key: action,
|
||||
clip_url_key: clip_url
|
||||
};
|
||||
|
||||
if(subscribed){
|
||||
Messages.sendMessage(channel, JSON.stringify(message));
|
||||
print("Message sent!");
|
||||
clip_url = null;
|
||||
}
|
||||
}
|
||||
|
||||
function mousePressEvent(event) {
|
||||
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
|
||||
|
||||
// Check master control
|
||||
var i = toolBars.length - 1;
|
||||
if (onOffIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
if (toolBars[i].toolSelected(onOffIcon[i])) {
|
||||
sendCommand(i, SHOW);
|
||||
} else {
|
||||
sendCommand(i, HIDE);
|
||||
}
|
||||
} else if (playIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, PLAY);
|
||||
} else if (playLoopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, PLAY_LOOP);
|
||||
} else if (stopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, STOP);
|
||||
} else if (loadIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
input_text = Window.prompt("Insert the url of the clip: ","");
|
||||
if (!(input_text === "" || input_text === null)) {
|
||||
clip_url = input_text;
|
||||
sendCommand(i, LOAD);
|
||||
}
|
||||
} else {
|
||||
// Check individual controls
|
||||
for (i = 0; i < ac_number; i++) {
|
||||
if (onOffIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
if (toolBars[i].toolSelected(onOffIcon[i], false)) {
|
||||
sendCommand(i, SHOW);
|
||||
} else {
|
||||
sendCommand(i, HIDE);
|
||||
}
|
||||
} else if (playIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, PLAY);
|
||||
} else if (playLoopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, PLAY_LOOP);
|
||||
} else if (stopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, STOP);
|
||||
} else if (loadIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
input_text = Window.prompt("Insert the url of the clip: ","");
|
||||
if (!(input_text === "" || input_text === null)) {
|
||||
clip_url = input_text;
|
||||
sendCommand(i, LOAD);
|
||||
}
|
||||
} else {
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function moveUI() {
|
||||
var textSize = TEXT_HEIGHT + 2 * TEXT_MARGIN;
|
||||
var relative = { x: 70, y: 75 + (ac_number) * (Tool.IMAGE_HEIGHT + ToolBar.SPACING + textSize) };
|
||||
|
||||
for (i = 0; i <= ac_number; i++) {
|
||||
toolBars[i].move(relative.x,
|
||||
windowDimensions.y - relative.y +
|
||||
i * (Tool.IMAGE_HEIGHT + ToolBar.SPACING + textSize));
|
||||
|
||||
Overlays.editOverlay(nameOverlays[i], {
|
||||
x: toolBars[i].x - ToolBar.SPACING,
|
||||
y: toolBars[i].y - textSize
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function update() {
|
||||
var newDimensions = Controller.getViewportDimensions();
|
||||
if (windowDimensions.x != newDimensions.x ||
|
||||
windowDimensions.y != newDimensions.y) {
|
||||
windowDimensions = newDimensions;
|
||||
moveUI();
|
||||
}
|
||||
}
|
||||
|
||||
function scriptEnding() {
|
||||
for (i = 0; i <= ac_number; i++) {
|
||||
toolBars[i].cleanup();
|
||||
Overlays.deleteOverlay(nameOverlays[i]);
|
||||
}
|
||||
|
||||
if (subscribed) {
|
||||
Messages.unsubscribe(channel);
|
||||
}
|
||||
Messages.unsubscribe(announceIDChannel);
|
||||
}
|
||||
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
||||
Script.update.connect(update);
|
||||
Script.scriptEnding.connect(scriptEnding);
|
||||
|
||||
|
||||
|
||||
Messages.subscribe(announceIDChannel);
|
||||
Messages.messageReceived.connect(function (channel, message, senderID) {
|
||||
if (channel == announceIDChannel && message == "ready") {
|
||||
// check to see if we know about this agent
|
||||
if (knownAgents.indexOf(senderID) < 0) {
|
||||
var indexOfNewAgent = knownAgents.length;
|
||||
knownAgents[indexOfNewAgent] = senderID;
|
||||
var acknowledgeMessage = senderID + "." + indexOfNewAgent;
|
||||
Messages.sendMessage(announceIDChannel, acknowledgeMessage);
|
||||
}
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
moveUI();
|
|
@ -37,9 +37,21 @@ var BUMPER_ON_VALUE = 0.5;
|
|||
var DISTANCE_HOLDING_RADIUS_FACTOR = 5; // multiplied by distance between hand and object
|
||||
var DISTANCE_HOLDING_ACTION_TIMEFRAME = 0.1; // how quickly objects move to their new position
|
||||
var DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR = 2.0; // object rotates this much more than hand did
|
||||
var NO_INTERSECT_COLOR = { red: 10, green: 10, blue: 255}; // line color when pick misses
|
||||
var INTERSECT_COLOR = { red: 250, green: 10, blue: 10}; // line color when pick hits
|
||||
var LINE_ENTITY_DIMENSIONS = { x: 1000, y: 1000,z: 1000};
|
||||
var NO_INTERSECT_COLOR = {
|
||||
red: 10,
|
||||
green: 10,
|
||||
blue: 255
|
||||
}; // line color when pick misses
|
||||
var INTERSECT_COLOR = {
|
||||
red: 250,
|
||||
green: 10,
|
||||
blue: 10
|
||||
}; // line color when pick hits
|
||||
var LINE_ENTITY_DIMENSIONS = {
|
||||
x: 1000,
|
||||
y: 1000,
|
||||
z: 1000
|
||||
};
|
||||
var LINE_LENGTH = 500;
|
||||
var PICK_MAX_DISTANCE = 500; // max length of pick-ray
|
||||
|
||||
|
@ -84,12 +96,13 @@ var ACTION_TTL_REFRESH = 5;
|
|||
var PICKS_PER_SECOND_PER_HAND = 5;
|
||||
var MSECS_PER_SEC = 1000.0;
|
||||
var GRABBABLE_PROPERTIES = ["position",
|
||||
"rotation",
|
||||
"gravity",
|
||||
"ignoreForCollisions",
|
||||
"collisionsWillMove",
|
||||
"locked",
|
||||
"name"];
|
||||
"rotation",
|
||||
"gravity",
|
||||
"ignoreForCollisions",
|
||||
"collisionsWillMove",
|
||||
"locked",
|
||||
"name"
|
||||
];
|
||||
|
||||
|
||||
var GRABBABLE_DATA_KEY = "grabbableKey"; // shared with grab.js
|
||||
|
@ -100,7 +113,7 @@ var DEFAULT_GRABBABLE_DATA = {
|
|||
invertSolidWhileHeld: false
|
||||
};
|
||||
|
||||
var disabledHand ='none';
|
||||
var disabledHand = 'none';
|
||||
|
||||
|
||||
// states for the state machine
|
||||
|
@ -125,40 +138,40 @@ var STATE_EQUIP_SPRING = 16;
|
|||
|
||||
function stateToName(state) {
|
||||
switch (state) {
|
||||
case STATE_OFF:
|
||||
return "off";
|
||||
case STATE_SEARCHING:
|
||||
return "searching";
|
||||
case STATE_DISTANCE_HOLDING:
|
||||
return "distance_holding";
|
||||
case STATE_CONTINUE_DISTANCE_HOLDING:
|
||||
return "continue_distance_holding";
|
||||
case STATE_NEAR_GRABBING:
|
||||
return "near_grabbing";
|
||||
case STATE_CONTINUE_NEAR_GRABBING:
|
||||
return "continue_near_grabbing";
|
||||
case STATE_NEAR_TRIGGER:
|
||||
return "near_trigger";
|
||||
case STATE_CONTINUE_NEAR_TRIGGER:
|
||||
return "continue_near_trigger";
|
||||
case STATE_FAR_TRIGGER:
|
||||
return "far_trigger";
|
||||
case STATE_CONTINUE_FAR_TRIGGER:
|
||||
return "continue_far_trigger";
|
||||
case STATE_RELEASE:
|
||||
return "release";
|
||||
case STATE_EQUIP_SEARCHING:
|
||||
return "equip_searching";
|
||||
case STATE_EQUIP:
|
||||
return "equip";
|
||||
case STATE_CONTINUE_EQUIP_BD:
|
||||
return "continue_equip_bd";
|
||||
case STATE_CONTINUE_EQUIP:
|
||||
return "continue_equip";
|
||||
case STATE_WAITING_FOR_BUMPER_RELEASE:
|
||||
return "waiting_for_bumper_release";
|
||||
case STATE_EQUIP_SPRING:
|
||||
return "state_equip_spring";
|
||||
case STATE_OFF:
|
||||
return "off";
|
||||
case STATE_SEARCHING:
|
||||
return "searching";
|
||||
case STATE_DISTANCE_HOLDING:
|
||||
return "distance_holding";
|
||||
case STATE_CONTINUE_DISTANCE_HOLDING:
|
||||
return "continue_distance_holding";
|
||||
case STATE_NEAR_GRABBING:
|
||||
return "near_grabbing";
|
||||
case STATE_CONTINUE_NEAR_GRABBING:
|
||||
return "continue_near_grabbing";
|
||||
case STATE_NEAR_TRIGGER:
|
||||
return "near_trigger";
|
||||
case STATE_CONTINUE_NEAR_TRIGGER:
|
||||
return "continue_near_trigger";
|
||||
case STATE_FAR_TRIGGER:
|
||||
return "far_trigger";
|
||||
case STATE_CONTINUE_FAR_TRIGGER:
|
||||
return "continue_far_trigger";
|
||||
case STATE_RELEASE:
|
||||
return "release";
|
||||
case STATE_EQUIP_SEARCHING:
|
||||
return "equip_searching";
|
||||
case STATE_EQUIP:
|
||||
return "equip";
|
||||
case STATE_CONTINUE_EQUIP_BD:
|
||||
return "continue_equip_bd";
|
||||
case STATE_CONTINUE_EQUIP:
|
||||
return "continue_equip";
|
||||
case STATE_WAITING_FOR_BUMPER_RELEASE:
|
||||
return "waiting_for_bumper_release";
|
||||
case STATE_EQUIP_SPRING:
|
||||
return "state_equip_spring";
|
||||
}
|
||||
|
||||
return "unknown";
|
||||
|
@ -187,7 +200,6 @@ function entityIsGrabbedByOther(entityID) {
|
|||
return false;
|
||||
}
|
||||
|
||||
|
||||
function MyController(hand) {
|
||||
this.hand = hand;
|
||||
if (this.hand === RIGHT_HAND) {
|
||||
|
@ -211,8 +223,17 @@ function MyController(hand) {
|
|||
this.rawTriggerValue = 0;
|
||||
this.rawBumperValue = 0;
|
||||
|
||||
this.offsetPosition = { x: 0.0, y: 0.0, z: 0.0 };
|
||||
this.offsetRotation = { x: 0.0, y: 0.0, z: 0.0, w: 1.0 };
|
||||
this.offsetPosition = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0
|
||||
};
|
||||
this.offsetRotation = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0,
|
||||
w: 1.0
|
||||
};
|
||||
|
||||
var _this = this;
|
||||
|
||||
|
@ -277,7 +298,7 @@ function MyController(hand) {
|
|||
this.state = newState;
|
||||
}
|
||||
|
||||
this.debugLine = function(closePoint, farPoint, color){
|
||||
this.debugLine = function(closePoint, farPoint, color) {
|
||||
Entities.addEntity({
|
||||
type: "Line",
|
||||
name: "Grab Debug Entity",
|
||||
|
@ -321,16 +342,16 @@ function MyController(hand) {
|
|||
this.pointer = null;
|
||||
};
|
||||
|
||||
this.triggerPress = function (value) {
|
||||
this.triggerPress = function(value) {
|
||||
_this.rawTriggerValue = value;
|
||||
};
|
||||
|
||||
this.bumperPress = function (value) {
|
||||
this.bumperPress = function(value) {
|
||||
_this.rawBumperValue = value;
|
||||
};
|
||||
|
||||
|
||||
this.updateSmoothedTrigger = function () {
|
||||
this.updateSmoothedTrigger = function() {
|
||||
var triggerValue = this.rawTriggerValue;
|
||||
// smooth out trigger value
|
||||
this.triggerValue = (this.triggerValue * TRIGGER_SMOOTH_RATIO) +
|
||||
|
@ -401,7 +422,7 @@ function MyController(hand) {
|
|||
this.lastPickTime = now;
|
||||
}
|
||||
|
||||
for (var index=0; index < pickRays.length; ++index) {
|
||||
for (var index = 0; index < pickRays.length; ++index) {
|
||||
var pickRay = pickRays[index];
|
||||
var directionNormalized = Vec3.normalize(pickRay.direction);
|
||||
var directionBacked = Vec3.multiply(directionNormalized, PICK_BACKOFF_DISTANCE);
|
||||
|
@ -466,10 +487,9 @@ function MyController(hand) {
|
|||
}
|
||||
return;
|
||||
}
|
||||
} else if (! entityIsGrabbedByOther(intersection.entityID)) {
|
||||
} else if (!entityIsGrabbedByOther(intersection.entityID)) {
|
||||
// don't allow two people to distance grab the same object
|
||||
if (intersection.properties.collisionsWillMove
|
||||
&& !intersection.properties.locked) {
|
||||
if (intersection.properties.collisionsWillMove && !intersection.properties.locked) {
|
||||
// the hand is far from the intersected object. go into distance-holding mode
|
||||
this.grabbedEntity = intersection.entityID;
|
||||
if (typeof grabbableData.spatialKey !== 'undefined' && this.state == STATE_EQUIP_SEARCHING) {
|
||||
|
@ -494,10 +514,18 @@ function MyController(hand) {
|
|||
Entities.addEntity({
|
||||
type: "Sphere",
|
||||
name: "Grab Debug Entity",
|
||||
dimensions: {x: GRAB_RADIUS, y: GRAB_RADIUS, z: GRAB_RADIUS},
|
||||
dimensions: {
|
||||
x: GRAB_RADIUS,
|
||||
y: GRAB_RADIUS,
|
||||
z: GRAB_RADIUS
|
||||
},
|
||||
visible: true,
|
||||
position: handPosition,
|
||||
color: { red: 0, green: 255, blue: 0},
|
||||
color: {
|
||||
red: 0,
|
||||
green: 255,
|
||||
blue: 0
|
||||
},
|
||||
lifetime: 0.1
|
||||
});
|
||||
}
|
||||
|
@ -604,6 +632,7 @@ function MyController(hand) {
|
|||
} else {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
|
||||
}
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startDistantGrab");
|
||||
}
|
||||
|
||||
|
@ -639,7 +668,7 @@ function MyController(hand) {
|
|||
|
||||
// the action was set up on a previous call. update the targets.
|
||||
var radius = Math.max(Vec3.distance(this.currentObjectPosition, handControllerPosition) *
|
||||
DISTANCE_HOLDING_RADIUS_FACTOR, DISTANCE_HOLDING_RADIUS_FACTOR);
|
||||
DISTANCE_HOLDING_RADIUS_FACTOR, DISTANCE_HOLDING_RADIUS_FACTOR);
|
||||
// how far did avatar move this timestep?
|
||||
var currentPosition = MyAvatar.position;
|
||||
var avatarDeltaPosition = Vec3.subtract(currentPosition, this.currentAvatarPosition);
|
||||
|
@ -688,9 +717,9 @@ function MyController(hand) {
|
|||
|
||||
// this doubles hand rotation
|
||||
var handChange = Quat.multiply(Quat.slerp(this.handPreviousRotation,
|
||||
handRotation,
|
||||
DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR),
|
||||
Quat.inverse(this.handPreviousRotation));
|
||||
handRotation,
|
||||
DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR),
|
||||
Quat.inverse(this.handPreviousRotation));
|
||||
this.handPreviousRotation = handRotation;
|
||||
this.currentObjectRotation = Quat.multiply(handChange, this.currentObjectRotation);
|
||||
|
||||
|
@ -773,6 +802,8 @@ function MyController(hand) {
|
|||
this.setState(STATE_CONTINUE_NEAR_GRABBING);
|
||||
} else {
|
||||
// equipping
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startEquip", [JSON.stringify(this.hand)]);
|
||||
this.startHandGrasp();
|
||||
this.setState(STATE_CONTINUE_EQUIP_BD);
|
||||
}
|
||||
|
||||
|
@ -781,6 +812,9 @@ function MyController(hand) {
|
|||
} else {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
|
||||
}
|
||||
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
|
||||
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startNearGrab");
|
||||
|
||||
}
|
||||
|
@ -807,6 +841,7 @@ function MyController(hand) {
|
|||
}
|
||||
if (this.state == STATE_CONTINUE_NEAR_GRABBING && this.bumperSqueezed()) {
|
||||
this.setState(STATE_CONTINUE_EQUIP_BD);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startEquip", [JSON.stringify(this.hand)]);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -827,6 +862,10 @@ function MyController(hand) {
|
|||
this.currentObjectTime = now;
|
||||
Entities.callEntityMethod(this.grabbedEntity, "continueNearGrab");
|
||||
|
||||
if (this.state === STATE_CONTINUE_EQUIP_BD) {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "continueEquip");
|
||||
}
|
||||
|
||||
if (this.actionTimeout - now < ACTION_TTL_REFRESH * MSEC_PER_SEC) {
|
||||
// if less than a 5 seconds left, refresh the actions ttl
|
||||
Entities.updateAction(this.grabbedEntity, this.actionID, {
|
||||
|
@ -846,6 +885,8 @@ function MyController(hand) {
|
|||
if (this.bumperReleased()) {
|
||||
this.setState(STATE_RELEASE);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "releaseGrab");
|
||||
Entities.callEntityMethod(this.grabbedEntity, "unequip");
|
||||
this.endHandGrasp();
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -856,8 +897,17 @@ function MyController(hand) {
|
|||
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, this.grabbedEntity, DEFAULT_GRABBABLE_DATA);
|
||||
|
||||
// use a spring to pull the object to where it will be when equipped
|
||||
var relativeRotation = { x: 0.0, y: 0.0, z: 0.0, w: 1.0 };
|
||||
var relativePosition = { x: 0.0, y: 0.0, z: 0.0 };
|
||||
var relativeRotation = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0,
|
||||
w: 1.0
|
||||
};
|
||||
var relativePosition = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0
|
||||
};
|
||||
if (grabbableData.spatialKey.relativePosition) {
|
||||
relativePosition = grabbableData.spatialKey.relativePosition;
|
||||
}
|
||||
|
@ -913,6 +963,9 @@ function MyController(hand) {
|
|||
} else {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
|
||||
}
|
||||
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
|
||||
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startNearTrigger");
|
||||
this.setState(STATE_CONTINUE_NEAR_TRIGGER);
|
||||
};
|
||||
|
@ -929,6 +982,7 @@ function MyController(hand) {
|
|||
} else {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
|
||||
}
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startFarTrigger");
|
||||
this.setState(STATE_CONTINUE_FAR_TRIGGER);
|
||||
};
|
||||
|
@ -1040,7 +1094,7 @@ function MyController(hand) {
|
|||
|
||||
this.release = function() {
|
||||
|
||||
if(this.hand !== disabledHand){
|
||||
if (this.hand !== disabledHand) {
|
||||
//release the disabled hand when we let go with the main one
|
||||
disabledHand = 'none';
|
||||
}
|
||||
|
@ -1061,6 +1115,7 @@ function MyController(hand) {
|
|||
|
||||
this.cleanup = function() {
|
||||
this.release();
|
||||
this.endHandGrasp();
|
||||
};
|
||||
|
||||
this.activateEntity = function(entityID, grabbedProperties) {
|
||||
|
@ -1075,9 +1130,15 @@ function MyController(hand) {
|
|||
data["gravity"] = grabbedProperties.gravity;
|
||||
data["ignoreForCollisions"] = grabbedProperties.ignoreForCollisions;
|
||||
data["collisionsWillMove"] = grabbedProperties.collisionsWillMove;
|
||||
var whileHeldProperties = {gravity: {x:0, y:0, z:0}};
|
||||
var whileHeldProperties = {
|
||||
gravity: {
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
}
|
||||
};
|
||||
if (invertSolidWhileHeld) {
|
||||
whileHeldProperties["ignoreForCollisions"] = ! grabbedProperties.ignoreForCollisions;
|
||||
whileHeldProperties["ignoreForCollisions"] = !grabbedProperties.ignoreForCollisions;
|
||||
}
|
||||
Entities.editEntity(entityID, whileHeldProperties);
|
||||
}
|
||||
|
@ -1103,6 +1164,44 @@ function MyController(hand) {
|
|||
}
|
||||
setEntityCustomData(GRAB_USER_DATA_KEY, entityID, data);
|
||||
};
|
||||
|
||||
|
||||
//this is our handler, where we do the actual work of changing animation settings
|
||||
this.graspHand = function(animationProperties) {
|
||||
var result = {};
|
||||
//full alpha on overlay for this hand
|
||||
//set grab to true
|
||||
//set idle to false
|
||||
//full alpha on the blend btw open and grab
|
||||
if (_this.hand === RIGHT_HAND) {
|
||||
result['rightHandOverlayAlpha'] = 1.0;
|
||||
result['isRightHandGrab'] = true;
|
||||
result['isRightHandIdle'] = false;
|
||||
result['rightHandGrabBlend'] = 1.0;
|
||||
} else if (_this.hand === LEFT_HAND) {
|
||||
result['leftHandOverlayAlpha'] = 1.0;
|
||||
result['isLeftHandGrab'] = true;
|
||||
result['isLeftHandIdle'] = false;
|
||||
result['leftHandGrabBlend'] = 1.0;
|
||||
}
|
||||
//return an object with our updated settings
|
||||
return result;
|
||||
}
|
||||
|
||||
this.graspHandler = null
|
||||
this.startHandGrasp = function() {
|
||||
if (this.hand === RIGHT_HAND) {
|
||||
this.graspHandler = MyAvatar.addAnimationStateHandler(this.graspHand, ['isRightHandGrab']);
|
||||
} else if (this.hand === LEFT_HAND) {
|
||||
this.graspHandler = MyAvatar.addAnimationStateHandler(this.graspHand, ['isLeftHandGrab']);
|
||||
}
|
||||
}
|
||||
|
||||
this.endHandGrasp = function() {
|
||||
// Tell the animation system we don't need any more callbacks.
|
||||
MyAvatar.removeAnimationStateHandler(this.graspHandler);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var rightController = new MyController(RIGHT_HAND);
|
||||
|
@ -1132,4 +1231,4 @@ function cleanup() {
|
|||
}
|
||||
|
||||
Script.scriptEnding.connect(cleanup);
|
||||
Script.update.connect(update);
|
||||
Script.update.connect(update);
|
|
@ -19,4 +19,3 @@ Script.load("controllers/handControllerGrab.js");
|
|||
Script.load("grab.js");
|
||||
Script.load("directory.js");
|
||||
Script.load("dialTone.js");
|
||||
Script.load("libraries/omniTool.js");
|
||||
|
|
examples/entityScripts/messagesReceiverEntityExample.js (new file, 56 lines)
@@ -0,0 +1,56 @@
//
// messagesReceiverEntityExample.js
// examples/entityScripts
//
// Created by Brad Hefta-Gaub on 11/18/15.
// Copyright 2015 High Fidelity, Inc.
//
// This is an example of an entity script which, when assigned to an entity, subscribes to the "example"
// messages channel and prints every message received on it.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

(function () {

    var _this;

    var messageReceived = function (channel, message, senderID) {
        print("message received on channel:" + channel + ", message:" + message + ", senderID:" + senderID);
    };

    // this is the "constructor" for the entity as a JS object. We don't do much here, but we do want to remember
    // our this object, so we can access it in cases where we're called without a this (like in the case of various global signals)
    MessagesReceiver = function () {
        _this = this;
    };

    MessagesReceiver.prototype = {

        // preload() will be called when the entity has become visible (or known) to the interface
        // it gives us a chance to set our local JavaScript object up. In this case it means:
        // * remembering our entityID, so we can access it in cases where we're called without an entityID
        // * subscribing to messages
        // * connecting to the messageReceived signal
        preload: function (entityID) {
            this.entityID = entityID;

            print("---- subscribing ----");
            Messages.subscribe("example");
            Messages.messageReceived.connect(messageReceived);
        },

        // unload() will be called when the entity has become no longer known to the interface
        // it gives us a chance to clean up our local JavaScript object. In this case it means:
        // * unsubscribing from messages
        // * disconnecting from the messageReceived signal
        unload: function (entityID) {
            print("---- unsubscribing ----");
            Messages.unsubscribe("example");
            Messages.messageReceived.disconnect(messageReceived);
        },
    };

    // entity scripts always need to return a newly constructed object of our type
    return new MessagesReceiver();
})
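A hedged companion snippet (not part of this commit) that any other script could run to exercise the receiver above; the channel name matches the "example" subscription in preload, and the message text is arbitrary.

// send a test message on the channel the entity script listens to
Messages.sendMessage("example", "hello from a test script");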
|
|
@ -16,24 +16,39 @@
|
|||
|
||||
var _this;
|
||||
var isAvatarRecording = false;
|
||||
var channel = "groupRecordingChannel";
|
||||
var startMessage = "RECONDING STARTED";
|
||||
var stopMessage = "RECONDING ENDED";
|
||||
var MASTER_TO_CLIENTS_CHANNEL = "startStopChannel";
|
||||
var CLIENTS_TO_MASTER_CHANNEL = "resultsChannel";
|
||||
var START_MESSAGE = "recordingStarted";
|
||||
var STOP_MESSAGE = "recordingEnded";
|
||||
var PARTICIPATING_MESSAGE = "participatingToRecording";
|
||||
var RECORDING_ICON_URL = "http://cdn.highfidelity.com/alan/production/icons/ICO_rec-active.svg";
|
||||
var NOT_RECORDING_ICON_URL = "http://cdn.highfidelity.com/alan/production/icons/ICO_rec-inactive.svg";
|
||||
var ICON_WIDTH = 60;
|
||||
var ICON_HEIGHT = 60;
|
||||
var overlay = null;
|
||||
|
||||
|
||||
function recordingEntity() {
|
||||
_this = this;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
function receivingMessage(channel, message, senderID) {
|
||||
print("message received on channel:" + channel + ", message:" + message + ", senderID:" + senderID);
|
||||
if(message === startMessage) {
|
||||
_this.startRecording();
|
||||
} else if(message === stopMessage) {
|
||||
_this.stopRecording();
|
||||
if (channel === MASTER_TO_CLIENTS_CHANNEL) {
|
||||
print("CLIENT received message:" + message);
|
||||
if (message === START_MESSAGE) {
|
||||
_this.startRecording();
|
||||
} else if (message === STOP_MESSAGE) {
|
||||
_this.stopRecording();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function getClipUrl(url) {
|
||||
Messages.sendMessage(CLIENTS_TO_MASTER_CHANNEL, url); //send back the url to the master
|
||||
print("clip uploaded and url sent to master");
|
||||
};
|
||||
|
||||
recordingEntity.prototype = {
|
||||
|
||||
preload: function (entityID) {
|
||||
|
@ -50,21 +65,32 @@
|
|||
|
||||
enterEntity: function (entityID) {
|
||||
print("entering in the recording area");
|
||||
Messages.subscribe(channel);
|
||||
|
||||
Messages.subscribe(MASTER_TO_CLIENTS_CHANNEL);
|
||||
overlay = Overlays.addOverlay("image", {
|
||||
imageURL: NOT_RECORDING_ICON_URL,
|
||||
width: ICON_HEIGHT,
|
||||
height: ICON_WIDTH,
|
||||
x: 275,
|
||||
y: 0,
|
||||
visible: true
|
||||
});
|
||||
},
|
||||
|
||||
leaveEntity: function (entityID) {
|
||||
print("leaving the recording area");
|
||||
_this.stopRecording();
|
||||
Messages.unsubscribe(channel);
|
||||
Messages.unsubscribe(MASTER_TO_CLIENTS_CHANNEL);
|
||||
Overlays.deleteOverlay(overlay);
|
||||
overlay = null;
|
||||
},
|
||||
|
||||
startRecording: function (entityID) {
|
||||
if (!isAvatarRecording) {
|
||||
print("RECORDING STARTED");
|
||||
Messages.sendMessage(CLIENTS_TO_MASTER_CHANNEL, PARTICIPATING_MESSAGE); //tell to master that I'm participating
|
||||
Recording.startRecording();
|
||||
isAvatarRecording = true;
|
||||
Overlays.editOverlay(overlay, {imageURL: RECORDING_ICON_URL});
|
||||
}
|
||||
},
|
||||
|
||||
|
@ -73,18 +99,20 @@
|
|||
print("RECORDING ENDED");
|
||||
Recording.stopRecording();
|
||||
isAvatarRecording = false;
|
||||
recordingFile = Window.save("Save recording to file", "./groupRecording", "Recordings (*.hfr)");
|
||||
if (!(recordingFile === "null" || recordingFile === null || recordingFile === "")) {
|
||||
Recording.saveRecording(recordingFile);
|
||||
}
|
||||
Recording.saveRecordingToAsset(getClipUrl); //save the clip to the asset and link a callback to get its url
|
||||
Overlays.editOverlay(overlay, {imageURL: NOT_RECORDING_ICON_URL});
|
||||
}
|
||||
},
|
||||
|
||||
unload: function (entityID) {
|
||||
print("RECORDING ENTITY UNLOAD");
|
||||
_this.stopRecording();
|
||||
Messages.unsubscribe(channel);
|
||||
Messages.unsubscribe(MASTER_TO_CLIENTS_CHANNEL);
|
||||
Messages.messageReceived.disconnect(receivingMessage);
|
||||
if(overlay !== null){
|
||||
Overlays.deleteOverlay(overlay);
|
||||
overlay = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -22,12 +22,24 @@ var TOOL_ICON_URL = HIFI_PUBLIC_BUCKET + "images/tools/";
|
|||
var ALPHA_ON = 1.0;
|
||||
var ALPHA_OFF = 0.7;
|
||||
var COLOR_TOOL_BAR = { red: 0, green: 0, blue: 0 };
|
||||
var MASTER_TO_CLIENTS_CHANNEL = "startStopChannel";
|
||||
var CLIENTS_TO_MASTER_CHANNEL = "resultsChannel";
|
||||
var START_MESSAGE = "recordingStarted";
|
||||
var STOP_MESSAGE = "recordingEnded";
|
||||
var PARTICIPATING_MESSAGE = "participatingToRecording";
|
||||
var TIMEOUT = 20;
|
||||
|
||||
var toolBar = null;
|
||||
var recordIcon;
|
||||
var isRecording = false;
|
||||
var channel = "groupRecordingChannel";
|
||||
Messages.subscribe(channel);
|
||||
var performanceJSON = { "avatarClips" : [] };
|
||||
var responsesExpected = 0;
|
||||
var waitingForPerformanceFile = true;
|
||||
var totalWaitingTime = 0;
|
||||
var extension = "txt";
|
||||
|
||||
|
||||
Messages.subscribe(CLIENTS_TO_MASTER_CHANNEL);
|
||||
setupToolBar();
|
||||
|
||||
function setupToolBar() {
|
||||
|
@ -50,28 +62,82 @@ function setupToolBar() {
|
|||
visible: true,
|
||||
}, true, isRecording);
|
||||
}
|
||||
toolBar.selectTool(recordIcon, !isRecording);
|
||||
|
||||
function mousePressEvent(event) {
|
||||
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
|
||||
if (recordIcon === toolBar.clicked(clickedOverlay, false)) {
|
||||
if (!isRecording) {
|
||||
print("I'm the master. I want to start recording");
|
||||
var message = "RECONDING STARTED";
|
||||
Messages.sendMessage(channel, message);
|
||||
Messages.sendMessage(MASTER_TO_CLIENTS_CHANNEL, START_MESSAGE);
|
||||
isRecording = true;
|
||||
} else {
|
||||
print("I want to stop recording");
|
||||
var message = "RECONDING ENDED";
|
||||
Messages.sendMessage(channel, message);
|
||||
waitingForPerformanceFile = true;
|
||||
Script.update.connect(update);
|
||||
Messages.sendMessage(MASTER_TO_CLIENTS_CHANNEL, STOP_MESSAGE);
|
||||
isRecording = false;
|
||||
}
|
||||
toolBar.selectTool(recordIcon, !isRecording);
|
||||
}
|
||||
}
|
||||
|
||||
function masterReceivingMessage(channel, message, senderID) {
|
||||
if (channel === CLIENTS_TO_MASTER_CHANNEL) {
|
||||
print("MASTER received message:" + message );
|
||||
if (message === PARTICIPATING_MESSAGE) {
|
||||
//increment the counter of all the participants
|
||||
responsesExpected++;
|
||||
} else if (waitingForPerformanceFile) {
|
||||
//I get an atp url from one participant
|
||||
performanceJSON.avatarClips[performanceJSON.avatarClips.length] = message;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function update(deltaTime) {
|
||||
if (waitingForPerformanceFile) {
|
||||
totalWaitingTime += deltaTime;
|
||||
if (totalWaitingTime > TIMEOUT || performanceJSON.avatarClips.length === responsesExpected) {
|
||||
if (performanceJSON.avatarClips.length !== 0) {
|
||||
print("UPLOADING PERFORMANCE FILE");
|
||||
//I can upload the performance file on the asset
|
||||
Assets.uploadData(JSON.stringify(performanceJSON), extension, uploadFinished);
|
||||
} else {
|
||||
print("PERFORMANCE FILE EMPTY");
|
||||
}
|
||||
//clean things after upload performance file to asset
|
||||
waitingForPerformanceFile = false;
|
||||
responsesExpected = 0;
|
||||
totalWaitingTime = 0;
|
||||
Script.update.disconnect(update);
|
||||
performanceJSON = { "avatarClips" : [] };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function uploadFinished(url){
|
||||
//need to print somehow the url here this way the master can copy the url
|
||||
print("PERFORMANCE FILE URL: " + url);
|
||||
Assets.downloadData(url, function (data) {
|
||||
printPerformanceJSON(JSON.parse(data));
|
||||
});
|
||||
}
|
||||
|
||||
function printPerformanceJSON(obj) {
|
||||
print("some info:");
|
||||
print("downloaded performance file from asset and examinating its content...");
|
||||
var avatarClips = obj.avatarClips;
|
||||
avatarClips.forEach(function(param) {
|
||||
print("clip url obtained: " + param);
|
||||
});
|
||||
}
|
||||
|
||||
function cleanup() {
|
||||
toolBar.cleanup();
|
||||
Messages.unsubscribe(channel);
|
||||
Messages.unsubscribe(CLIENTS_TO_MASTER_CHANNEL);
|
||||
}
|
||||
|
||||
Script.scriptEnding.connect(cleanup);
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
||||
Messages.messageReceived.connect(masterReceivingMessage);
|
|
@ -1,90 +0,0 @@
|
|||
//
|
||||
// synchronizerEntityScript.js
|
||||
// examples/entityScripts
|
||||
//
|
||||
// Created by Alessandro Signa on 11/12/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
|
||||
// This script shows how to create a synchronized event between avatars trhough an entity.
|
||||
// It works using the entity's userData: the master change its value and every client checks it every frame
|
||||
// This entity prints a message when the event starts and when it ends.
|
||||
// The client running synchronizerMaster.js is the event master and it decides when the event starts/ends by pressing a button.
|
||||
// All the avatars in the area when the master presses the button will receive a message.
|
||||
//
|
||||
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
||||
|
||||
|
||||
|
||||
(function() {
|
||||
var insideArea = false;
|
||||
var isJoiningTheEvent = false;
|
||||
var _this;
|
||||
|
||||
|
||||
|
||||
function ParamsEntity() {
|
||||
_this = this;
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
ParamsEntity.prototype = {
|
||||
update: function(){
|
||||
var userData = JSON.parse(Entities.getEntityProperties(_this.entityID, ["userData"]).userData);
|
||||
var valueToCheck = userData.myKey.valueToCheck;
|
||||
if(valueToCheck && !isJoiningTheEvent){
|
||||
_this.sendMessage();
|
||||
}else if((!valueToCheck && isJoiningTheEvent) || (isJoiningTheEvent && !insideArea)){
|
||||
_this.stopMessage();
|
||||
}
|
||||
},
|
||||
preload: function(entityID) {
|
||||
print('entity loaded')
|
||||
this.entityID = entityID;
|
||||
Script.update.connect(_this.update);
|
||||
},
|
||||
enterEntity: function(entityID) {
|
||||
print("enterEntity("+entityID+")");
|
||||
var userData = JSON.parse(Entities.getEntityProperties(_this.entityID, ["userData"]).userData);
|
||||
var valueToCheck = userData.myKey.valueToCheck;
|
||||
if(!valueToCheck){
|
||||
//i'm in the area in time (before the event starts)
|
||||
insideArea = true;
|
||||
}
|
||||
change(entityID);
|
||||
},
|
||||
leaveEntity: function(entityID) {
|
||||
print("leaveEntity("+entityID+")");
|
||||
Entities.editEntity(entityID, { color: { red: 255, green: 190, blue: 20} });
|
||||
insideArea = false;
|
||||
},
|
||||
|
||||
sendMessage: function(myID){
|
||||
if(insideArea && !isJoiningTheEvent){
|
||||
print("The event started");
|
||||
isJoiningTheEvent = true;
|
||||
}
|
||||
},
|
||||
|
||||
stopMessage: function(myID){
|
||||
if(isJoiningTheEvent){
|
||||
print("The event ended");
|
||||
isJoiningTheEvent = false;
|
||||
}
|
||||
},
|
||||
clean: function(entityID) {
|
||||
Script.update.disconnect(_this.update);
|
||||
}
|
||||
}
|
||||
|
||||
function change(entityID) {
|
||||
Entities.editEntity(entityID, { color: { red: 255, green: 100, blue: 220} });
|
||||
}
|
||||
|
||||
|
||||
return new ParamsEntity();
|
||||
});
|
|
@ -1,117 +0,0 @@
|
|||
//
|
||||
// synchronizerMaster.js
|
||||
// examples/entityScripts
|
||||
//
|
||||
// Created by Alessandro Signa on 11/12/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Run this script to spawn a box (synchronizer) and drive the start/end of the event for anyone who is inside the box
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
||||
var PARAMS_SCRIPT_URL = Script.resolvePath('synchronizerEntityScript.js');
|
||||
|
||||
|
||||
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
|
||||
Script.include("../libraries/toolBars.js");
|
||||
Script.include("../libraries/utils.js");
|
||||
|
||||
|
||||
|
||||
var rotation = Quat.safeEulerAngles(Camera.getOrientation());
|
||||
rotation = Quat.fromPitchYawRollDegrees(0, rotation.y, 0);
|
||||
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(1, Quat.getFront(rotation)));
|
||||
|
||||
var TOOL_ICON_URL = HIFI_PUBLIC_BUCKET + "images/tools/";
|
||||
var ALPHA_ON = 1.0;
|
||||
var ALPHA_OFF = 0.7;
|
||||
var COLOR_TOOL_BAR = { red: 0, green: 0, blue: 0 };
|
||||
|
||||
var toolBar = null;
|
||||
var recordIcon;
|
||||
|
||||
|
||||
|
||||
var isHappening = false;
|
||||
|
||||
var testEntity = Entities.addEntity({
|
||||
name: 'paramsTestEntity',
|
||||
dimensions: {
|
||||
x: 2,
|
||||
y: 1,
|
||||
z: 2
|
||||
},
|
||||
type: 'Box',
|
||||
position: center,
|
||||
color: {
|
||||
red: 255,
|
||||
green: 255,
|
||||
blue: 255
|
||||
},
|
||||
visible: true,
|
||||
ignoreForCollisions: true,
|
||||
script: PARAMS_SCRIPT_URL,
|
||||
|
||||
userData: JSON.stringify({
|
||||
myKey: {
|
||||
valueToCheck: false
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
|
||||
setupToolBar();
|
||||
|
||||
function setupToolBar() {
|
||||
if (toolBar != null) {
|
||||
print("Multiple calls to setupToolBar()");
|
||||
return;
|
||||
}
|
||||
Tool.IMAGE_HEIGHT /= 2;
|
||||
Tool.IMAGE_WIDTH /= 2;
|
||||
|
||||
toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL); //put the button in the up-left corner
|
||||
|
||||
toolBar.setBack(COLOR_TOOL_BAR, ALPHA_OFF);
|
||||
|
||||
recordIcon = toolBar.addTool({
|
||||
imageURL: TOOL_ICON_URL + "recording-record.svg",
|
||||
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
|
||||
x: 0, y: 0,
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: MyAvatar.isPlaying() ? ALPHA_OFF : ALPHA_ON,
|
||||
visible: true
|
||||
}, true, isHappening);
|
||||
|
||||
}
|
||||
|
||||
function mousePressEvent(event) {
|
||||
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
|
||||
if (recordIcon === toolBar.clicked(clickedOverlay, false)) {
|
||||
if (!isHappening) {
|
||||
print("I'm the event master. I want the event starts");
|
||||
isHappening = true;
|
||||
setEntityCustomData("myKey", testEntity, {valueToCheck: true});
|
||||
|
||||
} else {
|
||||
print("I want the event stops");
|
||||
isHappening = false;
|
||||
setEntityCustomData("myKey", testEntity, {valueToCheck: false});
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function cleanup() {
|
||||
toolBar.cleanup();
|
||||
Entities.callEntityMethod(testEntity, 'clean'); //have to call this before deleting to avoid the JSON warnings
|
||||
Entities.deleteEntity(testEntity);
|
||||
}
|
||||
|
||||
|
||||
|
||||
Script.scriptEnding.connect(cleanup);
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
examples/example/assetsExample.js (new file, 11 lines)
@@ -0,0 +1,11 @@
var data = "this is some data";
var extension = "txt";
var uploadedFile;

Assets.uploadData(data, extension, function (url) {
    print("data uploaded to:" + url);
    uploadedFile = url;
    Assets.downloadData(url, function (data) {
        print("data downloaded from:" + url + " the data is:" + data);
    });
});
examples/example/avatarcontrol/graspHands.js (new file, 68 lines)
@@ -0,0 +1,68 @@
// graspHands.js
//
// Created by James B. Pollack @imgntn -- 11/19/2015
// Copyright 2015 High Fidelity, Inc.
//
// Shows how to use the animation API to grasp an Avatar's hands.
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html

//choose a hand. set it programmatically if you'd like
var handToGrasp = 'LEFT_HAND';

//this is our handler, where we do the actual work of changing animation settings
function graspHand(animationProperties) {
    var result = {};
    //full alpha on overlay for this hand
    //set grab to true
    //set idle to false
    //full alpha on the blend btw open and grab
    if (handToGrasp === 'RIGHT_HAND') {
        result['rightHandOverlayAlpha'] = 1.0;
        result['isRightHandGrab'] = true;
        result['isRightHandIdle'] = false;
        result['rightHandGrabBlend'] = 1.0;
    } else if (handToGrasp === 'LEFT_HAND') {
        result['leftHandOverlayAlpha'] = 1.0;
        result['isLeftHandGrab'] = true;
        result['isLeftHandIdle'] = false;
        result['leftHandGrabBlend'] = 1.0;
    }
    //return an object with our updated settings
    return result;
}

//keep a reference to this so we can clear it
var handler;

//register our handler with the animation system
function startHandGrasp() {
    if (handToGrasp === 'RIGHT_HAND') {
        handler = MyAvatar.addAnimationStateHandler(graspHand, ['isRightHandGrab']);
    } else if (handToGrasp === 'LEFT_HAND') {
        handler = MyAvatar.addAnimationStateHandler(graspHand, ['isLeftHandGrab']);
    }
}

function endHandGrasp() {
    // Tell the animation system we don't need any more callbacks.
    MyAvatar.removeAnimationStateHandler(handler);
}

//make sure to clean this up when the script ends so we don't get stuck.
Script.scriptEnding.connect(function() {
    Script.clearInterval(graspInterval);
    endHandGrasp();
})

//set an interval and toggle grasping
var isGrasping = false;
var graspInterval = Script.setInterval(function() {
    if (isGrasping === false) {
        startHandGrasp();
        isGrasping = true;
    } else {
        endHandGrasp();
        isGrasping = false;
    }
}, 1000)
|
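graspHands.js drives whichever hand handToGrasp names. Registering one handler per hand extends the same pattern to both hands at once; a sketch using only the animation-state keys listed above:

// grasp both hands at once (sketch built from the handlers above)
var bothHandlers = [];

function graspBothHands() {
    bothHandlers.push(MyAvatar.addAnimationStateHandler(function(animationProperties) {
        return {
            leftHandOverlayAlpha: 1.0,
            isLeftHandGrab: true,
            isLeftHandIdle: false,
            leftHandGrabBlend: 1.0
        };
    }, ['isLeftHandGrab']));

    bothHandlers.push(MyAvatar.addAnimationStateHandler(function(animationProperties) {
        return {
            rightHandOverlayAlpha: 1.0,
            isRightHandGrab: true,
            isRightHandIdle: false,
            rightHandGrabBlend: 1.0
        };
    }, ['isRightHandGrab']));
}

function releaseBothHands() {
    bothHandlers.forEach(function(h) {
        MyAvatar.removeAnimationStateHandler(h);
    });
    bothHandlers = [];
}

Script.scriptEnding.connect(releaseBothHands);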
|
@ -76,7 +76,7 @@ var playerSphere = Entities.addEntity({
|
|||
z: 0
|
||||
},
|
||||
collisionsWillMove: true,
|
||||
linearDamping: 0.2
|
||||
damping: 0.2
|
||||
});
|
||||
|
||||
Script.setInterval(function(){
|
||||
|
|
|
@ -19,10 +19,11 @@ var MAX_LINE_LENGTH = 40; // This must be 2 or greater;
|
|||
var DEFAULT_STROKE_WIDTH = 0.1;
|
||||
var DEFAULT_LIFETIME = 20;
|
||||
var DEFAULT_COLOR = { red: 255, green: 255, blue: 255 };
|
||||
var PolyLine = function(position, color, lifetime) {
|
||||
var PolyLine = function(position, color, lifetime, texture) {
|
||||
this.position = position;
|
||||
this.color = color;
|
||||
this.lifetime = lifetime === undefined ? DEFAULT_LIFETIME : lifetime;
|
||||
this.texture = texture ? texture : "";
|
||||
this.points = [
|
||||
];
|
||||
this.strokeWidths = [
|
||||
|
@ -37,7 +38,8 @@ var PolyLine = function(position, color, lifetime) {
|
|||
strokeWidths: this.strokeWidths,
|
||||
dimensions: LINE_DIMENSIONS,
|
||||
color: color,
|
||||
lifetime: lifetime
|
||||
lifetime: lifetime,
|
||||
textures: this.texture
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -98,26 +100,29 @@ PolyLine.prototype.destroy = function() {
|
|||
|
||||
|
||||
// InfiniteLine
|
||||
InfiniteLine = function(position, color, lifetime) {
|
||||
InfiniteLine = function(position, color, lifetime, textureBegin, textureMiddle) {
|
||||
this.position = position;
|
||||
this.color = color;
|
||||
this.lifetime = lifetime === undefined ? DEFAULT_LIFETIME : lifetime;
|
||||
this.lines = [];
|
||||
this.size = 0;
|
||||
|
||||
this.textureBegin = textureBegin ? textureBegin : "";
|
||||
this.textureMiddle = textureMiddle ? textureMiddle : "";
|
||||
};
|
||||
|
||||
InfiniteLine.prototype.enqueuePoint = function(position, strokeWidth) {
|
||||
var currentLine;
|
||||
|
||||
if (this.lines.length == 0) {
|
||||
currentLine = new PolyLine(position, this.color, this.lifetime);
|
||||
currentLine = new PolyLine(position, this.color, this.lifetime, this.textureBegin);
|
||||
this.lines.push(currentLine);
|
||||
} else {
|
||||
currentLine = this.lines[this.lines.length - 1];
|
||||
}
|
||||
|
||||
if (currentLine.isFull()) {
|
||||
var newLine = new PolyLine(currentLine.getLastPoint(), this.color, this.lifetime);
|
||||
var newLine = new PolyLine(currentLine.getLastPoint(), this.color, this.lifetime, this.textureMiddle);
|
||||
newLine.enqueuePoint(currentLine.getLastPoint(), strokeWidth);
|
||||
this.lines.push(newLine);
|
||||
currentLine = newLine;
|
||||
|
|
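With the texture arguments added above, each PolyLine the line library creates can carry its own textures value (the entity-side support is in the PolyLineEntityItem changes later in this commit). A sketch of creating a textured PolyLine directly; the texture URL and the geometry values are placeholders:

// sketch: a short textured PolyLine (URL and point values are placeholders)
var texturedLine = Entities.addEntity({
    type: "PolyLine",
    position: MyAvatar.position,
    dimensions: { x: 10, y: 10, z: 10 },
    linePoints: [ { x: 0, y: 0, z: 0 }, { x: 0, y: 0.5, z: 0 }, { x: 0, y: 1, z: 0 } ],
    normals: [ { x: 0, y: 0, z: 1 }, { x: 0, y: 0, z: 1 }, { x: 0, y: 0, z: 1 } ],
    strokeWidths: [ 0.05, 0.05, 0.05 ],
    textures: "http://example.com/paintStroke.png",
    lifetime: 20
});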
|
@ -20,7 +20,7 @@
|
|||
var _this;
|
||||
var RIGHT_HAND = 1;
|
||||
var LEFT_HAND = 0;
|
||||
var MIN_POINT_DISTANCE = 0.01 ;
|
||||
var MIN_POINT_DISTANCE = 0.01;
|
||||
var MAX_POINT_DISTANCE = 0.5;
|
||||
var MAX_POINTS_PER_LINE = 40;
|
||||
var MAX_DISTANCE = 5;
|
||||
|
@ -29,6 +29,11 @@
|
|||
var MIN_STROKE_WIDTH = 0.0005;
|
||||
var MAX_STROKE_WIDTH = 0.03;
|
||||
|
||||
var TRIGGER_CONTROLS = [
|
||||
Controller.Standard.LT,
|
||||
Controller.Standard.RT,
|
||||
];
|
||||
|
||||
Whiteboard = function() {
|
||||
_this = this;
|
||||
};
|
||||
|
@ -51,11 +56,9 @@
|
|||
if (this.hand === RIGHT_HAND) {
|
||||
this.getHandPosition = MyAvatar.getRightPalmPosition;
|
||||
this.getHandRotation = MyAvatar.getRightPalmRotation;
|
||||
this.triggerAction = Controller.findAction("RIGHT_HAND_CLICK");
|
||||
} else if (this.hand === LEFT_HAND) {
|
||||
this.getHandPosition = MyAvatar.getLeftPalmPosition;
|
||||
this.getHandRotation = MyAvatar.getLeftPalmRotation;
|
||||
this.triggerAction = Controller.findAction("LEFT_HAND_CLICK");
|
||||
}
|
||||
Overlays.editOverlay(this.laserPointer, {
|
||||
visible: true
|
||||
|
@ -76,7 +79,7 @@
|
|||
if (this.intersection.intersects) {
|
||||
var distance = Vec3.distance(handPosition, this.intersection.intersection);
|
||||
if (distance < MAX_DISTANCE) {
|
||||
this.triggerValue = Controller.getActionValue(this.triggerAction);
|
||||
this.triggerValue = Controller.getValue(TRIGGER_CONTROLS[this.hand]);
|
||||
this.currentStrokeWidth = map(this.triggerValue, 0, 1, MIN_STROKE_WIDTH, MAX_STROKE_WIDTH);
|
||||
var displayPoint = this.intersection.intersection;
|
||||
displayPoint = Vec3.sum(displayPoint, Vec3.multiply(this.normal, 0.01));
|
||||
|
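The whiteboard now reads trigger pressure through Controller.getValue() on the standard controller channels instead of the old findAction()/getActionValue() pair. The new pattern in isolation, as a self-contained sketch:

// sketch: poll the right-hand trigger and map it to a stroke width
var MIN_STROKE_WIDTH = 0.0005;
var MAX_STROKE_WIDTH = 0.03;

Script.update.connect(function(deltaTime) {
    var triggerValue = Controller.getValue(Controller.Standard.RT); // 0.0 .. 1.0
    var strokeWidth = MIN_STROKE_WIDTH + triggerValue * (MAX_STROKE_WIDTH - MIN_STROKE_WIDTH);
    // strokeWidth is ready to hand to the next line segment
});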
@ -184,7 +187,7 @@
|
|||
},
|
||||
|
||||
stopFarTrigger: function() {
|
||||
if(this.hand !== this.whichHand) {
|
||||
if (this.hand !== this.whichHand) {
|
||||
return;
|
||||
}
|
||||
this.stopPainting();
|
||||
|
@ -209,7 +212,7 @@
|
|||
entities.forEach(function(entity) {
|
||||
var props = Entities.getEntityProperties(entity, ["name, userData"]);
|
||||
var name = props.name;
|
||||
if(!props.userData) {
|
||||
if (!props.userData) {
|
||||
return;
|
||||
}
|
||||
var whiteboardID = JSON.parse(props.userData).whiteboard;
|
||||
|
|
|
@ -41,7 +41,7 @@ var rack = Entities.addEntity({
|
|||
y: -9.8,
|
||||
z: 0
|
||||
},
|
||||
linearDamping: 1,
|
||||
damping: 1,
|
||||
dimensions: {
|
||||
x: 0.4,
|
||||
y: 1.37,
|
||||
|
@ -83,7 +83,7 @@ function createBalls() {
|
|||
z: DIAMETER
|
||||
},
|
||||
restitution: 1.0,
|
||||
linearDamping: 0.00001,
|
||||
damping: 0.00001,
|
||||
gravity: {
|
||||
x: 0,
|
||||
y: -9.8,
|
||||
|
|
|
@ -46,7 +46,7 @@ function makeBasketball() {
|
|||
collisionSoundURL: collisionSoundURL,
|
||||
modelURL: basketballURL,
|
||||
restitution: 1.0,
|
||||
linearDamping: 0.00001,
|
||||
damping: 0.00001,
|
||||
shapeType: "sphere"
|
||||
});
|
||||
originalPosition = position;
|
||||
|
|
|
@ -88,7 +88,7 @@ var topBlock = Entities.addEntity({
|
|||
dimensions: blockDimensions,
|
||||
position: topBlock_position,
|
||||
rotation: topBlock_rotation,
|
||||
linearDamping: LINEAR_DAMPING,
|
||||
damping: LINEAR_DAMPING,
|
||||
gravity: BLOCK_GRAVITY,
|
||||
collisionsWillMove: true,
|
||||
velocity: {
|
||||
|
@ -106,7 +106,7 @@ var sideBlock1 = Entities.addEntity({
|
|||
dimensions: blockDimensions,
|
||||
position: sideBlock1_position,
|
||||
rotation: sideBlock1_rotation,
|
||||
linearDamping: LINEAR_DAMPING,
|
||||
damping: LINEAR_DAMPING,
|
||||
gravity: BLOCK_GRAVITY,
|
||||
collisionsWillMove: true
|
||||
});
|
||||
|
@ -120,7 +120,7 @@ var sideBlock2 = Entities.addEntity({
|
|||
position: sideBlock2_position,
|
||||
rotation: sideBlock2_rotation,
|
||||
collsionsWillMove: true,
|
||||
linearDamping: LINEAR_DAMPING,
|
||||
damping: LINEAR_DAMPING,
|
||||
gravity: BLOCK_GRAVITY,
|
||||
collisionsWillMove: true
|
||||
});
|
||||
|
|
|
@ -12,8 +12,8 @@
|
|||
|
||||
Script.include("../../libraries/utils.js");
|
||||
|
||||
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/wand.fbx';
|
||||
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/actual_no_top_collision_hull.obj';
|
||||
var WAND_MODEL = 'http://hifi-content.s3.amazonaws.com/james/bubblewand/wand.fbx';
|
||||
var WAND_COLLISION_SHAPE = 'http://hifi-content.s3.amazonaws.com/james/bubblewand/wand_collision_hull.obj';
|
||||
|
||||
var WAND_SCRIPT_URL = Script.resolvePath("wand.js");
|
||||
|
||||
|
@ -43,5 +43,18 @@ var wand = Entities.addEntity({
|
|||
//must be enabled to be grabbable in the physics engine
|
||||
collisionsWillMove: true,
|
||||
compoundShapeURL: WAND_COLLISION_SHAPE,
|
||||
script: WAND_SCRIPT_URL
|
||||
script: WAND_SCRIPT_URL,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
invertSolidWhileHeld: true,
|
||||
spatialKey: {
|
||||
relativePosition: {
|
||||
x: 0,
|
||||
y: 0.1,
|
||||
z: 0
|
||||
},
|
||||
relativeRotation: Quat.fromPitchYawRollDegrees(0, 0, 90)
|
||||
}
|
||||
}
|
||||
})
|
||||
});
|
|
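The wand's new userData marks it grabbable and supplies a spatialKey describing how it sits in the hand. Other scripts can read that configuration back off the entity; a sketch, using the wand handle returned by Entities.addEntity() above:

// sketch: read the grab configuration back off the wand entity
var wandProps = Entities.getEntityProperties(wand, ["userData"]);
var wandUserData = wandProps.userData ? JSON.parse(wandProps.userData) : {};
var grabKey = wandUserData.grabbableKey || {};

print("invert solid while held: " + grabKey.invertSolidWhileHeld);
if (grabKey.spatialKey) {
    print("held at offset: " + JSON.stringify(grabKey.spatialKey.relativePosition));
}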
@ -12,7 +12,7 @@
|
|||
|
||||
/*global MyAvatar, Entities, AnimationCache, SoundCache, Scene, Camera, Overlays, HMD, AvatarList, AvatarManager, Controller, UndoStack, Window, Account, GlobalServices, Script, ScriptDiscoveryService, LODManager, Menu, Vec3, Quat, AudioDevice, Paths, Clipboard, Settings, XMLHttpRequest, randFloat, randInt */
|
||||
|
||||
(function () {
|
||||
(function() {
|
||||
|
||||
Script.include("../../libraries/utils.js");
|
||||
|
||||
|
@ -28,7 +28,7 @@
|
|||
var BUBBLE_LIFETIME_MAX = 8;
|
||||
var BUBBLE_SIZE_MIN = 0.02;
|
||||
var BUBBLE_SIZE_MAX = 0.1;
|
||||
var BUBBLE_LINEAR_DAMPING = 0.4;
|
||||
var BUBBLE_LINEAR_DAMPING = 0.2;
|
||||
var BUBBLE_GRAVITY_MIN = 0.1;
|
||||
var BUBBLE_GRAVITY_MAX = 0.3;
|
||||
var GROWTH_FACTOR = 0.005;
|
||||
|
@ -58,23 +58,23 @@
|
|||
BubbleWand.prototype = {
|
||||
timePassed: null,
|
||||
currentBubble: null,
|
||||
preload: function (entityID) {
|
||||
preload: function(entityID) {
|
||||
this.entityID = entityID;
|
||||
},
|
||||
getWandTipPosition: function (properties) {
|
||||
getWandTipPosition: function(properties) {
|
||||
//the tip of the wand is going to be in a different place than the center, so we move in space relative to the model to find that position
|
||||
var upVector = Quat.getUp(properties.rotation);
|
||||
var upOffset = Vec3.multiply(upVector, WAND_TIP_OFFSET);
|
||||
var wandTipPosition = Vec3.sum(properties.position, upOffset);
|
||||
return wandTipPosition;
|
||||
},
|
||||
addCollisionsToBubbleAfterCreation: function (bubble) {
|
||||
addCollisionsToBubbleAfterCreation: function(bubble) {
|
||||
//if the bubble collide immediately, we get weird effects. so we add collisions after release
|
||||
Entities.editEntity(bubble, {
|
||||
collisionsWillMove: true
|
||||
});
|
||||
},
|
||||
randomizeBubbleGravity: function () {
|
||||
randomizeBubbleGravity: function() {
|
||||
//change up the gravity a little bit for variation in floating effects
|
||||
var randomNumber = randFloat(BUBBLE_GRAVITY_MIN, BUBBLE_GRAVITY_MAX);
|
||||
var gravity = {
|
||||
|
@ -84,7 +84,7 @@
|
|||
};
|
||||
return gravity;
|
||||
},
|
||||
growBubbleWithWandVelocity: function (properties, deltaTime) {
|
||||
growBubbleWithWandVelocity: function(properties, deltaTime) {
|
||||
//get the wand and tip position for calculations
|
||||
var wandPosition = properties.position;
|
||||
this.getWandTipPosition(properties);
|
||||
|
@ -145,7 +145,7 @@
|
|||
dimensions: dimensions
|
||||
});
|
||||
},
|
||||
createBubbleAtTipOfWand: function () {
|
||||
createBubbleAtTipOfWand: function() {
|
||||
|
||||
//create a new bubble at the tip of the wand
|
||||
var properties = Entities.getEntityProperties(this.entityID, ["position", "rotation"]);
|
||||
|
@ -162,24 +162,23 @@
|
|||
position: this.getWandTipPosition(properties),
|
||||
dimensions: BUBBLE_INITIAL_DIMENSIONS,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: false,
|
||||
linearDamping: BUBBLE_LINEAR_DAMPING,
|
||||
ignoreForCollisions: true,
|
||||
damping: BUBBLE_LINEAR_DAMPING,
|
||||
shapeType: "sphere"
|
||||
});
|
||||
|
||||
},
|
||||
startNearGrab: function () {
|
||||
startNearGrab: function() {
|
||||
//create a bubble to grow at the start of the grab
|
||||
if (this.currentBubble === null) {
|
||||
this.createBubbleAtTipOfWand();
|
||||
}
|
||||
},
|
||||
continueNearGrab: function () {
|
||||
continueNearGrab: function() {
|
||||
var deltaTime = checkInterval();
|
||||
//only get the properties that we need
|
||||
var properties = Entities.getEntityProperties(this.entityID, ["position", "rotation"]);
|
||||
|
||||
|
||||
var wandTipPosition = this.getWandTipPosition(properties);
|
||||
|
||||
//update the bubble to stay with the wand tip
|
||||
|
@ -189,7 +188,7 @@
|
|||
this.growBubbleWithWandVelocity(properties, deltaTime);
|
||||
|
||||
},
|
||||
releaseGrab: function () {
|
||||
releaseGrab: function() {
|
||||
//delete the current buble and reset state when the wand is released
|
||||
Entities.deleteEntity(this.currentBubble);
|
||||
this.currentBubble = null;
|
||||
|
|
|
@ -123,7 +123,7 @@
|
|||
type:'Sphere',
|
||||
color: BALL_COLOR,
|
||||
dimensions: BALL_DIMENSIONS,
|
||||
linearDamping: BALL_LINEAR_DAMPING,
|
||||
damping: BALL_LINEAR_DAMPING,
|
||||
gravity: BALL_GRAVITY,
|
||||
restitution: BALL_RESTITUTION,
|
||||
collisionsWillMove: true,
|
||||
|
|
|
@ -5,5 +5,4 @@ setup_hifi_project(Network)
|
|||
|
||||
# link the shared hifi libraries
|
||||
link_hifi_libraries(embedded-webserver networking shared)
|
||||
|
||||
copy_dlls_beside_windows_executable()
|
||||
package_libraries_for_deployment()
|
||||
|
|
|
@ -201,4 +201,4 @@ else (APPLE)
|
|||
endif()
|
||||
endif (APPLE)
|
||||
|
||||
copy_dlls_beside_windows_executable()
|
||||
package_libraries_for_deployment()
|
||||
|
|
|
@ -2834,14 +2834,8 @@ void Application::update(float deltaTime) {
|
|||
myAvatar->setDriveKeys(TRANSLATE_Y, userInputMapper->getActionState(controller::Action::TRANSLATE_Y));
|
||||
myAvatar->setDriveKeys(TRANSLATE_X, userInputMapper->getActionState(controller::Action::TRANSLATE_X));
|
||||
if (deltaTime > FLT_EPSILON) {
|
||||
// For rotations what we really want are meausures of "angles per second" (in order to prevent
|
||||
// fps-dependent spin rates) so we need to scale the units of the controller contribution.
|
||||
// (TODO?: maybe we should similarly scale ALL action state info, or change the expected behavior
|
||||
// controllers to provide a delta_per_second value rather than a raw delta.)
|
||||
const float EXPECTED_FRAME_RATE = 60.0f;
|
||||
float timeFactor = EXPECTED_FRAME_RATE * deltaTime;
|
||||
myAvatar->setDriveKeys(PITCH, -1.0f * userInputMapper->getActionState(controller::Action::PITCH) / timeFactor);
|
||||
myAvatar->setDriveKeys(YAW, -1.0f * userInputMapper->getActionState(controller::Action::YAW) / timeFactor);
|
||||
myAvatar->setDriveKeys(PITCH, -1.0f * userInputMapper->getActionState(controller::Action::PITCH));
|
||||
myAvatar->setDriveKeys(YAW, -1.0f * userInputMapper->getActionState(controller::Action::YAW));
|
||||
myAvatar->setDriveKeys(STEP_YAW, -1.0f * userInputMapper->getActionState(controller::Action::STEP_YAW));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1187,7 +1187,7 @@ void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
|
|||
|
||||
// virtual
|
||||
void Avatar::rebuildSkeletonBody() {
|
||||
DependencyManager::get<AvatarManager>()->updateAvatarPhysicsShape(getSessionUUID());
|
||||
DependencyManager::get<AvatarManager>()->updateAvatarPhysicsShape(this);
|
||||
}
|
||||
|
||||
glm::vec3 Avatar::getLeftPalmPosition() {
|
||||
|
|
|
@ -173,7 +173,7 @@ protected:
|
|||
QVector<Model*> _attachmentModels;
|
||||
QVector<Model*> _attachmentsToRemove;
|
||||
QVector<Model*> _unusedAttachments;
|
||||
float _bodyYawDelta;
|
||||
float _bodyYawDelta; // degrees/sec
|
||||
|
||||
// These position histories and derivatives are in the world-frame.
|
||||
// The derivatives are the MEASURED results of all external and internal forces
|
||||
|
|
|
@ -327,4 +327,6 @@ void AvatarActionHold::deserialize(QByteArray serializedArguments) {
|
|||
|
||||
_active = true;
|
||||
});
|
||||
|
||||
forceBodyNonStatic();
|
||||
}
|
||||
|
|
|
@ -110,28 +110,34 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
|
|||
}
|
||||
|
||||
void AvatarManager::updateOtherAvatars(float deltaTime) {
|
||||
// lock the hash for read to check the size
|
||||
QReadLocker lock(&_hashLock);
|
||||
|
||||
if (_avatarHash.size() < 2 && _avatarFades.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
lock.unlock();
|
||||
|
||||
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
|
||||
PerformanceWarning warn(showWarnings, "Application::updateAvatars()");
|
||||
|
||||
PerformanceTimer perfTimer("otherAvatars");
|
||||
|
||||
// simulate avatars
|
||||
AvatarHash::iterator avatarIterator = _avatarHash.begin();
|
||||
while (avatarIterator != _avatarHash.end()) {
|
||||
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
|
||||
auto hashCopy = getHashCopy();
|
||||
|
||||
AvatarHash::iterator avatarIterator = hashCopy.begin();
|
||||
while (avatarIterator != hashCopy.end()) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
|
||||
|
||||
if (avatar == _myAvatar || !avatar->isInitialized()) {
|
||||
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
|
||||
// DO NOT update or fade out uninitialized Avatars
|
||||
++avatarIterator;
|
||||
} else if (avatar->shouldDie()) {
|
||||
removeAvatarMotionState(avatar);
|
||||
_avatarFades.push_back(avatarIterator.value());
|
||||
QWriteLocker locker(&_hashLock);
|
||||
avatarIterator = _avatarHash.erase(avatarIterator);
|
||||
removeAvatar(avatarIterator.key());
|
||||
++avatarIterator;
|
||||
} else {
|
||||
avatar->startUpdate();
|
||||
avatar->simulate(deltaTime);
|
||||
|
@ -148,7 +154,7 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
QVector<AvatarSharedPointer>::iterator fadingIterator = _avatarFades.begin();
|
||||
|
||||
const float SHRINK_RATE = 0.9f;
|
||||
const float MIN_FADE_SCALE = 0.001f;
|
||||
const float MIN_FADE_SCALE = MIN_AVATAR_SCALE;
|
||||
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
|
@ -156,7 +162,7 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
auto avatar = std::static_pointer_cast<Avatar>(*fadingIterator);
|
||||
avatar->startUpdate();
|
||||
avatar->setTargetScale(avatar->getScale() * SHRINK_RATE, true);
|
||||
if (avatar->getTargetScale() < MIN_FADE_SCALE) {
|
||||
if (avatar->getTargetScale() <= MIN_FADE_SCALE) {
|
||||
avatar->removeFromScene(*fadingIterator, scene, pendingChanges);
|
||||
fadingIterator = _avatarFades.erase(fadingIterator);
|
||||
} else {
|
||||
|
@ -169,19 +175,21 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
}
|
||||
|
||||
AvatarSharedPointer AvatarManager::newSharedAvatar() {
|
||||
return AvatarSharedPointer(std::make_shared<Avatar>(std::make_shared<AvatarRig>()));
|
||||
return std::make_shared<Avatar>(std::make_shared<AvatarRig>());
|
||||
}
|
||||
|
||||
// virtual
|
||||
AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
|
||||
auto avatar = std::dynamic_pointer_cast<Avatar>(AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer));
|
||||
auto newAvatar = AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer);
|
||||
auto rawRenderableAvatar = std::static_pointer_cast<Avatar>(newAvatar);
|
||||
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
|
||||
avatar->addToScene(avatar, scene, pendingChanges);
|
||||
rawRenderableAvatar->addToScene(rawRenderableAvatar, scene, pendingChanges);
|
||||
}
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
return avatar;
|
||||
|
||||
return newAvatar;
|
||||
}
|
||||
|
||||
// protected
|
||||
|
@ -200,20 +208,25 @@ void AvatarManager::removeAvatarMotionState(AvatarSharedPointer avatar) {
|
|||
|
||||
// virtual
|
||||
void AvatarManager::removeAvatar(const QUuid& sessionUUID) {
|
||||
AvatarHash::iterator avatarIterator = _avatarHash.find(sessionUUID);
|
||||
if (avatarIterator != _avatarHash.end()) {
|
||||
std::shared_ptr<Avatar> avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
|
||||
if (avatar != _myAvatar && avatar->isInitialized()) {
|
||||
removeAvatarMotionState(avatar);
|
||||
_avatarFades.push_back(avatarIterator.value());
|
||||
QWriteLocker locker(&_hashLock);
|
||||
_avatarHash.erase(avatarIterator);
|
||||
}
|
||||
QWriteLocker locker(&_hashLock);
|
||||
|
||||
auto removedAvatar = _avatarHash.take(sessionUUID);
|
||||
if (removedAvatar) {
|
||||
handleRemovedAvatar(removedAvatar);
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar) {
|
||||
AvatarHashMap::handleRemovedAvatar(removedAvatar);
|
||||
|
||||
removeAvatarMotionState(removedAvatar);
|
||||
_avatarFades.push_back(removedAvatar);
|
||||
}
|
||||
|
||||
void AvatarManager::clearOtherAvatars() {
|
||||
// clear any avatars that came from an avatar-mixer
|
||||
QWriteLocker locker(&_hashLock);
|
||||
|
||||
AvatarHash::iterator avatarIterator = _avatarHash.begin();
|
||||
while (avatarIterator != _avatarHash.end()) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
|
||||
|
@ -221,10 +234,10 @@ void AvatarManager::clearOtherAvatars() {
|
|||
// don't remove myAvatar or uninitialized avatars from the list
|
||||
++avatarIterator;
|
||||
} else {
|
||||
removeAvatarMotionState(avatar);
|
||||
_avatarFades.push_back(avatarIterator.value());
|
||||
QWriteLocker locker(&_hashLock);
|
||||
auto removedAvatar = avatarIterator.value();
|
||||
avatarIterator = _avatarHash.erase(avatarIterator);
|
||||
|
||||
handleRemovedAvatar(removedAvatar);
|
||||
}
|
||||
}
|
||||
_myAvatar->clearLookAtTargetAvatar();
|
||||
|
@ -252,6 +265,7 @@ QVector<QUuid> AvatarManager::getAvatarIdentifiers() {
|
|||
QReadLocker locker(&_hashLock);
|
||||
return _avatarHash.keys().toVector();
|
||||
}
|
||||
|
||||
AvatarData* AvatarManager::getAvatar(QUuid avatarID) {
|
||||
QReadLocker locker(&_hashLock);
|
||||
return _avatarHash[avatarID].get(); // Non-obvious: A bogus avatarID answers your own avatar.
|
||||
|
@ -317,23 +331,19 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
|
|||
}
|
||||
}
|
||||
|
||||
void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
|
||||
AvatarHash::iterator avatarItr = _avatarHash.find(id);
|
||||
if (avatarItr != _avatarHash.end()) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarItr.value());
|
||||
AvatarMotionState* motionState = avatar->getMotionState();
|
||||
if (motionState) {
|
||||
motionState->addDirtyFlags(Simulation::DIRTY_SHAPE);
|
||||
} else {
|
||||
ShapeInfo shapeInfo;
|
||||
avatar->computeShapeInfo(shapeInfo);
|
||||
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
|
||||
if (shape) {
|
||||
AvatarMotionState* motionState = new AvatarMotionState(avatar.get(), shape);
|
||||
avatar->setMotionState(motionState);
|
||||
_motionStatesToAdd.insert(motionState);
|
||||
_avatarMotionStates.insert(motionState);
|
||||
}
|
||||
void AvatarManager::updateAvatarPhysicsShape(Avatar* avatar) {
|
||||
AvatarMotionState* motionState = avatar->getMotionState();
|
||||
if (motionState) {
|
||||
motionState->addDirtyFlags(Simulation::DIRTY_SHAPE);
|
||||
} else {
|
||||
ShapeInfo shapeInfo;
|
||||
avatar->computeShapeInfo(shapeInfo);
|
||||
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
|
||||
if (shape) {
|
||||
AvatarMotionState* motionState = new AvatarMotionState(avatar, shape);
|
||||
avatar->setMotionState(motionState);
|
||||
_motionStatesToAdd.insert(motionState);
|
||||
_avatarMotionStates.insert(motionState);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -341,7 +351,7 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
|
|||
void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
|
||||
for (auto avatarData : _avatarHash) {
|
||||
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarData);
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
avatar->addToScene(avatar, scene, pendingChanges);
|
||||
|
@ -349,7 +359,7 @@ void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
|
|||
}
|
||||
} else {
|
||||
for (auto avatarData : _avatarHash) {
|
||||
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarData);
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
avatar->removeFromScene(avatar, scene, pendingChanges);
|
||||
|
@ -363,11 +373,6 @@ AvatarSharedPointer AvatarManager::getAvatarBySessionID(const QUuid& sessionID)
|
|||
if (sessionID == _myAvatar->getSessionUUID()) {
|
||||
return std::static_pointer_cast<Avatar>(_myAvatar);
|
||||
}
|
||||
QReadLocker locker(&_hashLock);
|
||||
auto iter = _avatarHash.find(sessionID);
|
||||
if (iter != _avatarHash.end()) {
|
||||
return iter.value();
|
||||
} else {
|
||||
return AvatarSharedPointer();
|
||||
}
|
||||
|
||||
return findAvatar(sessionID);
|
||||
}
|
||||
|
|
|
@ -63,7 +63,7 @@ public:
|
|||
void handleOutgoingChanges(const VectorOfMotionStates& motionStates);
|
||||
void handleCollisionEvents(const CollisionEvents& collisionEvents);
|
||||
|
||||
void updateAvatarPhysicsShape(const QUuid& id);
|
||||
void updateAvatarPhysicsShape(Avatar* avatar);
|
||||
|
||||
public slots:
|
||||
void setShouldShowReceiveStats(bool shouldShowReceiveStats) { _shouldShowReceiveStats = shouldShowReceiveStats; }
|
||||
|
@ -79,7 +79,9 @@ private:
|
|||
virtual AvatarSharedPointer newSharedAvatar();
|
||||
virtual AvatarSharedPointer addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer);
|
||||
void removeAvatarMotionState(AvatarSharedPointer avatar);
|
||||
|
||||
virtual void removeAvatar(const QUuid& sessionUUID);
|
||||
virtual void handleRemovedAvatar(const AvatarSharedPointer& removedAvatar);
|
||||
|
||||
QVector<AvatarSharedPointer> _avatarFades;
|
||||
std::shared_ptr<MyAvatar> _myAvatar;
|
||||
|
|
|
@ -428,7 +428,7 @@ void MyAvatar::updateHMDFollowVelocity() {
|
|||
}
|
||||
if (_followSpeed > 0.0f) {
|
||||
// to compute new velocity we must rotate offset into the world-frame
|
||||
glm::quat sensorToWorldRotation = extractRotation(_sensorToWorldMatrix);
|
||||
glm::quat sensorToWorldRotation = glm::normalize(glm::quat_cast(_sensorToWorldMatrix));
|
||||
_followVelocity = _followSpeed * glm::normalize(sensorToWorldRotation * offset);
|
||||
}
|
||||
}
|
||||
|
@ -982,10 +982,8 @@ void MyAvatar::updateLookAtTargetAvatar() {
|
|||
const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
|
||||
const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;
|
||||
|
||||
AvatarHash hash;
|
||||
DependencyManager::get<AvatarManager>()->withAvatarHash([&] (const AvatarHash& locked) {
|
||||
hash = locked; // make a shallow copy and operate on that, to minimize lock time
|
||||
});
|
||||
AvatarHash hash = DependencyManager::get<AvatarManager>()->getHashCopy();
|
||||
|
||||
foreach (const AvatarSharedPointer& avatarPointer, hash) {
|
||||
auto avatar = static_pointer_cast<Avatar>(avatarPointer);
|
||||
bool isCurrentTarget = avatar->getIsLookAtTarget();
|
||||
|
|
|
@ -801,6 +801,14 @@ void AvatarData::changeReferential(Referential* ref) {
|
|||
_referential = ref;
|
||||
}
|
||||
|
||||
void AvatarData::setRawJointData(QVector<JointData> data) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setRawJointData", Q_ARG(QVector<JointData>, data));
|
||||
return;
|
||||
}
|
||||
_jointData = data;
|
||||
}
|
||||
|
||||
void AvatarData::setJointData(int index, const glm::quat& rotation, const glm::vec3& translation) {
|
||||
if (index == -1) {
|
||||
return;
|
||||
|
@ -1537,16 +1545,15 @@ void AvatarData::fromFrame(const QByteArray& frameData, AvatarData& result) {
|
|||
QVector<JointData> jointArray;
|
||||
QJsonArray jointArrayJson = root[JSON_AVATAR_JOINT_ARRAY].toArray();
|
||||
jointArray.reserve(jointArrayJson.size());
|
||||
int i = 0;
|
||||
for (const auto& jointJson : jointArrayJson) {
|
||||
jointArray.push_back(jointDataFromJsonValue(jointJson));
|
||||
auto joint = jointDataFromJsonValue(jointJson);
|
||||
jointArray.push_back(joint);
|
||||
result.setJointData(i, joint.rotation, joint.translation);
|
||||
result._jointData[i].rotationSet = true; // Have to do that to broadcast the avatar new pose
|
||||
i++;
|
||||
}
|
||||
|
||||
QVector<glm::quat> jointRotations;
|
||||
jointRotations.reserve(jointArray.size());
|
||||
for (const auto& joint : jointArray) {
|
||||
jointRotations.push_back(joint.rotation);
|
||||
}
|
||||
result.setJointRotations(jointRotations);
|
||||
result.setRawJointData(jointArray);
|
||||
}
|
||||
|
||||
#if 0
|
||||
|
|
|
@ -247,7 +247,7 @@ public:
|
|||
Q_INVOKABLE char getHandState() const { return _handState; }
|
||||
|
||||
const QVector<JointData>& getRawJointData() const { return _jointData; }
|
||||
void setRawJointData(QVector<JointData> data) { _jointData = data; }
|
||||
Q_INVOKABLE void setRawJointData(QVector<JointData> data);
|
||||
|
||||
Q_INVOKABLE virtual void setJointData(int index, const glm::quat& rotation, const glm::vec3& translation);
|
||||
Q_INVOKABLE virtual void setJointRotation(int index, const glm::quat& rotation);
|
||||
|
|
|
@ -22,13 +22,9 @@ AvatarHashMap::AvatarHashMap() {
|
|||
connect(DependencyManager::get<NodeList>().data(), &NodeList::uuidChanged, this, &AvatarHashMap::sessionUUIDChanged);
|
||||
}
|
||||
|
||||
void AvatarHashMap::withAvatarHash(std::function<void(const AvatarHash& hash)> callback) {
|
||||
QReadLocker locker(&_hashLock);
|
||||
callback(_avatarHash);
|
||||
}
|
||||
bool AvatarHashMap::isAvatarInRange(const glm::vec3& position, const float range) {
|
||||
QReadLocker locker(&_hashLock);
|
||||
foreach(const AvatarSharedPointer& sharedAvatar, _avatarHash) {
|
||||
auto hashCopy = getHashCopy();
|
||||
foreach(const AvatarSharedPointer& sharedAvatar, hashCopy) {
|
||||
glm::vec3 avatarPosition = sharedAvatar->getPosition();
|
||||
float distance = glm::distance(avatarPosition, position);
|
||||
if (distance < range) {
|
||||
|
@ -44,18 +40,35 @@ AvatarSharedPointer AvatarHashMap::newSharedAvatar() {
|
|||
|
||||
AvatarSharedPointer AvatarHashMap::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
|
||||
qCDebug(avatars) << "Adding avatar with sessionUUID " << sessionUUID << "to AvatarHashMap.";
|
||||
|
||||
AvatarSharedPointer avatar = newSharedAvatar();
|
||||
|
||||
auto avatar = newSharedAvatar();
|
||||
avatar->setSessionUUID(sessionUUID);
|
||||
avatar->setOwningAvatarMixer(mixerWeakPointer);
|
||||
QWriteLocker locker(&_hashLock);
|
||||
|
||||
_avatarHash.insert(sessionUUID, avatar);
|
||||
emit avatarAddedEvent(sessionUUID);
|
||||
|
||||
return avatar;
|
||||
}
|
||||
|
||||
void AvatarHashMap::processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
|
||||
AvatarSharedPointer AvatarHashMap::newOrExistingAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
|
||||
QWriteLocker locker(&_hashLock);
|
||||
|
||||
auto avatar = _avatarHash.value(sessionUUID);
|
||||
|
||||
if (!avatar) {
|
||||
avatar = addAvatar(sessionUUID, mixerWeakPointer);
|
||||
}
|
||||
|
||||
return avatar;
|
||||
}
|
||||
|
||||
AvatarSharedPointer AvatarHashMap::findAvatar(const QUuid& sessionUUID) {
|
||||
QReadLocker locker(&_hashLock);
|
||||
return _avatarHash.value(sessionUUID);
|
||||
}
|
||||
|
||||
void AvatarHashMap::processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
|
||||
// enumerate over all of the avatars in this packet
|
||||
// only add them if mixerWeakPointer points to something (meaning that mixer is still around)
|
||||
while (message->getBytesLeftToRead()) {
|
||||
|
@ -66,10 +79,7 @@ void AvatarHashMap::processAvatarDataPacket(QSharedPointer<ReceivedMessage> mess
|
|||
QByteArray byteArray = message->readWithoutCopy(message->getBytesLeftToRead());
|
||||
|
||||
if (sessionUUID != _lastOwnerSessionUUID) {
|
||||
AvatarSharedPointer avatar = _avatarHash.value(sessionUUID);
|
||||
if (!avatar) {
|
||||
avatar = addAvatar(sessionUUID, sendingNode);
|
||||
}
|
||||
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
|
||||
|
||||
// have the matching (or new) avatar parse the data from the packet
|
||||
int bytesRead = avatar->parseDataFromBuffer(byteArray);
|
||||
|
@ -97,10 +107,8 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage>
|
|||
identityStream >> sessionUUID >> faceMeshURL >> skeletonURL >> attachmentData >> displayName;
|
||||
|
||||
// mesh URL for a UUID, find avatar in our list
|
||||
AvatarSharedPointer avatar = _avatarHash.value(sessionUUID);
|
||||
if (!avatar) {
|
||||
avatar = addAvatar(sessionUUID, sendingNode);
|
||||
}
|
||||
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
|
||||
|
||||
if (avatar->getFaceModelURL() != faceMeshURL) {
|
||||
avatar->setFaceModelURL(faceMeshURL);
|
||||
}
|
||||
|
@ -122,10 +130,7 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage>
|
|||
void AvatarHashMap::processAvatarBillboardPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
|
||||
QUuid sessionUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
AvatarSharedPointer avatar = _avatarHash.value(sessionUUID);
|
||||
if (!avatar) {
|
||||
avatar = addAvatar(sessionUUID, sendingNode);
|
||||
}
|
||||
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
|
||||
|
||||
QByteArray billboard = message->read(message->getBytesLeftToRead());
|
||||
if (avatar->getBillboard() != billboard) {
|
||||
|
@ -137,13 +142,22 @@ void AvatarHashMap::processKillAvatar(QSharedPointer<ReceivedMessage> message, S
|
|||
// read the node id
|
||||
QUuid sessionUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
removeAvatar(sessionUUID);
|
||||
|
||||
}
|
||||
|
||||
void AvatarHashMap::removeAvatar(const QUuid& sessionUUID) {
|
||||
QWriteLocker locker(&_hashLock);
|
||||
_avatarHash.remove(sessionUUID);
|
||||
emit avatarRemovedEvent(sessionUUID);
|
||||
|
||||
auto removedAvatar = _avatarHash.take(sessionUUID);
|
||||
|
||||
if (removedAvatar) {
|
||||
handleRemovedAvatar(removedAvatar);
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarHashMap::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar) {
|
||||
qDebug() << "Removed avatar with UUID" << uuidStringWithoutCurlyBraces(removedAvatar->getSessionUUID())
|
||||
<< "from AvatarHashMap";
|
||||
emit avatarRemovedEvent(removedAvatar->getSessionUUID());
|
||||
}
|
||||
|
||||
void AvatarHashMap::sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& oldUUID) {
|
||||
|
|
|
@ -31,7 +31,7 @@ class AvatarHashMap : public QObject, public Dependency {
|
|||
SINGLETON_DEPENDENCY
|
||||
|
||||
public:
|
||||
void withAvatarHash(std::function<void(const AvatarHash& hash)>);
|
||||
AvatarHash getHashCopy() { QReadLocker lock(&_hashLock); return _avatarHash; }
|
||||
int size() { return _avatarHash.size(); }
|
||||
|
||||
signals:
|
||||
|
@ -55,7 +55,11 @@ protected:
|
|||
|
||||
virtual AvatarSharedPointer newSharedAvatar();
|
||||
virtual AvatarSharedPointer addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer);
|
||||
AvatarSharedPointer newOrExistingAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer);
|
||||
virtual AvatarSharedPointer findAvatar(const QUuid& sessionUUID); // uses a QReadLocker on the hashLock
|
||||
virtual void removeAvatar(const QUuid& sessionUUID);
|
||||
|
||||
virtual void handleRemovedAvatar(const AvatarSharedPointer& removedAvatar);
|
||||
|
||||
AvatarHash _avatarHash;
|
||||
// "Case-based safety": Most access to the _avatarHash is on the same thread. Write access is protected by a write-lock.
|
||||
|
|
|
@ -122,7 +122,7 @@ void EntityTreeRenderer::init() {
|
|||
}
|
||||
|
||||
void EntityTreeRenderer::shutdown() {
|
||||
_entitiesScriptEngine->disconnect(); // disconnect all slots/signals from the script engine
|
||||
_entitiesScriptEngine->disconnectNonEssentialSignals(); // disconnect all slots/signals from the script engine, except essential
|
||||
_shuttingDown = true;
|
||||
}
|
||||
|
||||
|
|
|
@ -36,7 +36,6 @@ PolyLineEntityItem(entityItemID, properties) {
|
|||
|
||||
gpu::PipelinePointer RenderablePolyLineEntityItem::_pipeline;
|
||||
gpu::Stream::FormatPointer RenderablePolyLineEntityItem::_format;
|
||||
gpu::TexturePointer RenderablePolyLineEntityItem::_texture;
|
||||
int32_t RenderablePolyLineEntityItem::PAINTSTROKE_GPU_SLOT;
|
||||
|
||||
void RenderablePolyLineEntityItem::createPipeline() {
|
||||
|
@ -44,9 +43,6 @@ void RenderablePolyLineEntityItem::createPipeline() {
|
|||
static const int COLOR_OFFSET = 24;
|
||||
static const int TEXTURE_OFFSET = 28;
|
||||
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
QString path = PathUtils::resourcesPath() + "images/paintStroke.png";
|
||||
_texture = textureCache->getImageTexture(path);
|
||||
_format.reset(new gpu::Stream::Format());
|
||||
_format->setAttribute(gpu::Stream::POSITION, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
|
||||
_format->setAttribute(gpu::Stream::NORMAL, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), NORMAL_OFFSET);
|
||||
|
@ -132,6 +128,13 @@ void RenderablePolyLineEntityItem::render(RenderArgs* args) {
|
|||
createPipeline();
|
||||
}
|
||||
|
||||
if (!_texture || _texturesChangedFlag) {
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
QString path = _textures.isEmpty() ? PathUtils::resourcesPath() + "images/paintStroke.png" : _textures;
|
||||
_texture = textureCache->getTexture(QUrl(path));
|
||||
_texturesChangedFlag = false;
|
||||
}
|
||||
|
||||
PerformanceTimer perfTimer("RenderablePolyLineEntityItem::render");
|
||||
Q_ASSERT(getType() == EntityTypes::PolyLine);
|
||||
|
||||
|
@ -147,7 +150,11 @@ void RenderablePolyLineEntityItem::render(RenderArgs* args) {
|
|||
batch.setModelTransform(transform);
|
||||
|
||||
batch.setPipeline(_pipeline);
|
||||
batch.setResourceTexture(PAINTSTROKE_GPU_SLOT, _texture);
|
||||
if (_texture->isLoaded()) {
|
||||
batch.setResourceTexture(PAINTSTROKE_GPU_SLOT, _texture->getGPUTexture());
|
||||
} else {
|
||||
batch.setResourceTexture(PAINTSTROKE_GPU_SLOT, args->_whiteTexture);
|
||||
}
|
||||
|
||||
batch.setInputFormat(_format);
|
||||
batch.setInputBuffer(0, _verticesBuffer, 0, _format->getChannels().at(0)._stride);
|
||||
|
|
|
@ -12,10 +12,13 @@
|
|||
#ifndef hifi_RenderablePolyLineEntityItem_h
|
||||
#define hifi_RenderablePolyLineEntityItem_h
|
||||
|
||||
|
||||
#include <gpu/Batch.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <PolyLineEntityItem.h>
|
||||
#include "RenderableEntityItem.h"
|
||||
#include <GeometryCache.h>
|
||||
#include <TextureCache.h>
|
||||
|
||||
#include <QReadWriteLock>
|
||||
|
||||
|
||||
|
@ -29,9 +32,10 @@ public:
|
|||
|
||||
SIMPLE_RENDERABLE();
|
||||
|
||||
NetworkTexturePointer _texture;
|
||||
|
||||
static gpu::PipelinePointer _pipeline;
|
||||
static gpu::Stream::FormatPointer _format;
|
||||
static gpu::TexturePointer _texture;
|
||||
static int32_t PAINTSTROKE_GPU_SLOT;
|
||||
|
||||
protected:
|
||||
|
|
|
@ -500,6 +500,15 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
}
|
||||
}
|
||||
|
||||
// before proceeding, check to see if this is an entity that we know has been deleted, which
|
||||
// might happen in the case of out-of-order and/or recorvered packets, if we've deleted the entity
|
||||
// we can confidently ignore this packet
|
||||
EntityTreePointer tree = getTree();
|
||||
if (tree && tree->isDeletedEntity(_id)) {
|
||||
qDebug() << "Recieved packet for previously deleted entity [" << _id << "] ignoring. (inside " << __FUNCTION__ << ")";
|
||||
ignoreServerPacket = true;
|
||||
}
|
||||
|
||||
if (ignoreServerPacket) {
|
||||
overwriteLocalData = false;
|
||||
#ifdef WANT_DEBUG
|
||||
|
|
|
@ -398,6 +398,7 @@ public:
|
|||
void getAllTerseUpdateProperties(EntityItemProperties& properties) const;
|
||||
|
||||
void flagForOwnership() { _dirtyFlags |= Simulation::DIRTY_SIMULATOR_OWNERSHIP; }
|
||||
void flagForMotionStateChange() { _dirtyFlags |= Simulation::DIRTY_MOTION_TYPE; }
|
||||
|
||||
bool addAction(EntitySimulation* simulation, EntityActionPointer action);
|
||||
bool updateAction(EntitySimulation* simulation, const QUuid& actionID, const QVariantMap& arguments);
|
||||
|
|
|
@ -432,6 +432,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
|
|||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LINE_POINTS, linePoints);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_NORMALS, normals);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_STROKE_WIDTHS, strokeWidths);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_TEXTURES, textures);
|
||||
}
|
||||
|
||||
// Sitting properties support
|
||||
|
@ -1011,6 +1012,7 @@ bool EntityItemProperties::encodeEntityEditPacket(PacketType command, EntityItem
|
|||
APPEND_ENTITY_PROPERTY(PROP_LINE_POINTS, properties.getLinePoints());
|
||||
APPEND_ENTITY_PROPERTY(PROP_NORMALS, properties.getNormals());
|
||||
APPEND_ENTITY_PROPERTY(PROP_STROKE_WIDTHS, properties.getStrokeWidths());
|
||||
APPEND_ENTITY_PROPERTY(PROP_TEXTURES, properties.getTextures());
|
||||
}
|
||||
|
||||
APPEND_ENTITY_PROPERTY(PROP_MARKETPLACE_ID, properties.getMarketplaceID());
|
||||
|
@ -1287,6 +1289,7 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
|
|||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_LINE_POINTS, QVector<glm::vec3>, setLinePoints);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_NORMALS, QVector<glm::vec3>, setNormals);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_STROKE_WIDTHS, QVector<float>, setStrokeWidths);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_TEXTURES, QString, setTextures);
|
||||
}
|
||||
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MARKETPLACE_ID, QString, setMarketplaceID);
|
||||
|
|
|
@ -68,6 +68,7 @@ void EntityTree::eraseAllOctreeElements(bool createNewRoot) {
|
|||
Octree::eraseAllOctreeElements(createNewRoot);
|
||||
|
||||
resetClientEditStats();
|
||||
clearDeletedEntities();
|
||||
}
|
||||
|
||||
bool EntityTree::handlesEditPacketType(PacketType packetType) const {
|
||||
|
@ -398,6 +399,9 @@ void EntityTree::processRemovedEntities(const DeleteEntityOperator& theOperator)
|
|||
// set up the deleted entities ID
|
||||
QWriteLocker locker(&_recentlyDeletedEntitiesLock);
|
||||
_recentlyDeletedEntityItemIDs.insert(deletedAt, theEntity->getEntityItemID());
|
||||
} else {
|
||||
// on the client side, we also remember that we deleted this entity, we don't care about the time
|
||||
trackDeletedEntity(theEntity->getEntityItemID());
|
||||
}
|
||||
|
||||
if (_simulation) {
|
||||
|
|
|
@ -228,6 +228,11 @@ public:
|
|||
|
||||
EntityTreePointer getThisPointer() { return std::static_pointer_cast<EntityTree>(shared_from_this()); }
|
||||
|
||||
bool isDeletedEntity(const QUuid& id) {
|
||||
QReadLocker locker(&_deletedEntitiesLock);
|
||||
return _deletedEntityItemIDs.contains(id);
|
||||
}
|
||||
|
||||
signals:
|
||||
void deletingEntity(const EntityItemID& entityID);
|
||||
void addingEntity(const EntityItemID& entityID);
|
||||
|
@ -235,7 +240,7 @@ signals:
|
|||
void newCollisionSoundURL(const QUrl& url);
|
||||
void clearingEntities();
|
||||
|
||||
private:
|
||||
protected:
|
||||
|
||||
void processRemovedEntities(const DeleteEntityOperator& theOperator);
|
||||
bool updateEntityWithElement(EntityItemPointer entity, const EntityItemProperties& properties,
|
||||
|
@ -252,8 +257,22 @@ private:
|
|||
QReadWriteLock _newlyCreatedHooksLock;
|
||||
QVector<NewlyCreatedEntityHook*> _newlyCreatedHooks;
|
||||
|
||||
mutable QReadWriteLock _recentlyDeletedEntitiesLock;
|
||||
QMultiMap<quint64, QUuid> _recentlyDeletedEntityItemIDs;
|
||||
mutable QReadWriteLock _recentlyDeletedEntitiesLock; /// lock of server side recent deletes
|
||||
QMultiMap<quint64, QUuid> _recentlyDeletedEntityItemIDs; /// server side recent deletes
|
||||
|
||||
mutable QReadWriteLock _deletedEntitiesLock; /// lock of client side recent deletes
|
||||
QSet<QUuid> _deletedEntityItemIDs; /// client side recent deletes
|
||||
|
||||
void clearDeletedEntities() {
|
||||
QWriteLocker locker(&_deletedEntitiesLock);
|
||||
_deletedEntityItemIDs.clear();
|
||||
}
|
||||
|
||||
void trackDeletedEntity(const QUuid& id) {
|
||||
QWriteLocker locker(&_deletedEntitiesLock);
|
||||
_deletedEntityItemIDs << id;
|
||||
}
|
||||
|
||||
EntityItemFBXService* _fbxService;
|
||||
|
||||
QHash<EntityItemID, EntityTreeElementPointer> _entityToElementMap;
|
||||
|
|
|
@ -894,12 +894,19 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
|
|||
entityItem = EntityTypes::constructEntityItem(dataAt, bytesLeftToRead, args);
|
||||
if (entityItem) {
|
||||
bytesForThisEntity = entityItem->readEntityDataFromBuffer(dataAt, bytesLeftToRead, args);
|
||||
addEntityItem(entityItem); // add this new entity to this elements entities
|
||||
entityItemID = entityItem->getEntityItemID();
|
||||
_myTree->setContainingElement(entityItemID, getThisPointer());
|
||||
_myTree->postAddEntity(entityItem);
|
||||
if (entityItem->getCreated() == UNKNOWN_CREATED_TIME) {
|
||||
entityItem->recordCreationTime();
|
||||
|
||||
// don't add if we've recently deleted....
|
||||
if (!_myTree->isDeletedEntity(entityItem->getID())) {
|
||||
addEntityItem(entityItem); // add this new entity to this elements entities
|
||||
entityItemID = entityItem->getEntityItemID();
|
||||
_myTree->setContainingElement(entityItemID, getThisPointer());
|
||||
_myTree->postAddEntity(entityItem);
|
||||
if (entityItem->getCreated() == UNKNOWN_CREATED_TIME) {
|
||||
entityItem->recordCreationTime();
|
||||
}
|
||||
} else {
|
||||
qDebug() << "Recieved packet for previously deleted entity [" <<
|
||||
entityItem->getID() << "] ignoring. (inside " << __FUNCTION__ << ")";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -37,7 +37,8 @@ _pointsChanged(true),
|
|||
_points(QVector<glm::vec3>(0.0f)),
|
||||
_vertices(QVector<glm::vec3>(0.0f)),
|
||||
_normals(QVector<glm::vec3>(0.0f)),
|
||||
_strokeWidths(QVector<float>(0.0f))
|
||||
_strokeWidths(QVector<float>(0.0f)),
|
||||
_textures("")
|
||||
{
|
||||
_type = EntityTypes::PolyLine;
|
||||
_created = properties.getCreated();
|
||||
|
@ -56,6 +57,7 @@ EntityItemProperties PolyLineEntityItem::getProperties(EntityPropertyFlags desir
|
|||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(linePoints, getLinePoints);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(normals, getNormals);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(strokeWidths, getStrokeWidths);
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(textures, getTextures);
|
||||
|
||||
properties._glowLevel = getGlowLevel();
|
||||
properties._glowLevelChanged = false;
|
||||
|
@ -72,6 +74,7 @@ bool PolyLineEntityItem::setProperties(const EntityItemProperties& properties) {
|
|||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(linePoints, setLinePoints);
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(normals, setNormals);
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(strokeWidths, setStrokeWidths);
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(textures, setTextures);
|
||||
|
||||
if (somethingChanged) {
|
||||
bool wantDebug = false;
|
||||
|
@ -196,6 +199,7 @@ int PolyLineEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* da
|
|||
READ_ENTITY_PROPERTY(PROP_LINE_POINTS, QVector<glm::vec3>, setLinePoints);
|
||||
READ_ENTITY_PROPERTY(PROP_NORMALS, QVector<glm::vec3>, setNormals);
|
||||
READ_ENTITY_PROPERTY(PROP_STROKE_WIDTHS, QVector<float>, setStrokeWidths);
|
||||
READ_ENTITY_PROPERTY(PROP_TEXTURES, QString, setTextures);
|
||||
|
||||
return bytesRead;
|
||||
}
|
||||
|
@ -209,6 +213,7 @@ EntityPropertyFlags PolyLineEntityItem::getEntityProperties(EncodeBitstreamParam
|
|||
requestedProperties += PROP_LINE_POINTS;
|
||||
requestedProperties += PROP_NORMALS;
|
||||
requestedProperties += PROP_STROKE_WIDTHS;
|
||||
requestedProperties += PROP_TEXTURES;
|
||||
return requestedProperties;
|
||||
}
|
||||
|
||||
|
@ -228,6 +233,7 @@ void PolyLineEntityItem::appendSubclassData(OctreePacketData* packetData, Encode
|
|||
APPEND_ENTITY_PROPERTY(PROP_LINE_POINTS, getLinePoints());
|
||||
APPEND_ENTITY_PROPERTY(PROP_NORMALS, getNormals());
|
||||
APPEND_ENTITY_PROPERTY(PROP_STROKE_WIDTHS, getStrokeWidths());
|
||||
APPEND_ENTITY_PROPERTY(PROP_TEXTURES, getTextures());
|
||||
}
|
||||
|
||||
void PolyLineEntityItem::debugDump() const {
|
||||
|
|
|
@ -67,7 +67,14 @@ class PolyLineEntityItem : public EntityItem {
|
|||
|
||||
bool setStrokeWidths(const QVector<float>& strokeWidths);
|
||||
const QVector<float>& getStrokeWidths() const{ return _strokeWidths; }
|
||||
|
||||
|
||||
const QString& getTextures() const { return _textures; }
|
||||
void setTextures(const QString& textures) {
|
||||
if (_textures != textures) {
|
||||
_textures = textures;
|
||||
_texturesChangedFlag = true;
|
||||
}
|
||||
}
|
||||
|
||||
virtual ShapeType getShapeType() const { return SHAPE_TYPE_LINE; }
|
||||
|
||||
|
@ -90,6 +97,8 @@ class PolyLineEntityItem : public EntityItem {
|
|||
QVector<glm::vec3> _vertices;
|
||||
QVector<glm::vec3> _normals;
|
||||
QVector<float> _strokeWidths;
|
||||
QString _textures;
|
||||
bool _texturesChangedFlag { false };
|
||||
mutable QReadWriteLock _quadReadWriteLock;
|
||||
};
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
#include <QtCore/QBuffer>
|
||||
#include <QtCore/QStandardPaths>
|
||||
#include <QtCore/QThread>
|
||||
#include <QtScript/QScriptEngine>
|
||||
#include <QtNetwork/QNetworkDiskCache>
|
||||
|
||||
#include "AssetRequest.h"
|
||||
|
@ -376,3 +377,70 @@ void AssetClient::handleNodeKilled(SharedNodePointer node) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AssetScriptingInterface::uploadData(QString data, QString extension, QScriptValue callback) {
|
||||
QByteArray dataByteArray = data.toUtf8();
|
||||
auto upload = DependencyManager::get<AssetClient>()->createUpload(dataByteArray, extension);
|
||||
if (!upload) {
|
||||
qCWarning(asset_client) << "Error uploading file to asset server";
|
||||
return;
|
||||
}
|
||||
|
||||
QObject::connect(upload, &AssetUpload::finished, this, [this, callback, extension](AssetUpload* upload, const QString& hash) mutable {
|
||||
if (callback.isFunction()) {
|
||||
QString url = "atp://" + hash + "." + extension;
|
||||
QScriptValueList args { url };
|
||||
callback.call(_engine->currentContext()->thisObject(), args);
|
||||
}
|
||||
});
|
||||
upload->start();
|
||||
}
|
||||
|
||||
AssetScriptingInterface::AssetScriptingInterface(QScriptEngine* engine) :
|
||||
_engine(engine)
|
||||
{
|
||||
}
|
||||
|
||||
void AssetScriptingInterface::downloadData(QString urlString, QScriptValue callback) {
|
||||
const QString ATP_SCHEME { "atp://" };
|
||||
|
||||
if (!urlString.startsWith(ATP_SCHEME)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Make request to atp
|
||||
auto path = urlString.right(urlString.length() - ATP_SCHEME.length());
|
||||
auto parts = path.split(".", QString::SkipEmptyParts);
|
||||
auto hash = parts.length() > 0 ? parts[0] : "";
|
||||
auto extension = parts.length() > 1 ? parts[1] : "";
|
||||
|
||||
if (hash.length() != SHA256_HASH_HEX_LENGTH) {
|
||||
return;
|
||||
}
|
||||
|
||||
auto assetClient = DependencyManager::get<AssetClient>();
|
||||
auto assetRequest = assetClient->createRequest(hash, extension);
|
||||
|
||||
if (!assetRequest) {
|
||||
return;
|
||||
}
|
||||
|
||||
_pendingRequests << assetRequest;
|
||||
|
||||
connect(assetRequest, &AssetRequest::finished, this, [this, callback](AssetRequest* request) mutable {
|
||||
Q_ASSERT(request->getState() == AssetRequest::Finished);
|
||||
|
||||
if (request->getError() == AssetRequest::Error::NoError) {
|
||||
if (callback.isFunction()) {
|
||||
QString data = QString::fromUtf8(request->getData());
|
||||
QScriptValueList args { data };
|
||||
callback.call(_engine->currentContext()->thisObject(), args);
|
||||
}
|
||||
}
|
||||
|
||||
request->deleteLater();
|
||||
_pendingRequests.remove(request);
|
||||
});
|
||||
|
||||
assetRequest->start();
|
||||
}
|
|
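downloadData() above returns without calling back when the URL is not an atp:// URL or its hash is not a full SHA-256 hex string, so a script caller never learns the request was dropped. A defensive calling pattern from the script side, as a sketch (the 64-character check mirrors SHA256_HASH_HEX_LENGTH above):

// sketch: validate an atp URL before handing it to Assets.downloadData
function downloadIfValid(url, onData) {
    var valid = /^atp:\/\/[0-9a-fA-F]{64}(\.[A-Za-z0-9]+)?$/.test(url);
    if (!valid) {
        print("not a usable atp URL, the download would be silently ignored: " + url);
        return;
    }
    Assets.downloadData(url, onData);
}

// a URL produced by Assets.uploadData always passes the check above
Assets.uploadData("example payload", "txt", function (url) {
    downloadIfValid(url, function(data) {
        print("downloaded " + data.length + " characters");
    });
});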
@ -14,6 +14,7 @@
|
|||
#define hifi_AssetClient_h
|
||||
|
||||
#include <QString>
|
||||
#include <QScriptValue>
|
||||
|
||||
#include <DependencyManager.h>
|
||||
|
||||
|
@ -22,6 +23,7 @@
|
|||
#include "NLPacket.h"
|
||||
#include "Node.h"
|
||||
#include "ReceivedMessage.h"
|
||||
#include "ResourceCache.h"
|
||||
|
||||
class AssetRequest;
|
||||
class AssetUpload;
|
||||
|
@ -75,4 +77,18 @@ private:
|
|||
friend class AssetUpload;
|
||||
};
|
||||
|
||||
|
||||
class AssetScriptingInterface : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
AssetScriptingInterface(QScriptEngine* engine);
|
||||
|
||||
Q_INVOKABLE void uploadData(QString data, QString extension, QScriptValue callback);
|
||||
Q_INVOKABLE void downloadData(QString url, QScriptValue downloadComplete);
|
||||
protected:
|
||||
QSet<AssetRequest*> _pendingRequests;
|
||||
QScriptEngine* _engine;
|
||||
};
|
||||
|
||||
|
||||
#endif
|
||||
|
|
|
@ -36,46 +36,64 @@ void MessagesClient::init() {
|
|||
}
|
||||
}
|
||||
|
||||
void MessagesClient::handleMessagesPacket(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
|
||||
QByteArray packetData = packetList->getMessage();
|
||||
QBuffer packet{ &packetData };
|
||||
packet.open(QIODevice::ReadOnly);
|
||||
|
||||
void MessagesClient::decodeMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, QString& channel, QString& message, QUuid& senderID) {
|
||||
quint16 channelLength;
|
||||
packet.read(reinterpret_cast<char*>(&channelLength), sizeof(channelLength));
|
||||
auto channelData = packet.read(channelLength);
|
||||
QString channel = QString::fromUtf8(channelData);
|
||||
receivedMessage->readPrimitive(&channelLength);
|
||||
auto channelData = receivedMessage->read(channelLength);
|
||||
channel = QString::fromUtf8(channelData);
|
||||
|
||||
quint16 messageLength;
|
||||
packet.read(reinterpret_cast<char*>(&messageLength), sizeof(messageLength));
|
||||
auto messageData = packet.read(messageLength);
|
||||
QString message = QString::fromUtf8(messageData);
|
||||
receivedMessage->readPrimitive(&messageLength);
|
||||
auto messageData = receivedMessage->read(messageLength);
|
||||
message = QString::fromUtf8(messageData);
|
||||
|
||||
emit messageReceived(channel, message, senderNode->getUUID());
|
||||
QByteArray bytesSenderID = receivedMessage->read(NUM_BYTES_RFC4122_UUID);
|
||||
if (bytesSenderID.length() == NUM_BYTES_RFC4122_UUID) {
|
||||
senderID = QUuid::fromRfc4122(bytesSenderID);
|
||||
} else {
|
||||
QUuid emptyUUID;
|
||||
senderID = emptyUUID; // packet was missing UUID use default instead
|
||||
}
|
||||
}
|
||||
|
||||
void MessagesClient::sendMessage(const QString& channel, const QString& message) {
|
||||
std::unique_ptr<NLPacketList> MessagesClient::encodeMessagesPacket(QString channel, QString message, QUuid senderID) {
|
||||
auto packetList = NLPacketList::create(PacketType::MessagesData, QByteArray(), true, true);
|
||||
|
||||
auto channelUtf8 = channel.toUtf8();
|
||||
quint16 channelLength = channelUtf8.length();
|
||||
packetList->writePrimitive(channelLength);
|
||||
packetList->write(channelUtf8);
|
||||
|
||||
auto messageUtf8 = message.toUtf8();
|
||||
quint16 messageLength = messageUtf8.length();
|
||||
packetList->writePrimitive(messageLength);
|
||||
packetList->write(messageUtf8);
|
||||
|
||||
packetList->write(senderID.toRfc4122());
|
||||
|
||||
return packetList;
|
||||
}
|
||||
|
||||
|
||||
void MessagesClient::handleMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode) {
|
||||
QString channel, message;
|
||||
QUuid senderID;
|
||||
decodeMessagesPacket(receivedMessage, channel, message, senderID);
|
||||
emit messageReceived(channel, message, senderID);
|
||||
}
|
||||
|
||||
void MessagesClient::sendMessage(QString channel, QString message) {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
SharedNodePointer messagesMixer = nodeList->soloNodeOfType(NodeType::MessagesMixer);
|
||||
|
||||
if (messagesMixer) {
|
||||
auto packetList = NLPacketList::create(PacketType::MessagesData, QByteArray(), true, true);
|
||||
|
||||
auto channelUtf8 = channel.toUtf8();
|
||||
quint16 channelLength = channelUtf8.length();
|
||||
packetList->writePrimitive(channelLength);
|
||||
packetList->write(channelUtf8);
|
||||
|
||||
auto messageUtf8 = message.toUtf8();
|
||||
quint16 messageLength = messageUtf8.length();
|
||||
packetList->writePrimitive(messageLength);
|
||||
packetList->write(messageUtf8);
|
||||
|
||||
QUuid senderID = nodeList->getSessionUUID();
|
||||
auto packetList = encodeMessagesPacket(channel, message, senderID);
|
||||
nodeList->sendPacketList(std::move(packetList), *messagesMixer);
|
||||
}
|
||||
}
|
||||
|
||||
void MessagesClient::subscribe(const QString& channel) {
|
||||
void MessagesClient::subscribe(QString channel) {
|
||||
_subscribedChannels << channel;
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
SharedNodePointer messagesMixer = nodeList->soloNodeOfType(NodeType::MessagesMixer);
|
||||
|
@ -87,7 +105,7 @@ void MessagesClient::subscribe(const QString& channel) {
|
|||
}
|
||||
}
|
||||
|
||||
void MessagesClient::unsubscribe(const QString& channel) {
|
||||
void MessagesClient::unsubscribe(QString channel) {
|
||||
_subscribedChannels.remove(channel);
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
SharedNodePointer messagesMixer = nodeList->soloNodeOfType(NodeType::MessagesMixer);
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
#include "LimitedNodeList.h"
|
||||
#include "NLPacket.h"
|
||||
#include "Node.h"
|
||||
#include "ReceivedMessage.h"
|
||||
|
||||
class MessagesClient : public QObject, public Dependency {
|
||||
Q_OBJECT
|
||||
|
@ -28,15 +29,19 @@ public:
|
|||
|
||||
Q_INVOKABLE void init();
|
||||
|
||||
Q_INVOKABLE void sendMessage(const QString& channel, const QString& message);
|
||||
Q_INVOKABLE void subscribe(const QString& channel);
|
||||
Q_INVOKABLE void unsubscribe(const QString& channel);
|
||||
Q_INVOKABLE void sendMessage(QString channel, QString message);
|
||||
Q_INVOKABLE void subscribe(QString channel);
|
||||
Q_INVOKABLE void unsubscribe(QString channel);
|
||||
|
||||
static void decodeMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, QString& channel, QString& message, QUuid& senderID);
|
||||
static std::unique_ptr<NLPacketList> encodeMessagesPacket(QString channel, QString message, QUuid senderID);
|
||||
|
||||
|
||||
signals:
|
||||
void messageReceived(const QString& channel, const QString& message, const QUuid& senderUUID);
|
||||
void messageReceived(QString channel, QString message, QUuid senderUUID);
|
||||
|
||||
private slots:
|
||||
void handleMessagesPacket(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode);
|
||||
void handleMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode);
|
||||
void handleNodeActivated(SharedNodePointer node);
|
||||
|
||||
protected:
|
||||
|
|
|
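Note: the payload that encodeMessagesPacket()/decodeMessagesPacket() agree on is laid out as [quint16 channel length][channel UTF-8 bytes][quint16 message length][message UTF-8 bytes][16-byte RFC 4122 sender UUID]. The standalone helper below is only an illustrative sketch of that layout over a raw QByteArray; the shipping code reads through ReceivedMessage, and the length fields are assumed to be raw host-order copies, matching what writePrimitive() does.

// Illustrative sketch of the MessagesData payload layout (not the shipping decode path).
#include <QtCore/QByteArray>
#include <QtCore/QString>
#include <QtCore/QUuid>
#include <cstring>

bool decodeMessagesPayloadSketch(const QByteArray& payload, QString& channel, QString& message, QUuid& senderID) {
    const int UUID_BYTES = 16; // NUM_BYTES_RFC4122_UUID
    int offset = 0;
    auto readLength = [&](quint16& length) -> bool {
        if (payload.size() - offset < (int)sizeof(quint16)) { return false; }
        memcpy(&length, payload.constData() + offset, sizeof(quint16)); // raw copy, as writePrimitive() writes
        offset += sizeof(quint16);
        return true;
    };

    quint16 channelLength = 0;
    if (!readLength(channelLength) || payload.size() - offset < channelLength) { return false; }
    channel = QString::fromUtf8(payload.constData() + offset, channelLength);
    offset += channelLength;

    quint16 messageLength = 0;
    if (!readLength(messageLength) || payload.size() - offset < messageLength) { return false; }
    message = QString::fromUtf8(payload.constData() + offset, messageLength);
    offset += messageLength;

    // an absent or short UUID falls back to a null QUuid, as the decode above does
    QByteArray uuidBytes = payload.mid(offset, UUID_BYTES);
    senderID = (uuidBytes.size() == UUID_BYTES) ? QUuid::fromRfc4122(uuidBytes) : QUuid();
    return true;
}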
@ -52,7 +52,6 @@ Node::Node(const QUuid& uuid, NodeType_t type, const HifiSockAddr& publicSocket,
|
|||
NetworkPeer(uuid, publicSocket, localSocket, parent),
|
||||
_type(type),
|
||||
_connectionSecret(connectionSecret),
|
||||
_linkedData(NULL),
|
||||
_isAlive(true),
|
||||
_pingMs(-1), // "Uninitialized"
|
||||
_clockSkewUsec(0),
|
||||
|
@ -65,10 +64,6 @@ Node::Node(const QUuid& uuid, NodeType_t type, const HifiSockAddr& publicSocket,
|
|||
setType(_type);
|
||||
}
|
||||
|
||||
Node::~Node() {
|
||||
delete _linkedData;
|
||||
}
|
||||
|
||||
void Node::setType(char type) {
|
||||
_type = type;
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
#ifndef hifi_Node_h
|
||||
#define hifi_Node_h
|
||||
|
||||
#include <memory>
|
||||
#include <ostream>
|
||||
#include <stdint.h>
|
||||
|
||||
|
@ -34,7 +35,6 @@ public:
|
|||
const HifiSockAddr& publicSocket, const HifiSockAddr& localSocket,
|
||||
bool canAdjustLocks, bool canRez, const QUuid& connectionSecret = QUuid(),
|
||||
QObject* parent = 0);
|
||||
~Node();
|
||||
|
||||
bool operator==(const Node& otherNode) const { return _uuid == otherNode._uuid; }
|
||||
bool operator!=(const Node& otherNode) const { return !(*this == otherNode); }
|
||||
|
@ -45,8 +45,8 @@ public:
|
|||
const QUuid& getConnectionSecret() const { return _connectionSecret; }
|
||||
void setConnectionSecret(const QUuid& connectionSecret) { _connectionSecret = connectionSecret; }
|
||||
|
||||
NodeData* getLinkedData() const { return _linkedData; }
|
||||
void setLinkedData(NodeData* linkedData) { _linkedData = linkedData; }
|
||||
NodeData* getLinkedData() const { return _linkedData.get(); }
|
||||
void setLinkedData(std::unique_ptr<NodeData> linkedData) { _linkedData = std::move(linkedData); }
|
||||
|
||||
bool isAlive() const { return _isAlive; }
|
||||
void setAlive(bool isAlive) { _isAlive = isAlive; }
|
||||
|
@ -75,7 +75,7 @@ private:
|
|||
NodeType_t _type;
|
||||
|
||||
QUuid _connectionSecret;
|
||||
NodeData* _linkedData;
|
||||
std::unique_ptr<NodeData> _linkedData;
|
||||
bool _isAlive;
|
||||
int _pingMs;
|
||||
int _clockSkewUsec;
|
||||
|
|
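Note: with _linkedData now held as a std::unique_ptr, callers transfer ownership into the node instead of handing it a raw pointer it must remember to delete. A minimal sketch of the new calling convention, where SomeNodeData stands in for whatever concrete NodeData subclass the caller actually builds:

// Sketch of the new ownership model; SomeNodeData is a hypothetical NodeData subclass.
#include <memory>
#include <utility>

void attachLinkedData(Node& node) {
    std::unique_ptr<NodeData> data(new SomeNodeData());
    node.setLinkedData(std::move(data));    // the node now owns (and will delete) the data
    NodeData* view = node.getLinkedData();  // non-owning pointer for observers, may be null
    (void)view;
}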
|
@ -41,7 +41,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
|
|||
case PacketType::EntityAdd:
|
||||
case PacketType::EntityEdit:
|
||||
case PacketType::EntityData:
|
||||
return VERSION_ENTITIES_PARTICLES_ADDITIVE_BLENDING;
|
||||
return VERSION_ENTITIES_POLYLINE_TEXTURE;
|
||||
case PacketType::AvatarData:
|
||||
case PacketType::BulkAvatarData:
|
||||
default:
|
||||
|
|
|
@ -159,5 +159,6 @@ const PacketVersion VERSION_ENTITIES_ANIMATION_PROPERTIES_GROUP = 46;
|
|||
const PacketVersion VERSION_ENTITIES_KEYLIGHT_PROPERTIES_GROUP = 47;
|
||||
const PacketVersion VERSION_ENTITIES_KEYLIGHT_PROPERTIES_GROUP_BIS = 48;
|
||||
const PacketVersion VERSION_ENTITIES_PARTICLES_ADDITIVE_BLENDING = 49;
|
||||
const PacketVersion VERSION_ENTITIES_POLYLINE_TEXTURE = 50;
|
||||
|
||||
#endif // hifi_PacketHeaders_h
|
||||
|
|
|
@ -1841,6 +1841,7 @@ bool Octree::readFromStream(unsigned long streamLength, QDataStream& inputStream
|
|||
|
||||
|
||||
bool Octree::readSVOFromStream(unsigned long streamLength, QDataStream& inputStream) {
|
||||
qWarning() << "SVO file format depricated. Support for reading SVO files is no longer support and will be removed soon.";
|
||||
|
||||
bool fileOk = false;
|
||||
|
||||
|
@ -2062,6 +2063,8 @@ void Octree::writeToJSONFile(const char* fileName, OctreeElementPointer element,
|
|||
}
|
||||
|
||||
void Octree::writeToSVOFile(const char* fileName, OctreeElementPointer element) {
|
||||
qWarning() << "SVO file format depricated. Support for reading SVO files is no longer support and will be removed soon.";
|
||||
|
||||
std::ofstream file(fileName, std::ios::out|std::ios::binary);
|
||||
|
||||
if(file.is_open()) {
|
||||
|
|
|
@ -246,6 +246,18 @@ void ObjectAction::activateBody() {
|
|||
}
|
||||
}
|
||||
|
||||
void ObjectAction::forceBodyNonStatic() {
|
||||
auto ownerEntity = _ownerEntity.lock();
|
||||
if (!ownerEntity) {
|
||||
return;
|
||||
}
|
||||
void* physicsInfo = ownerEntity->getPhysicsInfo();
|
||||
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
|
||||
if (motionState && motionState->getMotionType() == MOTION_TYPE_STATIC) {
|
||||
ownerEntity->flagForMotionStateChange();
|
||||
}
|
||||
}
|
||||
|
||||
bool ObjectAction::lifetimeIsOver() {
|
||||
if (_expires == 0) {
|
||||
return false;
|
||||
|
|
|
@ -63,6 +63,7 @@ protected:
|
|||
virtual glm::vec3 getAngularVelocity();
|
||||
virtual void setAngularVelocity(glm::vec3 angularVelocity);
|
||||
virtual void activateBody();
|
||||
virtual void forceBodyNonStatic();
|
||||
|
||||
EntityItemWeakPointer _ownerEntity;
|
||||
QString _tag;
|
||||
|
|
|
@ -4,6 +4,6 @@ set(TARGET_NAME recording)
|
|||
setup_hifi_library(Script)
|
||||
|
||||
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
|
||||
link_hifi_libraries(shared)
|
||||
link_hifi_libraries(shared networking)
|
||||
|
||||
GroupSources("src/recording")
|
||||
|
|
|
@ -13,6 +13,11 @@
|
|||
#include "impl/FileClip.h"
|
||||
#include "impl/BufferClip.h"
|
||||
|
||||
#include <QtCore/QJsonDocument>
|
||||
#include <QtCore/QJsonObject>
|
||||
#include <QtCore/QBuffer>
|
||||
#include <QtCore/QDebug>
|
||||
|
||||
using namespace recording;
|
||||
|
||||
Clip::Pointer Clip::fromFile(const QString& filePath) {
|
||||
|
@ -27,6 +32,15 @@ void Clip::toFile(const QString& filePath, const Clip::ConstPointer& clip) {
|
|||
FileClip::write(filePath, clip->duplicate());
|
||||
}
|
||||
|
||||
QByteArray Clip::toBuffer(const Clip::ConstPointer& clip) {
|
||||
QBuffer buffer;
|
||||
if (buffer.open(QFile::Truncate | QFile::WriteOnly)) {
|
||||
clip->duplicate()->write(buffer);
|
||||
buffer.close();
|
||||
}
|
||||
return buffer.data();
|
||||
}
|
||||
|
||||
Clip::Pointer Clip::newClip() {
|
||||
return std::make_shared<BufferClip>();
|
||||
}
|
||||
|
@ -37,4 +51,70 @@ void Clip::seek(float offset) {
|
|||
|
||||
float Clip::position() const {
|
||||
return Frame::frameTimeToSeconds(positionFrameTime());
|
||||
};
|
||||
}
|
||||
|
||||
// FIXME move to frame?
|
||||
bool writeFrame(QIODevice& output, const Frame& frame, bool compressed = true) {
|
||||
if (frame.type == Frame::TYPE_INVALID) {
|
||||
qWarning() << "Attempting to write invalid frame";
|
||||
return true;
|
||||
}
|
||||
|
||||
auto written = output.write((char*)&(frame.type), sizeof(FrameType));
|
||||
if (written != sizeof(FrameType)) {
|
||||
return false;
|
||||
}
|
||||
//qDebug() << "Writing frame with time offset " << frame.timeOffset;
|
||||
written = output.write((char*)&(frame.timeOffset), sizeof(Frame::Time));
|
||||
if (written != sizeof(Frame::Time)) {
|
||||
return false;
|
||||
}
|
||||
QByteArray frameData = frame.data;
|
||||
if (compressed) {
|
||||
frameData = qCompress(frameData);
|
||||
}
|
||||
|
||||
uint16_t dataSize = frameData.size();
|
||||
written = output.write((char*)&dataSize, sizeof(FrameSize));
|
||||
if (written != sizeof(uint16_t)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (dataSize != 0) {
|
||||
written = output.write(frameData);
|
||||
if (written != dataSize) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
const QString Clip::FRAME_TYPE_MAP = QStringLiteral("frameTypes");
|
||||
const QString Clip::FRAME_COMREPSSION_FLAG = QStringLiteral("compressed");
|
||||
|
||||
bool Clip::write(QIODevice& output) {
|
||||
auto frameTypes = Frame::getFrameTypes();
|
||||
QJsonObject frameTypeObj;
|
||||
for (const auto& frameTypeName : frameTypes.keys()) {
|
||||
frameTypeObj[frameTypeName] = frameTypes[frameTypeName];
|
||||
}
|
||||
|
||||
QJsonObject rootObject;
|
||||
rootObject.insert(FRAME_TYPE_MAP, frameTypeObj);
|
||||
// Always mark new files as compressed
|
||||
rootObject.insert(FRAME_COMREPSSION_FLAG, true);
|
||||
QByteArray headerFrameData = QJsonDocument(rootObject).toBinaryData();
|
||||
// Never compress the header frame
|
||||
if (!writeFrame(output, Frame({ Frame::TYPE_HEADER, 0, headerFrameData }), false)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
seek(0);
|
||||
|
||||
for (auto frame = nextFrame(); frame; frame = nextFrame()) {
|
||||
if (!writeFrame(output, *frame)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
|
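Note: writeFrame() above lays each frame out as [FrameType][Frame::Time offset][uint16_t size][data, qCompress'd except for the header frame]. The read-back sketch below simply mirrors that layout and is hedged accordingly; the shipping reader actually goes through parseFrameHeaders()/readFrame() in PointerClip, shown further down.

// Mirrors writeFrame(): reads one frame back from a QIODevice. Sketch only.
bool readFrameSketch(QIODevice& input, Frame& frame, bool compressed = true) {
    if (input.read((char*)&frame.type, sizeof(FrameType)) != sizeof(FrameType)) {
        return false;
    }
    if (input.read((char*)&frame.timeOffset, sizeof(Frame::Time)) != sizeof(Frame::Time)) {
        return false;
    }
    FrameSize dataSize = 0;
    if (input.read((char*)&dataSize, sizeof(FrameSize)) != sizeof(FrameSize)) {
        return false;
    }
    if (dataSize != 0) {
        frame.data = input.read(dataSize);
        if (frame.data.size() != dataSize) {
            return false;
        }
        if (compressed) {
            frame.data = qUncompress(frame.data); // header frames are written uncompressed
        }
    }
    return true;
}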
|
@ -47,10 +47,16 @@ public:
|
|||
virtual void skipFrame() = 0;
|
||||
virtual void addFrame(FrameConstPointer) = 0;
|
||||
|
||||
bool write(QIODevice& output);
|
||||
|
||||
static Pointer fromFile(const QString& filePath);
|
||||
static void toFile(const QString& filePath, const ConstPointer& clip);
|
||||
static QByteArray toBuffer(const ConstPointer& clip);
|
||||
static Pointer newClip();
|
||||
|
||||
static const QString FRAME_TYPE_MAP;
|
||||
static const QString FRAME_COMREPSSION_FLAG;
|
||||
|
||||
protected:
|
||||
friend class WrapperClip;
|
||||
using Mutex = std::recursive_mutex;
|
||||
|
|
40 libraries/recording/src/recording/ClipCache.cpp (new file)
|
@ -0,0 +1,40 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/11/19
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "ClipCache.h"
|
||||
#include "impl/PointerClip.h"
|
||||
|
||||
using namespace recording;
|
||||
NetworkClipLoader::NetworkClipLoader(const QUrl& url, bool delayLoad)
|
||||
: Resource(url, delayLoad), _clip(std::make_shared<NetworkClip>(url))
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
|
||||
void NetworkClip::init(const QByteArray& clipData) {
|
||||
_clipData = clipData;
|
||||
PointerClip::init((uchar*)_clipData.data(), _clipData.size());
|
||||
}
|
||||
|
||||
void NetworkClipLoader::downloadFinished(const QByteArray& data) {
|
||||
_clip->init(data);
|
||||
}
|
||||
|
||||
ClipCache& ClipCache::instance() {
|
||||
static ClipCache _instance;
|
||||
return _instance;
|
||||
}
|
||||
|
||||
NetworkClipLoaderPointer ClipCache::getClipLoader(const QUrl& url) {
|
||||
return ResourceCache::getResource(url, QUrl(), false, nullptr).staticCast<NetworkClipLoader>();
|
||||
}
|
||||
|
||||
QSharedPointer<Resource> ClipCache::createResource(const QUrl& url, const QSharedPointer<Resource>& fallback, bool delayLoad, const void* extra) {
|
||||
return QSharedPointer<Resource>(new NetworkClipLoader(url, delayLoad), &Resource::allReferencesCleared);
|
||||
}
|
||||
|
57 libraries/recording/src/recording/ClipCache.h (new file)
|
@ -0,0 +1,57 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/11/19
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#pragma once
|
||||
#ifndef hifi_Recording_ClipCache_h
|
||||
#define hifi_Recording_ClipCache_h
|
||||
|
||||
#include <ResourceCache.h>
|
||||
|
||||
#include "Forward.h"
|
||||
#include "impl/PointerClip.h"
|
||||
|
||||
namespace recording {
|
||||
|
||||
class NetworkClip : public PointerClip {
|
||||
public:
|
||||
using Pointer = std::shared_ptr<NetworkClip>;
|
||||
|
||||
NetworkClip(const QUrl& url) : _url(url) {}
|
||||
virtual void init(const QByteArray& clipData);
|
||||
virtual QString getName() const override { return _url.toString(); }
|
||||
|
||||
private:
|
||||
QByteArray _clipData;
|
||||
QUrl _url;
|
||||
};
|
||||
|
||||
class NetworkClipLoader : public Resource {
|
||||
public:
|
||||
NetworkClipLoader(const QUrl& url, bool delayLoad);
|
||||
virtual void downloadFinished(const QByteArray& data) override;
|
||||
ClipPointer getClip() { return _clip; }
|
||||
bool completed() { return _failedToLoad || isLoaded(); }
|
||||
|
||||
private:
|
||||
const NetworkClip::Pointer _clip;
|
||||
};
|
||||
|
||||
using NetworkClipLoaderPointer = QSharedPointer<NetworkClipLoader>;
|
||||
|
||||
class ClipCache : public ResourceCache {
|
||||
public:
|
||||
static ClipCache& instance();
|
||||
|
||||
NetworkClipLoaderPointer getClipLoader(const QUrl& url);
|
||||
|
||||
protected:
|
||||
virtual QSharedPointer<Resource> createResource(const QUrl& url, const QSharedPointer<Resource>& fallback, bool delayLoad, const void* extra) override;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
|
@ -11,8 +11,6 @@
|
|||
#include <algorithm>
|
||||
|
||||
#include <QtCore/QDebug>
|
||||
#include <QtCore/QJsonDocument>
|
||||
#include <QtCore/QJsonObject>
|
||||
|
||||
#include <Finally.h>
|
||||
|
||||
|
@ -23,63 +21,6 @@
|
|||
|
||||
using namespace recording;
|
||||
|
||||
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(Frame::Time) + sizeof(FrameSize);
|
||||
static const QString FRAME_TYPE_MAP = QStringLiteral("frameTypes");
|
||||
static const QString FRAME_COMREPSSION_FLAG = QStringLiteral("compressed");
|
||||
|
||||
using FrameTranslationMap = QMap<FrameType, FrameType>;
|
||||
|
||||
FrameTranslationMap parseTranslationMap(const QJsonDocument& doc) {
|
||||
FrameTranslationMap results;
|
||||
auto headerObj = doc.object();
|
||||
if (headerObj.contains(FRAME_TYPE_MAP)) {
|
||||
auto frameTypeObj = headerObj[FRAME_TYPE_MAP].toObject();
|
||||
auto currentFrameTypes = Frame::getFrameTypes();
|
||||
for (auto frameTypeName : frameTypeObj.keys()) {
|
||||
qDebug() << frameTypeName;
|
||||
if (!currentFrameTypes.contains(frameTypeName)) {
|
||||
continue;
|
||||
}
|
||||
FrameType currentTypeEnum = currentFrameTypes[frameTypeName];
|
||||
FrameType storedTypeEnum = static_cast<FrameType>(frameTypeObj[frameTypeName].toInt());
|
||||
results[storedTypeEnum] = currentTypeEnum;
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
|
||||
FileFrameHeaderList parseFrameHeaders(uchar* const start, const qint64& size) {
|
||||
FileFrameHeaderList results;
|
||||
auto current = start;
|
||||
auto end = current + size;
|
||||
// Read all the frame headers
|
||||
// FIXME move to Frame::readHeader?
|
||||
while (end - current >= MINIMUM_FRAME_SIZE) {
|
||||
FileFrameHeader header;
|
||||
memcpy(&(header.type), current, sizeof(FrameType));
|
||||
current += sizeof(FrameType);
|
||||
memcpy(&(header.timeOffset), current, sizeof(Frame::Time));
|
||||
current += sizeof(Frame::Time);
|
||||
memcpy(&(header.size), current, sizeof(FrameSize));
|
||||
current += sizeof(FrameSize);
|
||||
header.fileOffset = current - start;
|
||||
if (end - current < header.size) {
|
||||
current = end;
|
||||
break;
|
||||
}
|
||||
current += header.size;
|
||||
results.push_back(header);
|
||||
}
|
||||
qDebug() << "Parsed source data into " << results.size() << " frames";
|
||||
// int i = 0;
|
||||
// for (const auto& frameHeader : results) {
|
||||
// qDebug() << "Frame " << i++ << " time " << frameHeader.timeOffset << " Type " << frameHeader.type;
|
||||
// }
|
||||
return results;
|
||||
}
|
||||
|
||||
|
||||
FileClip::FileClip(const QString& fileName) : _file(fileName) {
|
||||
auto size = _file.size();
|
||||
qDebug() << "Opening file of size: " << size;
|
||||
|
@ -88,58 +29,8 @@ FileClip::FileClip(const QString& fileName) : _file(fileName) {
|
|||
qCWarning(recordingLog) << "Unable to open file " << fileName;
|
||||
return;
|
||||
}
|
||||
_map = _file.map(0, size, QFile::MapPrivateOption);
|
||||
if (!_map) {
|
||||
qCWarning(recordingLog) << "Unable to map file " << fileName;
|
||||
return;
|
||||
}
|
||||
|
||||
auto parsedFrameHeaders = parseFrameHeaders(_map, size);
|
||||
|
||||
// Verify that at least one frame exists and that the first frame is a header
|
||||
if (0 == parsedFrameHeaders.size()) {
|
||||
qWarning() << "No frames found, invalid file";
|
||||
return;
|
||||
}
|
||||
|
||||
// Grab the file header
|
||||
{
|
||||
auto fileHeaderFrameHeader = *parsedFrameHeaders.begin();
|
||||
parsedFrameHeaders.pop_front();
|
||||
if (fileHeaderFrameHeader.type != Frame::TYPE_HEADER) {
|
||||
qWarning() << "Missing header frame, invalid file";
|
||||
return;
|
||||
}
|
||||
|
||||
QByteArray fileHeaderData((char*)_map + fileHeaderFrameHeader.fileOffset, fileHeaderFrameHeader.size);
|
||||
_fileHeader = QJsonDocument::fromBinaryData(fileHeaderData);
|
||||
}
|
||||
|
||||
// Check for compression
|
||||
{
|
||||
_compressed = _fileHeader.object()[FRAME_COMREPSSION_FLAG].toBool();
|
||||
}
|
||||
|
||||
// Find the type enum translation map and fix up the frame headers
|
||||
{
|
||||
FrameTranslationMap translationMap = parseTranslationMap(_fileHeader);
|
||||
if (translationMap.empty()) {
|
||||
qWarning() << "Header missing frame type map, invalid file";
|
||||
return;
|
||||
}
|
||||
qDebug() << translationMap;
|
||||
|
||||
// Update the loaded headers with the frame data
|
||||
_frames.reserve(parsedFrameHeaders.size());
|
||||
for (auto& frameHeader : parsedFrameHeaders) {
|
||||
if (!translationMap.contains(frameHeader.type)) {
|
||||
continue;
|
||||
}
|
||||
frameHeader.type = translationMap[frameHeader.type];
|
||||
_frames.push_back(frameHeader);
|
||||
}
|
||||
}
|
||||
|
||||
auto mappedFile = _file.map(0, size, QFile::MapPrivateOption);
|
||||
init(mappedFile, size);
|
||||
}
|
||||
|
||||
|
||||
|
@ -147,41 +38,7 @@ QString FileClip::getName() const {
|
|||
return _file.fileName();
|
||||
}
|
||||
|
||||
// FIXME move to frame?
|
||||
bool writeFrame(QIODevice& output, const Frame& frame, bool compressed = true) {
|
||||
if (frame.type == Frame::TYPE_INVALID) {
|
||||
qWarning() << "Attempting to write invalid frame";
|
||||
return true;
|
||||
}
|
||||
|
||||
auto written = output.write((char*)&(frame.type), sizeof(FrameType));
|
||||
if (written != sizeof(FrameType)) {
|
||||
return false;
|
||||
}
|
||||
//qDebug() << "Writing frame with time offset " << frame.timeOffset;
|
||||
written = output.write((char*)&(frame.timeOffset), sizeof(Frame::Time));
|
||||
if (written != sizeof(Frame::Time)) {
|
||||
return false;
|
||||
}
|
||||
QByteArray frameData = frame.data;
|
||||
if (compressed) {
|
||||
frameData = qCompress(frameData);
|
||||
}
|
||||
|
||||
uint16_t dataSize = frameData.size();
|
||||
written = output.write((char*)&dataSize, sizeof(FrameSize));
|
||||
if (written != sizeof(uint16_t)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (dataSize != 0) {
|
||||
written = output.write(frameData);
|
||||
if (written != dataSize) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool FileClip::write(const QString& fileName, Clip::Pointer clip) {
|
||||
// FIXME need to move this to a different thread
|
||||
|
@ -197,62 +54,14 @@ bool FileClip::write(const QString& fileName, Clip::Pointer clip) {
|
|||
}
|
||||
|
||||
Finally closer([&] { outputFile.close(); });
|
||||
{
|
||||
auto frameTypes = Frame::getFrameTypes();
|
||||
QJsonObject frameTypeObj;
|
||||
for (const auto& frameTypeName : frameTypes.keys()) {
|
||||
frameTypeObj[frameTypeName] = frameTypes[frameTypeName];
|
||||
}
|
||||
|
||||
QJsonObject rootObject;
|
||||
rootObject.insert(FRAME_TYPE_MAP, frameTypeObj);
|
||||
// Always mark new files as compressed
|
||||
rootObject.insert(FRAME_COMREPSSION_FLAG, true);
|
||||
QByteArray headerFrameData = QJsonDocument(rootObject).toBinaryData();
|
||||
// Never compress the header frame
|
||||
if (!writeFrame(outputFile, Frame({ Frame::TYPE_HEADER, 0, headerFrameData }), false)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
clip->seek(0);
|
||||
for (auto frame = clip->nextFrame(); frame; frame = clip->nextFrame()) {
|
||||
if (!writeFrame(outputFile, *frame)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
outputFile.close();
|
||||
return true;
|
||||
return clip->write(outputFile);
|
||||
}
|
||||
|
||||
FileClip::~FileClip() {
|
||||
Locker lock(_mutex);
|
||||
_file.unmap(_map);
|
||||
_map = nullptr;
|
||||
_file.unmap(_data);
|
||||
if (_file.isOpen()) {
|
||||
_file.close();
|
||||
}
|
||||
}
|
||||
|
||||
// Internal only function, needs no locking
|
||||
FrameConstPointer FileClip::readFrame(size_t frameIndex) const {
|
||||
FramePointer result;
|
||||
if (frameIndex < _frames.size()) {
|
||||
result = std::make_shared<Frame>();
|
||||
const auto& header = _frames[frameIndex];
|
||||
result->type = header.type;
|
||||
result->timeOffset = header.timeOffset;
|
||||
if (header.size) {
|
||||
result->data.insert(0, reinterpret_cast<char*>(_map)+header.fileOffset, header.size);
|
||||
if (_compressed) {
|
||||
result->data = qUncompress(result->data);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
void FileClip::addFrame(FrameConstPointer) {
|
||||
throw std::runtime_error("File clips are read only");
|
||||
reset();
|
||||
}
|
||||
|
|
|
@ -10,27 +10,13 @@
|
|||
#ifndef hifi_Recording_Impl_FileClip_h
|
||||
#define hifi_Recording_Impl_FileClip_h
|
||||
|
||||
#include "ArrayClip.h"
|
||||
|
||||
#include <mutex>
|
||||
#include "PointerClip.h"
|
||||
|
||||
#include <QtCore/QFile>
|
||||
#include <QtCore/QJsonDocument>
|
||||
|
||||
#include "../Frame.h"
|
||||
|
||||
namespace recording {
|
||||
|
||||
struct FileFrameHeader : public FrameHeader {
|
||||
FrameType type;
|
||||
Frame::Time timeOffset;
|
||||
uint16_t size;
|
||||
quint64 fileOffset;
|
||||
};
|
||||
|
||||
using FileFrameHeaderList = std::list<FileFrameHeader>;
|
||||
|
||||
class FileClip : public ArrayClip<FileFrameHeader> {
|
||||
class FileClip : public PointerClip {
|
||||
public:
|
||||
using Pointer = std::shared_ptr<FileClip>;
|
||||
|
||||
|
@ -38,20 +24,11 @@ public:
|
|||
virtual ~FileClip();
|
||||
|
||||
virtual QString getName() const override;
|
||||
virtual void addFrame(FrameConstPointer) override;
|
||||
|
||||
const QJsonDocument& getHeader() {
|
||||
return _fileHeader;
|
||||
}
|
||||
|
||||
static bool write(const QString& filePath, Clip::Pointer clip);
|
||||
|
||||
private:
|
||||
virtual FrameConstPointer readFrame(size_t index) const override;
|
||||
QJsonDocument _fileHeader;
|
||||
QFile _file;
|
||||
uchar* _map { nullptr };
|
||||
bool _compressed { true };
|
||||
};
|
||||
|
||||
}
|
||||
|
|
160 libraries/recording/src/recording/impl/PointerClip.cpp (new file)
|
@ -0,0 +1,160 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis 2015/11/04
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "PointerClip.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include <QtCore/QDebug>
|
||||
#include <QtCore/QJsonDocument>
|
||||
#include <QtCore/QJsonObject>
|
||||
|
||||
#include <Finally.h>
|
||||
|
||||
#include "../Frame.h"
|
||||
#include "../Logging.h"
|
||||
#include "BufferClip.h"
|
||||
|
||||
|
||||
using namespace recording;
|
||||
|
||||
using FrameTranslationMap = QMap<FrameType, FrameType>;
|
||||
|
||||
FrameTranslationMap parseTranslationMap(const QJsonDocument& doc) {
|
||||
FrameTranslationMap results;
|
||||
auto headerObj = doc.object();
|
||||
if (headerObj.contains(Clip::FRAME_TYPE_MAP)) {
|
||||
auto frameTypeObj = headerObj[Clip::FRAME_TYPE_MAP].toObject();
|
||||
auto currentFrameTypes = Frame::getFrameTypes();
|
||||
for (auto frameTypeName : frameTypeObj.keys()) {
|
||||
qDebug() << frameTypeName;
|
||||
if (!currentFrameTypes.contains(frameTypeName)) {
|
||||
continue;
|
||||
}
|
||||
FrameType currentTypeEnum = currentFrameTypes[frameTypeName];
|
||||
FrameType storedTypeEnum = static_cast<FrameType>(frameTypeObj[frameTypeName].toInt());
|
||||
results[storedTypeEnum] = currentTypeEnum;
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
|
||||
PointerFrameHeaderList parseFrameHeaders(uchar* const start, const size_t& size) {
|
||||
PointerFrameHeaderList results;
|
||||
auto current = start;
|
||||
auto end = current + size;
|
||||
// Read all the frame headers
|
||||
// FIXME move to Frame::readHeader?
|
||||
while (end - current >= PointerClip::MINIMUM_FRAME_SIZE) {
|
||||
PointerFrameHeader header;
|
||||
memcpy(&(header.type), current, sizeof(FrameType));
|
||||
current += sizeof(FrameType);
|
||||
memcpy(&(header.timeOffset), current, sizeof(Frame::Time));
|
||||
current += sizeof(Frame::Time);
|
||||
memcpy(&(header.size), current, sizeof(FrameSize));
|
||||
current += sizeof(FrameSize);
|
||||
header.fileOffset = current - start;
|
||||
if (end - current < header.size) {
|
||||
current = end;
|
||||
break;
|
||||
}
|
||||
current += header.size;
|
||||
results.push_back(header);
|
||||
}
|
||||
qDebug() << "Parsed source data into " << results.size() << " frames";
|
||||
// int i = 0;
|
||||
// for (const auto& frameHeader : results) {
|
||||
// qDebug() << "Frame " << i++ << " time " << frameHeader.timeOffset << " Type " << frameHeader.type;
|
||||
// }
|
||||
return results;
|
||||
}
|
||||
|
||||
void PointerClip::reset() {
|
||||
_frames.clear();
|
||||
_data = nullptr;
|
||||
_size = 0;
|
||||
_header = QJsonDocument();
|
||||
}
|
||||
|
||||
void PointerClip::init(uchar* data, size_t size) {
|
||||
reset();
|
||||
|
||||
_data = data;
|
||||
_size = size;
|
||||
|
||||
auto parsedFrameHeaders = parseFrameHeaders(data, size);
|
||||
// Verify that at least one frame exists and that the first frame is a header
|
||||
if (0 == parsedFrameHeaders.size()) {
|
||||
qWarning() << "No frames found, invalid file";
|
||||
reset();
|
||||
return;
|
||||
}
|
||||
|
||||
// Grab the file header
|
||||
{
|
||||
auto fileHeaderFrameHeader = *parsedFrameHeaders.begin();
|
||||
parsedFrameHeaders.pop_front();
|
||||
if (fileHeaderFrameHeader.type != Frame::TYPE_HEADER) {
|
||||
qWarning() << "Missing header frame, invalid file";
|
||||
reset();
|
||||
return;
|
||||
}
|
||||
|
||||
QByteArray fileHeaderData((char*)_data + fileHeaderFrameHeader.fileOffset, fileHeaderFrameHeader.size);
|
||||
_header = QJsonDocument::fromBinaryData(fileHeaderData);
|
||||
}
|
||||
|
||||
// Check for compression
|
||||
{
|
||||
_compressed = _header.object()[FRAME_COMREPSSION_FLAG].toBool();
|
||||
}
|
||||
|
||||
// Find the type enum translation map and fix up the frame headers
|
||||
{
|
||||
FrameTranslationMap translationMap = parseTranslationMap(_header);
|
||||
if (translationMap.empty()) {
|
||||
qWarning() << "Header missing frame type map, invalid file";
|
||||
reset();
|
||||
return;
|
||||
}
|
||||
|
||||
// Update the loaded headers with the frame data
|
||||
_frames.reserve(parsedFrameHeaders.size());
|
||||
for (auto& frameHeader : parsedFrameHeaders) {
|
||||
if (!translationMap.contains(frameHeader.type)) {
|
||||
continue;
|
||||
}
|
||||
frameHeader.type = translationMap[frameHeader.type];
|
||||
_frames.push_back(frameHeader);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Internal only function, needs no locking
|
||||
FrameConstPointer PointerClip::readFrame(size_t frameIndex) const {
|
||||
FramePointer result;
|
||||
if (frameIndex < _frames.size()) {
|
||||
result = std::make_shared<Frame>();
|
||||
const auto& header = _frames[frameIndex];
|
||||
result->type = header.type;
|
||||
result->timeOffset = header.timeOffset;
|
||||
if (header.size) {
|
||||
result->data.insert(0, reinterpret_cast<char*>(_data)+header.fileOffset, header.size);
|
||||
if (_compressed) {
|
||||
result->data = qUncompress(result->data);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
void PointerClip::addFrame(FrameConstPointer) {
|
||||
throw std::runtime_error("Pointer clips are read only, use duplicate to create a read/write clip");
|
||||
}
|
58 libraries/recording/src/recording/impl/PointerClip.h (new file)
|
@ -0,0 +1,58 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis 2015/11/05
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#pragma once
|
||||
#ifndef hifi_Recording_Impl_PointerClip_h
|
||||
#define hifi_Recording_Impl_PointerClip_h
|
||||
|
||||
#include "ArrayClip.h"
|
||||
|
||||
#include <mutex>
|
||||
|
||||
#include <QtCore/QJsonDocument>
|
||||
|
||||
#include "../Frame.h"
|
||||
|
||||
namespace recording {
|
||||
|
||||
struct PointerFrameHeader : public FrameHeader {
|
||||
FrameType type;
|
||||
Frame::Time timeOffset;
|
||||
uint16_t size;
|
||||
quint64 fileOffset;
|
||||
};
|
||||
|
||||
using PointerFrameHeaderList = std::list<PointerFrameHeader>;
|
||||
|
||||
class PointerClip : public ArrayClip<PointerFrameHeader> {
|
||||
public:
|
||||
using Pointer = std::shared_ptr<PointerClip>;
|
||||
|
||||
PointerClip() {};
|
||||
PointerClip(uchar* data, size_t size) { init(data, size); }
|
||||
|
||||
void init(uchar* data, size_t size);
|
||||
virtual void addFrame(FrameConstPointer) override;
|
||||
const QJsonDocument& getHeader() const {
|
||||
return _header;
|
||||
}
|
||||
|
||||
// FIXME move to frame?
|
||||
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(Frame::Time) + sizeof(FrameSize);
|
||||
protected:
|
||||
void reset();
|
||||
virtual FrameConstPointer readFrame(size_t index) const override;
|
||||
QJsonDocument _header;
|
||||
uchar* _data { nullptr };
|
||||
size_t _size { 0 };
|
||||
bool _compressed { true };
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
|
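Note: PointerClip reads frames out of memory it does not own, which is exactly how NetworkClip (above) wraps downloaded clip data and how the reworked FileClip wraps its memory-mapped file. A small hedged sketch of the same pattern over a QByteArray produced by Clip::toBuffer(); the include paths are illustrative only.

// Sketch: wrap an in-memory clip buffer with a PointerClip. The buffer must
// outlive the clip, since PointerClip::init() keeps a pointer into it.
#include <memory>
#include <QtCore/QByteArray>
#include "recording/impl/PointerClip.h" // illustrative include path

recording::PointerClip::Pointer wrapClipBuffer(QByteArray& clipBytes) {
    auto clip = std::make_shared<recording::PointerClip>();
    clip->init(reinterpret_cast<uchar*>(clipBytes.data()), clipBytes.size());
    return clip;
}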
@ -8,19 +8,28 @@
|
|||
|
||||
#include "RecordingScriptingInterface.h"
|
||||
|
||||
#include <QThread>
|
||||
#include <QtCore/QThread>
|
||||
|
||||
#include <NumericalConstants.h>
|
||||
#include <Transform.h>
|
||||
#include <recording/Deck.h>
|
||||
#include <recording/Recorder.h>
|
||||
#include <recording/Clip.h>
|
||||
#include <recording/Frame.h>
|
||||
#include <NumericalConstants.h>
|
||||
#include <Transform.h>
|
||||
#include <recording/ClipCache.h>
|
||||
|
||||
|
||||
#include <QtScript/QScriptValue>
|
||||
#include <AssetClient.h>
|
||||
#include <AssetUpload.h>
|
||||
#include <QtCore/QUrl>
|
||||
#include <QtWidgets/QFileDialog>
|
||||
|
||||
#include "ScriptEngineLogging.h"
|
||||
|
||||
using namespace recording;
|
||||
|
||||
static const QString HFR_EXTENSION = "hfr";
|
||||
|
||||
RecordingScriptingInterface::RecordingScriptingInterface() {
|
||||
_player = DependencyManager::get<Deck>();
|
||||
|
@ -43,22 +52,25 @@ float RecordingScriptingInterface::playerLength() const {
|
|||
return _player->length();
|
||||
}
|
||||
|
||||
void RecordingScriptingInterface::loadRecording(const QString& filename) {
|
||||
bool RecordingScriptingInterface::loadRecording(const QString& url) {
|
||||
using namespace recording;
|
||||
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection,
|
||||
Q_ARG(QString, filename));
|
||||
return;
|
||||
auto loader = ClipCache::instance().getClipLoader(url);
|
||||
QEventLoop loop;
|
||||
QObject::connect(loader.data(), &Resource::loaded, &loop, &QEventLoop::quit);
|
||||
QObject::connect(loader.data(), &Resource::failed, &loop, &QEventLoop::quit);
|
||||
loop.exec();
|
||||
|
||||
if (!loader->isLoaded()) {
|
||||
qWarning() << "Clip failed to load from " << url;
|
||||
return false;
|
||||
}
|
||||
|
||||
ClipPointer clip = Clip::fromFile(filename);
|
||||
if (!clip) {
|
||||
qWarning() << "Unable to load clip data from " << filename;
|
||||
}
|
||||
_player->queueClip(clip);
|
||||
_player->queueClip(loader->getClip());
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
void RecordingScriptingInterface::startPlaying() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection);
|
||||
|
@ -167,6 +179,47 @@ void RecordingScriptingInterface::saveRecording(const QString& filename) {
|
|||
recording::Clip::toFile(filename, _lastClip);
|
||||
}
|
||||
|
||||
bool RecordingScriptingInterface::saveRecordingToAsset(QScriptValue getClipAtpUrl) {
|
||||
if (!getClipAtpUrl.isFunction()) {
|
||||
qCWarning(scriptengine) << "The argument is not a function.";
|
||||
return false;
|
||||
}
|
||||
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "saveRecordingToAsset", Qt::BlockingQueuedConnection,
|
||||
Q_ARG(QScriptValue, getClipAtpUrl));
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!_lastClip) {
|
||||
qWarning() << "There is no recording to save";
|
||||
return false;
|
||||
}
|
||||
|
||||
if (auto upload = DependencyManager::get<AssetClient>()->createUpload(recording::Clip::toBuffer(_lastClip), HFR_EXTENSION)) {
|
||||
QObject::connect(upload, &AssetUpload::finished, this, [=](AssetUpload* upload, const QString& hash) mutable {
|
||||
QString clip_atp_url = "";
|
||||
|
||||
if (upload->getError() == AssetUpload::NoError) {
|
||||
|
||||
clip_atp_url = QString("%1:%2.%3").arg(URL_SCHEME_ATP, hash, upload->getExtension());
|
||||
upload->deleteLater();
|
||||
} else {
|
||||
qCWarning(scriptengine) << "Error during the Asset upload.";
|
||||
}
|
||||
|
||||
QScriptValueList args;
|
||||
args << clip_atp_url;
|
||||
getClipAtpUrl.call(QScriptValue(), args);
|
||||
});
|
||||
upload->start();
|
||||
return true;
|
||||
}
|
||||
|
||||
qCWarning(scriptengine) << "Saving on asset failed.";
|
||||
return false;
|
||||
}
|
||||
|
||||
void RecordingScriptingInterface::loadLastRecording() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "loadLastRecording", Qt::BlockingQueuedConnection);
|
||||
|
|
|
@ -12,12 +12,14 @@
|
|||
#include <atomic>
|
||||
#include <mutex>
|
||||
|
||||
#include <QObject>
|
||||
#include <QtCore/QObject>
|
||||
|
||||
#include <DependencyManager.h>
|
||||
#include <recording/Forward.h>
|
||||
#include <recording/Frame.h>
|
||||
|
||||
class QScriptValue;
|
||||
|
||||
class RecordingScriptingInterface : public QObject, public Dependency {
|
||||
Q_OBJECT
|
||||
|
||||
|
@ -25,7 +27,7 @@ public:
|
|||
RecordingScriptingInterface();
|
||||
|
||||
public slots:
|
||||
void loadRecording(const QString& filename);
|
||||
bool loadRecording(const QString& url);
|
||||
|
||||
void startPlaying();
|
||||
void pausePlayer();
|
||||
|
@ -60,6 +62,7 @@ public slots:
|
|||
float recorderElapsed() const;
|
||||
|
||||
void saveRecording(const QString& filename);
|
||||
bool saveRecordingToAsset(QScriptValue getClipAtpUrl);
|
||||
void loadLastRecording();
|
||||
|
||||
protected:
|
||||
|
|
|
@ -62,7 +62,7 @@ static QScriptValue debugPrint(QScriptContext* context, QScriptEngine* engine){
|
|||
}
|
||||
message += context->argument(i).toString();
|
||||
}
|
||||
qCDebug(scriptengine) << "script:print()<<" << message;
|
||||
qCDebug(scriptengine).noquote() << "script:print()<<" << message; // noquote() so that \n is treated as newline
|
||||
|
||||
message = message.replace("\\", "\\\\")
|
||||
.replace("\n", "\\n")
|
||||
|
@ -124,14 +124,9 @@ static bool hadUncaughtExceptions(QScriptEngine& engine, const QString& fileName
|
|||
|
||||
ScriptEngine::ScriptEngine(const QString& scriptContents, const QString& fileNameString, bool wantSignals) :
|
||||
_scriptContents(scriptContents),
|
||||
_isFinished(false),
|
||||
_isRunning(false),
|
||||
_isInitialized(false),
|
||||
_timerFunctionMap(),
|
||||
_wantSignals(wantSignals),
|
||||
_fileNameString(fileNameString),
|
||||
_isUserLoaded(false),
|
||||
_isReloading(false),
|
||||
_arrayBufferClass(new ArrayBufferClass(this))
|
||||
{
|
||||
_allScriptsMutex.lock();
|
||||
|
@ -140,6 +135,8 @@ ScriptEngine::ScriptEngine(const QString& scriptContents, const QString& fileNam
|
|||
}
|
||||
|
||||
ScriptEngine::~ScriptEngine() {
|
||||
qCDebug(scriptengine) << "Script Engine shutting down (destructor) for script:" << getFilename();
|
||||
|
||||
// If we're not already in the middle of stopping all scripts, then we should remove ourselves
|
||||
// from the list of running scripts. We don't do this if we're in the process of stopping all scripts
|
||||
// because that method removes scripts from its list as it iterates them
|
||||
|
@ -150,11 +147,21 @@ ScriptEngine::~ScriptEngine() {
|
|||
}
|
||||
}
|
||||
|
||||
void ScriptEngine::disconnectNonEssentialSignals() {
|
||||
disconnect();
|
||||
connect(this, &ScriptEngine::doneRunning, thread(), &QThread::quit);
|
||||
}
|
||||
|
||||
void ScriptEngine::runInThread() {
|
||||
_isThreaded = true;
|
||||
QThread* workerThread = new QThread(); // thread is not owned, so we need to manage the delete
|
||||
QString scriptEngineName = QString("Script Thread:") + getFilename();
|
||||
workerThread->setObjectName(scriptEngineName);
|
||||
|
||||
// NOTE: If you connect any essential signals for proper shutdown or cleanup of
|
||||
// the script engine, make sure to add code to "reconnect" them to the
|
||||
// disconnectNonEssentialSignals() method
|
||||
|
||||
// when the worker thread is started, call our engine's run..
|
||||
connect(workerThread, &QThread::started, this, &ScriptEngine::run);
|
||||
|
||||
|
@ -176,12 +183,13 @@ void ScriptEngine::runInThread() {
|
|||
QSet<ScriptEngine*> ScriptEngine::_allKnownScriptEngines;
|
||||
QMutex ScriptEngine::_allScriptsMutex;
|
||||
bool ScriptEngine::_stoppingAllScripts = false;
|
||||
bool ScriptEngine::_doneRunningThisScript = false;
|
||||
|
||||
void ScriptEngine::stopAllScripts(QObject* application) {
|
||||
_allScriptsMutex.lock();
|
||||
_stoppingAllScripts = true;
|
||||
|
||||
qCDebug(scriptengine) << "Stopping all scripts.... currently known scripts:" << _allKnownScriptEngines.size();
|
||||
|
||||
QMutableSetIterator<ScriptEngine*> i(_allKnownScriptEngines);
|
||||
while (i.hasNext()) {
|
||||
ScriptEngine* scriptEngine = i.next();
|
||||
|
@ -219,7 +227,9 @@ void ScriptEngine::stopAllScripts(QObject* application) {
|
|||
// We need to wait for the engine to be done running before we proceed, because we don't
|
||||
// want any of the scripts final "scriptEnding()" or pending "update()" methods from accessing
|
||||
// any application state after we leave this stopAllScripts() method
|
||||
qCDebug(scriptengine) << "waiting on script:" << scriptName;
|
||||
scriptEngine->waitTillDoneRunning();
|
||||
qCDebug(scriptengine) << "done waiting on script:" << scriptName;
|
||||
|
||||
// If the script is stopped, we can remove it from our set
|
||||
i.remove();
|
||||
|
@ -227,21 +237,19 @@ void ScriptEngine::stopAllScripts(QObject* application) {
|
|||
}
|
||||
_stoppingAllScripts = false;
|
||||
_allScriptsMutex.unlock();
|
||||
qCDebug(scriptengine) << "DONE Stopping all scripts....";
|
||||
}
|
||||
|
||||
|
||||
void ScriptEngine::waitTillDoneRunning() {
|
||||
// If the script never started running or finished running before we got here, we don't need to wait for it
|
||||
if (_isRunning) {
|
||||
|
||||
_doneRunningThisScript = false; // NOTE: this is static, we serialize our waiting for scripts to finish
|
||||
if (_isRunning && _isThreaded) {
|
||||
|
||||
// NOTE: waitTillDoneRunning() will be called on the main Application thread, inside of stopAllScripts()
|
||||
// we want the application thread to continue to process events, because the scripts will likely need to
|
||||
// marshal messages across to the main thread. For example if they access Settings or Menu in any of their
|
||||
// shutdown code.
|
||||
while (!_doneRunningThisScript) {
|
||||
|
||||
while (thread()->isRunning()) {
|
||||
// process events for the main application thread, allowing invokeMethod calls to pass between threads
|
||||
QCoreApplication::processEvents();
|
||||
}
|
||||
|
@ -381,6 +389,9 @@ void ScriptEngine::init() {
|
|||
|
||||
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
|
||||
registerGlobalObject("Recording", recordingInterface.data());
|
||||
|
||||
registerGlobalObject("Assets", &_assetScriptingInterface);
|
||||
|
||||
}
|
||||
|
||||
void ScriptEngine::registerValue(const QString& valueName, QScriptValue value) {
|
||||
|
@ -749,8 +760,6 @@ void ScriptEngine::run() {
|
|||
emit runningStateChanged();
|
||||
emit doneRunning();
|
||||
}
|
||||
|
||||
_doneRunningThisScript = true;
|
||||
}
|
||||
|
||||
// NOTE: This is private because it must be called on the same thread that created the timers, which is why
|
||||
|
@ -1165,7 +1174,7 @@ void ScriptEngine::refreshFileScript(const EntityItemID& entityID) {
|
|||
QString filePath = QUrl(details.scriptText).toLocalFile();
|
||||
auto lastModified = QFileInfo(filePath).lastModified().toMSecsSinceEpoch();
|
||||
if (lastModified > details.lastModified) {
|
||||
qDebug() << "Reloading modified script " << details.scriptText;
|
||||
qCDebug(scriptengine) << "Reloading modified script " << details.scriptText;
|
||||
|
||||
QFile file(filePath);
|
||||
file.open(QIODevice::ReadOnly);
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
|
||||
#include <AnimationCache.h>
|
||||
#include <AnimVariant.h>
|
||||
#include <AssetClient.h>
|
||||
#include <AvatarData.h>
|
||||
#include <AvatarHashMap.h>
|
||||
#include <LimitedNodeList.h>
|
||||
|
@ -127,6 +128,7 @@ public:
|
|||
bool isFinished() const { return _isFinished; } // used by Application and ScriptWidget
|
||||
bool isRunning() const { return _isRunning; } // used by ScriptWidget
|
||||
|
||||
void disconnectNonEssentialSignals();
|
||||
static void stopAllScripts(QObject* application); // used by Application on shutdown
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -164,15 +166,16 @@ signals:
|
|||
protected:
|
||||
QString _scriptContents;
|
||||
QString _parentURL;
|
||||
bool _isFinished;
|
||||
bool _isRunning;
|
||||
int _evaluatesPending = 0;
|
||||
bool _isInitialized;
|
||||
bool _isFinished { false };
|
||||
bool _isRunning { false };
|
||||
int _evaluatesPending { 0 };
|
||||
bool _isInitialized { false };
|
||||
QHash<QTimer*, QScriptValue> _timerFunctionMap;
|
||||
QSet<QUrl> _includedURLs;
|
||||
bool _wantSignals = true;
|
||||
bool _wantSignals { true };
|
||||
QHash<EntityItemID, EntityScriptDetails> _entityScripts;
|
||||
private:
|
||||
bool _isThreaded { false };
|
||||
|
||||
void init();
|
||||
QString getFilename() const;
|
||||
void waitTillDoneRunning();
|
||||
|
@ -190,11 +193,13 @@ private:
|
|||
Quat _quatLibrary;
|
||||
Vec3 _vec3Library;
|
||||
ScriptUUID _uuidLibrary;
|
||||
bool _isUserLoaded;
|
||||
bool _isReloading;
|
||||
bool _isUserLoaded { false };
|
||||
bool _isReloading { false };
|
||||
|
||||
ArrayBufferClass* _arrayBufferClass;
|
||||
|
||||
AssetScriptingInterface _assetScriptingInterface{ this };
|
||||
|
||||
QHash<EntityItemID, RegisteredEventHandlers> _registeredHandlers;
|
||||
void forwardHandlerCall(const EntityItemID& entityID, const QString& eventName, QScriptValueList eventHanderArgs);
|
||||
Q_INVOKABLE void entityScriptContentAvailable(const EntityItemID& entityID, const QString& scriptOrURL, const QString& contents, bool isURL, bool success);
|
||||
|
@ -202,8 +207,6 @@ private:
|
|||
static QSet<ScriptEngine*> _allKnownScriptEngines;
|
||||
static QMutex _allScriptsMutex;
|
||||
static bool _stoppingAllScripts;
|
||||
static bool _doneRunningThisScript;
|
||||
|
||||
};
|
||||
|
||||
#endif // hifi_ScriptEngine_h
|
||||
|
|
|
@ -29,6 +29,7 @@ glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseP
|
|||
void OculusBaseDisplayPlugin::resetSensors() {
|
||||
#if (OVR_MAJOR_VERSION >= 6)
|
||||
ovr_RecenterPose(_hmd);
|
||||
preRender();
|
||||
#endif
|
||||
}
|
||||
|
||||
|
|
48 stack-manager/CMakeLists.txt (new file)
|
@ -0,0 +1,48 @@
|
|||
set(TARGET_NAME "stack-manager")
|
||||
set(BUILD_BUNDLE YES)
|
||||
setup_hifi_project(Widgets Gui Svg Core Network WebKitWidgets)
|
||||
|
||||
if (WIN32)
|
||||
target_zlib()
|
||||
endif ()
|
||||
target_quazip()
|
||||
|
||||
set_target_properties(
|
||||
${TARGET_NAME} PROPERTIES
|
||||
EXCLUDE_FROM_ALL TRUE
|
||||
)
|
||||
|
||||
if (DEFINED ENV{JOB_ID})
|
||||
set(PR_BUILD "false")
|
||||
set(BUILD_SEQ $ENV{JOB_ID})
|
||||
set(BASE_URL "http://s3.amazonaws.com/hifi-public")
|
||||
else ()
|
||||
set(BUILD_SEQ "dev")
|
||||
if (DEFINED ENV{PR_NUMBER})
|
||||
set(PR_BUILD "true")
|
||||
set(BASE_URL "http://s3.amazonaws.com/hifi-public/pr-builds/$ENV{PR_NUMBER}")
|
||||
else ()
|
||||
set(PR_BUILD "false")
|
||||
set(BASE_URL "http://s3.amazonaws.com/hifi-public")
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
configure_file(src/StackManagerVersion.h.in "${PROJECT_BINARY_DIR}/includes/StackManagerVersion.h")
|
||||
include_directories(
|
||||
${PROJECT_BINARY_DIR}/includes
|
||||
${PROJECT_SOURCE_DIR}/src
|
||||
${PROJECT_SOURCE_DIR}/src/ui
|
||||
${QUAZIP_INCLUDE_DIRS}
|
||||
${ZLIB_INCLUDE_DIRS}
|
||||
)
|
||||
|
||||
if (APPLE)
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET 10.8)
|
||||
set(MACOSX_BUNDLE_BUNDLE_NAME "Stack Manager")
|
||||
set(MACOSX_BUNDLE_GUI_IDENTIFIER io.highfidelity.StackManager)
|
||||
set(MACOSX_BUNDLE_ICON_FILE icon.icns)
|
||||
set_source_files_properties(${CMAKE_CURRENT_SOURCE_DIR}/assets/icon.icns PROPERTIES MACOSX_PACKAGE_LOCATION Resources)
|
||||
set(SM_SRCS ${SM_SRCS} "${CMAKE_CURRENT_SOURCE_DIR}/assets/icon.icns")
|
||||
endif ()
|
||||
|
||||
package_libraries_for_deployment()
|
22 stack-manager/assets/assignment-run.svg (new file)
|
@ -0,0 +1,22 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 18.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 59 32" enable-background="new 0 0 59 32" xml:space="preserve">
|
||||
<g>
|
||||
<path fill="#29957E" d="M54,32H5c-2.8,0-5-2.2-5-5V5c0-2.8,2.2-5,5-5h49c2.8,0,5,2.2,5,5v22C59,29.8,56.8,32,54,32z"/>
|
||||
<g>
|
||||
<path fill="#EDEDED" d="M22.1,10.1c0.5,0.2,0.8,0.5,1.1,0.9c0.3,0.3,0.5,0.7,0.6,1c0.2,0.4,0.2,0.8,0.2,1.3c0,0.6-0.1,1.2-0.4,1.7
|
||||
c-0.3,0.6-0.8,1-1.5,1.2c0.6,0.2,1,0.5,1.2,1s0.4,1.1,0.4,1.9v0.8c0,0.6,0,0.9,0.1,1.1c0.1,0.3,0.2,0.5,0.5,0.7V22h-2.8
|
||||
c-0.1-0.3-0.1-0.5-0.2-0.7c-0.1-0.3-0.1-0.7-0.1-1.1l0-1.1c0-0.8-0.1-1.3-0.4-1.6c-0.3-0.3-0.7-0.4-1.5-0.4h-2.5V22h-2.5V9.8h5.9
|
||||
C21,9.8,21.7,9.9,22.1,10.1z M16.8,11.9v3.3h2.8c0.5,0,1-0.1,1.2-0.2c0.5-0.2,0.7-0.7,0.7-1.4c0-0.7-0.2-1.2-0.7-1.5
|
||||
c-0.3-0.1-0.7-0.2-1.2-0.2H16.8z"/>
|
||||
<path fill="#EDEDED" d="M28.7,13v5.5c0,0.5,0.1,0.9,0.2,1.2c0.2,0.5,0.6,0.7,1.3,0.7c0.8,0,1.4-0.3,1.7-1c0.2-0.4,0.2-0.8,0.2-1.4
|
||||
V13h2.4v9h-2.3v-1.3c0,0-0.1,0.1-0.2,0.2c-0.1,0.1-0.2,0.3-0.3,0.4c-0.4,0.3-0.7,0.6-1.1,0.7s-0.7,0.2-1.2,0.2
|
||||
c-1.3,0-2.2-0.5-2.7-1.4c-0.3-0.5-0.4-1.3-0.4-2.3V13H28.7z"/>
|
||||
<path fill="#EDEDED" d="M44,13.5c0.6,0.5,0.9,1.3,0.9,2.4V22h-2.4v-5.5c0-0.5-0.1-0.8-0.2-1.1c-0.2-0.5-0.7-0.7-1.3-0.7
|
||||
c-0.8,0-1.3,0.3-1.6,1c-0.2,0.4-0.2,0.8-0.2,1.4V22h-2.4v-9H39v1.3c0.3-0.5,0.6-0.8,0.9-1c0.5-0.4,1.1-0.5,1.8-0.5
|
||||
C42.7,12.7,43.4,13,44,13.5z"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
27 stack-manager/assets/assignment-stop.svg (new file)
|
@ -0,0 +1,27 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 18.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 59 32" enable-background="new 0 0 59 32" xml:space="preserve">
|
||||
<g>
|
||||
<path fill="#BB3850" d="M54,32H5c-2.8,0-5-2.2-5-5V5c0-2.8,2.2-5,5-5h49c2.8,0,5,2.2,5,5v22C59,29.8,56.8,32,54,32z"/>
|
||||
<g>
|
||||
<path fill="#EDEDED" d="M13.8,18.2c0.1,0.6,0.2,1,0.5,1.3c0.4,0.5,1.1,0.8,2.2,0.8c0.6,0,1.1-0.1,1.5-0.2c0.7-0.3,1.1-0.7,1.1-1.4
|
||||
c0-0.4-0.2-0.7-0.5-0.9c-0.4-0.2-0.9-0.4-1.7-0.6l-1.3-0.3c-1.3-0.3-2.2-0.6-2.7-0.9c-0.8-0.6-1.2-1.4-1.2-2.6
|
||||
c0-1.1,0.4-2,1.2-2.7s2-1.1,3.6-1.1c1.3,0,2.4,0.3,3.3,1c0.9,0.7,1.4,1.7,1.4,3h-2.5c0-0.7-0.4-1.3-1-1.6
|
||||
c-0.4-0.2-0.9-0.3-1.5-0.3c-0.7,0-1.2,0.1-1.6,0.4s-0.6,0.6-0.6,1.1c0,0.4,0.2,0.8,0.6,1c0.3,0.1,0.8,0.3,1.6,0.5l2.1,0.5
|
||||
c0.9,0.2,1.6,0.5,2.1,0.9c0.7,0.6,1.1,1.4,1.1,2.5c0,1.1-0.4,2-1.3,2.8c-0.9,0.7-2.1,1.1-3.7,1.1c-1.6,0-2.9-0.4-3.8-1.1
|
||||
s-1.4-1.7-1.4-3H13.8z"/>
|
||||
<path fill="#EDEDED" d="M22.1,14.7V13h1.3v-2.5h2.3V13h1.5v1.7h-1.5v4.8c0,0.4,0,0.6,0.1,0.7c0.1,0.1,0.4,0.1,0.9,0.1
|
||||
c0.1,0,0.1,0,0.2,0c0.1,0,0.2,0,0.2,0v1.8l-1.1,0c-1.1,0-1.9-0.2-2.3-0.6c-0.3-0.3-0.4-0.7-0.4-1.3v-5.6H22.1z"/>
|
||||
<path fill="#EDEDED" d="M36.3,20.9c-0.8,0.9-1.9,1.4-3.5,1.4s-2.7-0.5-3.5-1.4c-0.8-0.9-1.1-2.1-1.1-3.4c0-1.3,0.4-2.4,1.1-3.4
|
||||
c0.8-1,1.9-1.4,3.5-1.4s2.7,0.5,3.5,1.4c0.8,1,1.1,2.1,1.1,3.4C37.4,18.8,37,19.9,36.3,20.9z M34.4,19.6c0.4-0.5,0.6-1.2,0.6-2.1
|
||||
s-0.2-1.6-0.6-2.1s-0.9-0.7-1.6-0.7s-1.2,0.2-1.6,0.7c-0.4,0.5-0.6,1.2-0.6,2.1s0.2,1.6,0.6,2.1c0.4,0.5,0.9,0.7,1.6,0.7
|
||||
S34,20.1,34.4,19.6z"/>
|
||||
<path fill="#EDEDED" d="M46.7,13.9c0.7,0.8,1.1,1.9,1.1,3.4c0,1.6-0.4,2.8-1.1,3.6s-1.6,1.3-2.8,1.3c-0.7,0-1.3-0.2-1.8-0.5
|
||||
c-0.3-0.2-0.5-0.5-0.8-0.9v4.7H39V13h2.3v1.3c0.3-0.4,0.5-0.7,0.8-0.9c0.5-0.4,1.2-0.6,1.9-0.6C45.1,12.8,46,13.1,46.7,13.9z
|
||||
M44.9,15.6c-0.3-0.5-0.8-0.8-1.6-0.8c-0.9,0-1.5,0.4-1.8,1.2c-0.2,0.4-0.3,1-0.3,1.6c0,1.1,0.3,1.8,0.8,2.2
|
||||
c0.3,0.2,0.7,0.4,1.2,0.4c0.7,0,1.2-0.3,1.5-0.8s0.5-1.2,0.5-2C45.4,16.8,45.2,16.2,44.9,15.6z"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
BIN stack-manager/assets/icon.icns (new binary file, not shown)
BIN stack-manager/assets/icon.ico (new binary file, not shown)
BIN stack-manager/assets/icon.png (new binary file, not shown)
BIN stack-manager/assets/logo-larger.png (new binary file, not shown)
57 stack-manager/assets/server-start.svg (new file)
|
@ -0,0 +1,57 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 18.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 189 46" enable-background="new 0 0 189 46" xml:space="preserve">
|
||||
<g>
|
||||
<path fill="#29957E" d="M184,46H5c-2.8,0-5-2.2-5-5V5c0-2.8,2.2-5,5-5h179c2.8,0,5,2.2,5,5v36C189,43.8,186.8,46,184,46z"/>
|
||||
<g>
|
||||
<path fill="#EDEDED" d="M25.3,33.7c-5.7,0-10.3-4.6-10.3-10.3c0-3.3,1.5-6.3,4.1-8.2c0.8-0.6,1.8-0.4,2.4,0.3
|
||||
c0.6,0.8,0.4,1.8-0.3,2.4c-1.7,1.3-2.7,3.3-2.7,5.5c0,3.8,3.1,6.9,6.9,6.9s6.9-3.1,6.9-6.9c0-2.2-1-4.2-2.7-5.5
|
||||
c-0.8-0.6-0.9-1.6-0.3-2.4c0.6-0.8,1.6-0.9,2.4-0.3c2.6,2,4.1,5,4.1,8.2C35.6,29.1,31,33.7,25.3,33.7z M27,21.7
|
||||
c0,0.9-0.8,1.7-1.7,1.7s-1.7-0.8-1.7-1.7v-8.6c0-0.9,0.8-1.7,1.7-1.7s1.7,0.8,1.7,1.7V21.7z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path fill="#EDEDED" d="M46.4,26.7c0.1,0.8,0.3,1.4,0.7,1.8c0.6,0.7,1.6,1.1,3.1,1.1c0.9,0,1.6-0.1,2.1-0.3c1-0.4,1.5-1,1.5-2
|
||||
c0-0.6-0.3-1-0.8-1.3c-0.5-0.3-1.3-0.6-2.4-0.8l-1.9-0.4c-1.8-0.4-3.1-0.8-3.8-1.3c-1.2-0.8-1.7-2-1.7-3.7c0-1.5,0.6-2.8,1.7-3.9
|
||||
c1.1-1,2.8-1.5,5-1.5c1.8,0,3.4,0.5,4.7,1.4c1.3,1,2,2.4,2,4.2h-3.5c-0.1-1-0.5-1.8-1.4-2.2c-0.6-0.3-1.3-0.4-2.2-0.4
|
||||
c-1,0-1.7,0.2-2.3,0.6s-0.9,0.9-0.9,1.6c0,0.6,0.3,1.1,0.8,1.4c0.4,0.2,1.1,0.4,2.3,0.7l3,0.7c1.3,0.3,2.3,0.7,3,1.3
|
||||
c1,0.8,1.5,2,1.5,3.5c0,1.6-0.6,2.9-1.8,3.9c-1.2,1-2.9,1.6-5.2,1.6c-2.3,0-4.1-0.5-5.4-1.5c-1.3-1-2-2.4-2-4.2H46.4z"/>
|
||||
<path fill="#EDEDED" d="M58.3,21.7v-2.4H60v-3.6h3.3v3.6h2.1v2.4h-2.1v6.8c0,0.5,0.1,0.8,0.2,1s0.5,0.2,1.2,0.2c0.1,0,0.2,0,0.3,0
|
||||
s0.2,0,0.3,0v2.5l-1.6,0.1c-1.6,0.1-2.7-0.2-3.2-0.8c-0.4-0.4-0.6-1-0.6-1.8v-7.9H58.3z"/>
|
||||
<path fill="#EDEDED" d="M72.6,24.2c0.6-0.1,1.1-0.2,1.3-0.3c0.5-0.2,0.7-0.5,0.7-0.9c0-0.5-0.2-0.9-0.6-1.1
|
||||
c-0.4-0.2-0.9-0.3-1.6-0.3c-0.8,0-1.4,0.2-1.7,0.6c-0.2,0.3-0.4,0.7-0.5,1.2h-3.2c0.1-1.1,0.4-2,0.9-2.8c0.9-1.1,2.4-1.7,4.6-1.7
|
||||
c1.4,0,2.7,0.3,3.7,0.8c1.1,0.6,1.6,1.6,1.6,3.1v5.9c0,0.4,0,0.9,0,1.5c0,0.4,0.1,0.7,0.2,0.9s0.3,0.3,0.5,0.4V32h-3.6
|
||||
c-0.1-0.3-0.2-0.5-0.2-0.7s-0.1-0.5-0.1-0.8c-0.5,0.5-1,0.9-1.6,1.3c-0.7,0.4-1.5,0.6-2.5,0.6c-1.2,0-2.1-0.3-2.9-1
|
||||
c-0.8-0.7-1.1-1.6-1.1-2.8c0-1.6,0.6-2.7,1.8-3.4c0.7-0.4,1.7-0.7,3-0.8L72.6,24.2z M74.7,25.8c-0.2,0.1-0.4,0.2-0.6,0.3
|
||||
c-0.2,0.1-0.5,0.2-0.9,0.2l-0.8,0.1c-0.7,0.1-1.2,0.3-1.5,0.5c-0.5,0.3-0.8,0.8-0.8,1.4c0,0.6,0.2,1,0.5,1.2s0.7,0.4,1.2,0.4
|
||||
c0.7,0,1.4-0.2,2-0.6s0.9-1.2,1-2.3V25.8z"/>
|
||||
<path fill="#EDEDED" d="M88,18.9c0,0,0.1,0,0.3,0v3.4c-0.2,0-0.4,0-0.6,0s-0.3,0-0.4,0c-1.3,0-2.2,0.4-2.7,1.3
|
||||
c-0.3,0.5-0.4,1.2-0.4,2.3V32h-3.4V19.2h3.2v2.2c0.5-0.9,1-1.4,1.3-1.7c0.6-0.5,1.4-0.8,2.4-0.8C87.9,18.9,88,18.9,88,18.9z"/>
|
||||
<path fill="#EDEDED" d="M88.9,21.7v-2.4h1.8v-3.6H94v3.6h2.1v2.4H94v6.8c0,0.5,0.1,0.8,0.2,1s0.5,0.2,1.2,0.2c0.1,0,0.2,0,0.3,0
|
||||
s0.2,0,0.3,0v2.5l-1.6,0.1c-1.6,0.1-2.7-0.2-3.2-0.8c-0.4-0.4-0.6-1-0.6-1.8v-7.9H88.9z"/>
|
||||
<path fill="#EDEDED" d="M107.5,27.9c0.1,0.6,0.2,1,0.5,1.3c0.4,0.4,1.2,0.7,2.3,0.7c0.7,0,1.2-0.1,1.6-0.3
|
||||
c0.4-0.2,0.6-0.5,0.6-0.9c0-0.4-0.2-0.7-0.5-0.9s-1.5-0.5-3.5-1c-1.5-0.4-2.5-0.8-3.1-1.3c-0.6-0.5-0.9-1.3-0.9-2.3
|
||||
c0-1.2,0.5-2.2,1.4-3s2.2-1.3,3.9-1.3c1.6,0,2.9,0.3,3.9,1s1.6,1.7,1.7,3.3h-3.3c0-0.4-0.2-0.8-0.4-1c-0.4-0.5-1-0.7-1.9-0.7
|
||||
c-0.7,0-1.2,0.1-1.6,0.3c-0.3,0.2-0.5,0.5-0.5,0.8c0,0.4,0.2,0.7,0.5,0.8c0.3,0.2,1.5,0.5,3.5,0.9c1.3,0.3,2.3,0.8,3,1.4
|
||||
c0.7,0.6,1,1.4,1,2.4c0,1.3-0.5,2.3-1.4,3.1s-2.4,1.2-4.4,1.2c-2,0-3.5-0.4-4.5-1.3s-1.4-1.9-1.4-3.2H107.5z"/>
|
||||
<path fill="#EDEDED" d="M126.6,19.5c0.9,0.4,1.6,1,2.2,1.9c0.5,0.8,0.9,1.6,1,2.6c0.1,0.6,0.1,1.4,0.1,2.5h-9.3
|
||||
c0.1,1.3,0.5,2.2,1.3,2.7c0.5,0.3,1.1,0.5,1.8,0.5c0.8,0,1.4-0.2,1.9-0.6c0.3-0.2,0.5-0.5,0.7-0.9h3.4c-0.1,0.8-0.5,1.5-1.2,2.3
|
||||
c-1.1,1.2-2.7,1.9-4.8,1.9c-1.7,0-3.2-0.5-4.5-1.6c-1.3-1-1.9-2.8-1.9-5.1c0-2.2,0.6-3.9,1.8-5.1c1.2-1.2,2.7-1.8,4.6-1.8
|
||||
C124.7,18.9,125.7,19.1,126.6,19.5z M121.6,22.4c-0.5,0.5-0.8,1.1-0.9,2h5.8c-0.1-0.9-0.4-1.6-0.9-2c-0.5-0.5-1.2-0.7-2-0.7
|
||||
C122.7,21.6,122.1,21.9,121.6,22.4z"/>
|
||||
<path fill="#EDEDED" d="M138.7,18.9c0,0,0.1,0,0.3,0v3.4c-0.2,0-0.4,0-0.6,0s-0.3,0-0.4,0c-1.3,0-2.2,0.4-2.7,1.3
|
||||
c-0.3,0.5-0.4,1.2-0.4,2.3V32h-3.4V19.2h3.2v2.2c0.5-0.9,1-1.4,1.3-1.7c0.6-0.5,1.4-0.8,2.4-0.8C138.6,18.9,138.7,18.9,138.7,18.9
|
||||
z"/>
|
||||
<path fill="#EDEDED" d="M148.8,19.2h3.6L147.8,32h-3.5l-4.6-12.8h3.8l2.7,9.4L148.8,19.2z"/>
|
||||
<path fill="#EDEDED" d="M162.6,19.5c0.9,0.4,1.6,1,2.2,1.9c0.5,0.8,0.9,1.6,1,2.6c0.1,0.6,0.1,1.4,0.1,2.5h-9.3
|
||||
c0.1,1.3,0.5,2.2,1.3,2.7c0.5,0.3,1.1,0.5,1.8,0.5c0.8,0,1.4-0.2,1.9-0.6c0.3-0.2,0.5-0.5,0.7-0.9h3.4c-0.1,0.8-0.5,1.5-1.2,2.3
|
||||
c-1.1,1.2-2.7,1.9-4.8,1.9c-1.7,0-3.2-0.5-4.5-1.6c-1.3-1-1.9-2.8-1.9-5.1c0-2.2,0.6-3.9,1.8-5.1c1.2-1.2,2.7-1.8,4.6-1.8
|
||||
C160.7,18.9,161.7,19.1,162.6,19.5z M157.6,22.4c-0.5,0.5-0.8,1.1-0.9,2h5.8c-0.1-0.9-0.4-1.6-0.9-2c-0.5-0.5-1.2-0.7-2-0.7
|
||||
C158.8,21.6,158.1,21.9,157.6,22.4z"/>
|
||||
<path fill="#EDEDED" d="M174.7,18.9c0,0,0.1,0,0.3,0v3.4c-0.2,0-0.4,0-0.6,0s-0.3,0-0.4,0c-1.3,0-2.2,0.4-2.7,1.3
|
||||
c-0.3,0.5-0.4,1.2-0.4,2.3V32h-3.4V19.2h3.2v2.2c0.5-0.9,1-1.4,1.3-1.7c0.6-0.5,1.4-0.8,2.4-0.8C174.6,18.9,174.7,18.9,174.7,18.9
|
||||
z"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
Some files were not shown because too many files have changed in this diff.