Merge remote-tracking branch 'upstream/master' into bow

James B. Pollack 2015-11-20 14:33:26 -08:00
commit 49e815f3d6
49 changed files with 1538 additions and 784 deletions


@ -22,6 +22,7 @@
#include <NodeList.h>
#include <udt/PacketHeaders.h>
#include <ResourceCache.h>
#include <ScriptCache.h>
#include <SoundCache.h>
#include <UUID.h>
@ -116,6 +117,11 @@ void Agent::handleAudioPacket(QSharedPointer<NLPacket> packet) {
const QString AGENT_LOGGING_NAME = "agent";
void Agent::run() {
// make sure we request our script once the agent connects to the domain
auto nodeList = DependencyManager::get<NodeList>();
connect(&nodeList->getDomainHandler(), &DomainHandler::connectedToDomain, this, &Agent::requestScript);
ThreadedAssignment::commonInit(AGENT_LOGGING_NAME, NodeType::Agent);
// Setup MessagesClient
@ -125,72 +131,99 @@ void Agent::run() {
messagesClient->moveToThread(messagesThread);
connect(messagesThread, &QThread::started, messagesClient.data(), &MessagesClient::init);
messagesThread->start();
nodeList->addSetOfNodeTypesToNodeInterestSet({
NodeType::AudioMixer, NodeType::AvatarMixer, NodeType::EntityServer, NodeType::MessagesMixer
});
}
void Agent::requestScript() {
auto nodeList = DependencyManager::get<NodeList>();
nodeList->addSetOfNodeTypesToNodeInterestSet(NodeSet()
<< NodeType::AudioMixer
<< NodeType::AvatarMixer
<< NodeType::EntityServer
<< NodeType::MessagesMixer
);
disconnect(&nodeList->getDomainHandler(), &DomainHandler::connectedToDomain, this, &Agent::requestScript);
// figure out the URL for the script for this agent assignment
QUrl scriptURL;
if (_payload.isEmpty()) {
scriptURL = QUrl(QString("http://%1:%2/assignment/%3")
.arg(DependencyManager::get<NodeList>()->getDomainHandler().getIP().toString())
.arg(DOMAIN_SERVER_HTTP_PORT)
.arg(uuidStringWithoutCurlyBraces(_uuid)));
scriptURL = QUrl(QString("http://%1:%2/assignment/%3/")
.arg(nodeList->getDomainHandler().getIP().toString())
.arg(DOMAIN_SERVER_HTTP_PORT)
.arg(uuidStringWithoutCurlyBraces(nodeList->getSessionUUID())));
} else {
scriptURL = QUrl(_payload);
}
// setup a network access manager
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest = QNetworkRequest(scriptURL);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
QNetworkReply* reply = networkAccessManager.get(networkRequest);
QNetworkDiskCache* cache = new QNetworkDiskCache();
QString cachePath = QStandardPaths::writableLocation(QStandardPaths::DataLocation);
cache->setCacheDirectory(!cachePath.isEmpty() ? cachePath : "agentCache");
networkAccessManager.setCache(cache);
QNetworkRequest networkRequest = QNetworkRequest(scriptURL);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
// setup a timeout for script request
static const int SCRIPT_TIMEOUT_MS = 10000;
_scriptRequestTimeout = new QTimer(this);
connect(_scriptRequestTimeout, &QTimer::timeout, this, &Agent::scriptRequestFinished);
_scriptRequestTimeout->start(SCRIPT_TIMEOUT_MS);
qDebug() << "Downloading script at" << scriptURL.toString();
QNetworkReply* reply = networkAccessManager.get(networkRequest);
connect(reply, &QNetworkReply::finished, this, &Agent::scriptRequestFinished);
}
QEventLoop loop;
QObject::connect(reply, SIGNAL(finished()), &loop, SLOT(quit()));
void Agent::scriptRequestFinished() {
auto reply = qobject_cast<QNetworkReply*>(sender());
loop.exec();
_scriptRequestTimeout->stop();
if (reply && reply->error() == QNetworkReply::NoError) {
_scriptContents = reply->readAll();
qDebug() << "Downloaded script:" << _scriptContents;
// we could just call executeScript directly - we use a QueuedConnection to allow scriptRequestFinished
// to return before calling executeScript
QMetaObject::invokeMethod(this, "executeScript", Qt::QueuedConnection);
} else {
if (reply) {
qDebug() << "Failed to download script at" << reply->url().toString() << " - bailing on assignment.";
qDebug() << "QNetworkReply error was" << reply->errorString();
} else {
qDebug() << "Failed to download script - request timed out. Bailing on assignment.";
}
setFinished(true);
}
reply->deleteLater();
}
QString scriptContents(reply->readAll());
delete reply;
qDebug() << "Downloaded script:" << scriptContents;
_scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(scriptContents, _payload));
void Agent::executeScript() {
_scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(_scriptContents, _payload));
_scriptEngine->setParent(this); // be the parent of the script engine so it gets moved when we do
// setup an Avatar for the script to use
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
connect(_scriptEngine.get(), SIGNAL(update(float)), scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
scriptedAvatar->setForceFaceTrackerConnected(true);
// call model URL setters with empty URLs so our avatar, if used, will have the default models
scriptedAvatar->setFaceModelURL(QUrl());
scriptedAvatar->setSkeletonModelURL(QUrl());
// give this AvatarData object to the script engine
_scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());
using namespace recording;
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
// FIXME how to deal with driving multiple avatars locally?
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this, scriptedAvatar](Frame::ConstPointer frame) {
AvatarData::fromFrame(frame->data, *scriptedAvatar);
});
using namespace recording;
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::AUDIO_FRAME_NAME);
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
@ -201,32 +234,30 @@ void Agent::run() {
audioTransform.setRotation(scriptedAvatar->getOrientation());
AbstractAudioInterface::emitAudioPacket(audio.data(), audio.size(), audioSequenceNumber, audioTransform, PacketType::MicrophoneAudioNoEcho);
});
auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerListener(PacketType::BulkAvatarData, avatarHashMap.data(), "processAvatarDataPacket");
packetReceiver.registerListener(PacketType::KillAvatar, avatarHashMap.data(), "processKillAvatar");
packetReceiver.registerListener(PacketType::AvatarIdentity, avatarHashMap.data(), "processAvatarIdentityPacket");
packetReceiver.registerListener(PacketType::AvatarBillboard, avatarHashMap.data(), "processAvatarBillboardPacket");
// register ourselves to the script engine
_scriptEngine->registerGlobalObject("Agent", this);
// FIXME - we shouldn't be calling this directly, it's normally called by run(), not sure why
// viewers would need this called.
//_scriptEngine->init(); // must be done before we set up the viewers
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCache>().data());
QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
_scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);
// we need to make sure that init has been called for our EntityScriptingInterface
@ -237,15 +268,15 @@ void Agent::run() {
_entityViewer.init();
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
// wire up our additional agent related processing to the update signal
QObject::connect(_scriptEngine.get(), &ScriptEngine::update, this, &Agent::processAgentAvatarAndAudio);
_scriptEngine->run();
Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);
setFinished(true);
}


@ -55,6 +55,10 @@ public slots:
void playAvatarSound(Sound* avatarSound) { setAvatarSound(avatarSound); }
private slots:
void requestScript();
void scriptRequestFinished();
void executeScript();
void handleAudioPacket(QSharedPointer<NLPacket> packet);
void handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleJurisdictionPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
@ -73,6 +77,8 @@ private:
void sendAvatarIdentityPacket();
void sendAvatarBillboardPacket();
QString _scriptContents;
QTimer* _scriptRequestTimeout { nullptr };
bool _isListeningToAudioStream = false;
Sound* _avatarSound = nullptr;
int _numAvatarSoundSentBytes = 0;


@ -12,30 +12,23 @@
#include <QtCore/QCoreApplication>
#include <QtCore/QJsonObject>
#include <QBuffer>
#include <LogHandler.h>
#include <MessagesClient.h>
#include <NodeList.h>
#include <udt/PacketHeaders.h>
#include "MessagesMixer.h"
const QString MESSAGES_MIXER_LOGGING_NAME = "messages-mixer";
MessagesMixer::MessagesMixer(NLPacket& packet) :
ThreadedAssignment(packet)
MessagesMixer::MessagesMixer(NLPacket& packet) : ThreadedAssignment(packet)
{
// make sure we hear about node kills so we can tell the other nodes
connect(DependencyManager::get<NodeList>().data(), &NodeList::nodeKilled, this, &MessagesMixer::nodeKilled);
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerMessageListener(PacketType::MessagesData, this, "handleMessages");
packetReceiver.registerMessageListener(PacketType::MessagesSubscribe, this, "handleMessagesSubscribe");
packetReceiver.registerMessageListener(PacketType::MessagesUnsubscribe, this, "handleMessagesUnsubscribe");
}
MessagesMixer::~MessagesMixer() {
}
void MessagesMixer::nodeKilled(SharedNodePointer killedNode) {
for (auto& channel : _channelSubscribers) {
channel.remove(killedNode->getUUID());
@ -43,92 +36,52 @@ void MessagesMixer::nodeKilled(SharedNodePointer killedNode) {
}
void MessagesMixer::handleMessages(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
Q_ASSERT(packetList->getType() == PacketType::MessagesData);
QString channel, message;
QUuid senderID;
MessagesClient::decodeMessagesPacket(packetList, channel, message, senderID);
QByteArray packetData = packetList->getMessage();
QBuffer packet{ &packetData };
packet.open(QIODevice::ReadOnly);
quint16 channelLength;
packet.read(reinterpret_cast<char*>(&channelLength), sizeof(channelLength));
auto channelData = packet.read(channelLength);
QString channel = QString::fromUtf8(channelData);
quint16 messageLength;
packet.read(reinterpret_cast<char*>(&messageLength), sizeof(messageLength));
auto messageData = packet.read(messageLength);
QString message = QString::fromUtf8(messageData);
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachMatchingNode(
[&](const SharedNodePointer& node)->bool {
return node->getType() == NodeType::Agent && node->getActiveSocket() &&
_channelSubscribers[channel].contains(node->getUUID());
},
[&](const SharedNodePointer& node) {
auto packetList = NLPacketList::create(PacketType::MessagesData, QByteArray(), true, true);
auto channelUtf8 = channel.toUtf8();
quint16 channelLength = channelUtf8.length();
packetList->writePrimitive(channelLength);
packetList->write(channelUtf8);
auto messageUtf8 = message.toUtf8();
quint16 messageLength = messageUtf8.length();
packetList->writePrimitive(messageLength);
packetList->write(messageUtf8);
auto packetList = MessagesClient::encodeMessagesPacket(channel, message, senderID);
nodeList->sendPacketList(std::move(packetList), *node);
});
}
void MessagesMixer::handleMessagesSubscribe(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
Q_ASSERT(packetList->getType() == PacketType::MessagesSubscribe);
QString channel = QString::fromUtf8(packetList->getMessage());
qDebug() << "Node [" << senderNode->getUUID() << "] subscribed to channel:" << channel;
_channelSubscribers[channel] << senderNode->getUUID();
}
void MessagesMixer::handleMessagesUnsubscribe(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
Q_ASSERT(packetList->getType() == PacketType::MessagesUnsubscribe);
QString channel = QString::fromUtf8(packetList->getMessage());
qDebug() << "Node [" << senderNode->getUUID() << "] unsubscribed from channel:" << channel;
if (_channelSubscribers.contains(channel)) {
_channelSubscribers[channel].remove(senderNode->getUUID());
}
}
// FIXME - make these stats relevant
void MessagesMixer::sendStatsPacket() {
QJsonObject statsObject;
QJsonObject messagesObject;
auto nodeList = DependencyManager::get<NodeList>();
QJsonObject statsObject, messagesMixerObject;
// add stats for each listener
nodeList->eachNode([&](const SharedNodePointer& node) {
QJsonObject messagesStats;
// add the key to ask the domain-server for a username replacement, if it has it
messagesStats[USERNAME_UUID_REPLACEMENT_STATS_KEY] = uuidStringWithoutCurlyBraces(node->getUUID());
messagesStats["outbound_kbps"] = node->getOutboundBandwidth();
messagesStats["inbound_kbps"] = node->getInboundBandwidth();
messagesObject[uuidStringWithoutCurlyBraces(node->getUUID())] = messagesStats;
DependencyManager::get<NodeList>()->eachNode([&](const SharedNodePointer& node) {
QJsonObject clientStats;
clientStats[USERNAME_UUID_REPLACEMENT_STATS_KEY] = uuidStringWithoutCurlyBraces(node->getUUID());
clientStats["outbound_kbps"] = node->getOutboundBandwidth();
clientStats["inbound_kbps"] = node->getInboundBandwidth();
messagesMixerObject[uuidStringWithoutCurlyBraces(node->getUUID())] = clientStats;
});
statsObject["messages"] = messagesObject;
statsObject["messages"] = messagesMixerObject;
ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
}
void MessagesMixer::run() {
ThreadedAssignment::commonInit(MESSAGES_MIXER_LOGGING_NAME, NodeType::MessagesMixer);
auto nodeList = DependencyManager::get<NodeList>();
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
// The messages-mixer does not currently have any domain settings. If it did, they would be
// synchronously grabbed here.
}
DependencyManager::get<NodeList>()->addNodeTypeToInterestSet(NodeType::Agent);
}


@ -22,7 +22,6 @@ class MessagesMixer : public ThreadedAssignment {
Q_OBJECT
public:
MessagesMixer(NLPacket& packet);
~MessagesMixer();
public slots:
void run();


@ -1097,29 +1097,37 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
if (connection->requestOperation() == QNetworkAccessManager::GetOperation
&& assignmentRegex.indexIn(url.path()) != -1) {
QUuid matchingUUID = QUuid(assignmentRegex.cap(1));
SharedAssignmentPointer matchingAssignment = _allAssignments.value(matchingUUID);
if (!matchingAssignment) {
// check if we have a pending assignment that matches this temp UUID, and it is a scripted assignment
QUuid assignmentUUID = _gatekeeper.assignmentUUIDForPendingAssignment(matchingUUID);
if (!assignmentUUID.isNull()) {
matchingAssignment = _allAssignments.value(assignmentUUID);
if (matchingAssignment && matchingAssignment->getType() == Assignment::AgentType) {
// we have a matching assignment and it is for the right type, have the HTTP manager handle it
// via correct URL for the script so the client can download
QUrl scriptURL = url;
scriptURL.setPath(URI_ASSIGNMENT + "/scripts/"
+ uuidStringWithoutCurlyBraces(assignmentUUID));
// have the HTTPManager serve the appropriate script file
return _httpManager.handleHTTPRequest(connection, scriptURL, true);
}
}
QUuid nodeUUID = QUuid(assignmentRegex.cap(1));
auto matchingNode = nodeList->nodeWithUUID(nodeUUID);
// don't handle if we don't have a matching node
if (!matchingNode) {
return false;
}
auto nodeData = dynamic_cast<DomainServerNodeData*>(matchingNode->getLinkedData());
// don't handle if we don't have node data for this node
if (!nodeData) {
return false;
}
SharedAssignmentPointer matchingAssignment = _allAssignments.value(nodeData->getAssignmentUUID());
// check if we have an assignment that matches this temp UUID, and it is a scripted assignment
if (matchingAssignment && matchingAssignment->getType() == Assignment::AgentType) {
// we have a matching assignment and it is for the right type, have the HTTP manager handle it
// via correct URL for the script so the client can download
QUrl scriptURL = url;
scriptURL.setPath(URI_ASSIGNMENT + "/scripts/"
+ uuidStringWithoutCurlyBraces(matchingAssignment->getUUID()));
// have the HTTPManager serve the appropriate script file
return _httpManager.handleHTTPRequest(connection, scriptURL, true);
}
// request not handled
return false;
}


@ -0,0 +1,155 @@
//
// playbackAgents.js
// acScripts
//
// Created by Edgar Pironti on 11/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Set the following variables to the values needed
var channel = "PlaybackChannel1";
var clip_url = null;
var playFromCurrentLocation = true;
var useDisplayName = true;
var useAttachments = true;
var useAvatarModel = true;
// ID of the agent. Two agents can't have the same ID.
var id = 0;
// Set position/orientation/scale here if playFromCurrentLocation is true
Avatar.position = { x:0, y: 0, z: 0 };
Avatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
Avatar.scale = 1.0;
var totalTime = 0;
var subscribed = false;
var WAIT_FOR_AUDIO_MIXER = 1;
// Script. DO NOT MODIFY BEYOND THIS LINE.
var DO_NOTHING = 0;
var PLAY = 1;
var PLAY_LOOP = 2;
var STOP = 3;
var SHOW = 4;
var HIDE = 5;
var LOAD = 6;
Recording.setPlayFromCurrentLocation(playFromCurrentLocation);
Recording.setPlayerUseDisplayName(useDisplayName);
Recording.setPlayerUseAttachments(useAttachments);
Recording.setPlayerUseHeadModel(false);
Recording.setPlayerUseSkeletonModel(useAvatarModel);
function getAction(channel, message, senderID) {
if(subscribed) {
var command = JSON.parse(message);
print("I'm the agent " + id + " and I received this: ID: " + command.id_key + " Action: " + command.action_key + " URL: " + command.clip_url_key);
if (command.id_key == id || command.id_key == -1) {
if (command.action_key === 6) {
clip_url = command.clip_url_key;
// If the id is -1 (broadcast) and the action is LOAD (6), the url points to the performance file
// that lists all the clips recorded in a session (not just a single clip url).
// The per-agent clip url has to be resolved here: by checking its id, each agent
// picks the correct clip url for itself.
if (command.id_key == -1) {
Assets.downloadData(clip_url, function (data) {
var myJSONObject = JSON.parse(data);
var hash = myJSONObject.results[id].hashATP;
// fetch this agent's clip url once the performance file has arrived (hash is only in scope here)
Assets.downloadData(hash, function (data) {
clip_url = JSON.parse(data);
});
});
}
}
action = command.action_key;
print("That command was for me!");
print("My clip is: " + clip_url);
} else {
action = DO_NOTHING;
}
switch(action) {
case PLAY:
print("Play");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
if (!Recording.isPlaying()) {
Recording.startPlaying();
}
Recording.setPlayerLoop(false);
break;
case PLAY_LOOP:
print("Play loop");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
if (!Recording.isPlaying()) {
Recording.startPlaying();
}
Recording.setPlayerLoop(true);
break;
case STOP:
print("Stop");
if (Recording.isPlaying()) {
Recording.stopPlaying();
}
break;
case SHOW:
print("Show");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
break;
case HIDE:
print("Hide");
if (Recording.isPlaying()) {
Recording.stopPlaying();
}
Agent.isAvatar = false;
break;
case LOAD:
print("Load");
if(clip_url !== null) {
Recording.loadRecording(clip_url);
}
break;
case DO_NOTHING:
break;
default:
print("Unknown action: " + action);
break;
}
if (Recording.isPlaying()) {
Recording.play();
}
}
}
function update(deltaTime) {
totalTime += deltaTime;
if (totalTime > WAIT_FOR_AUDIO_MIXER && !subscribed) {
Messages.subscribe(channel);
subscribed = true;
print("I'm the agent and I am ready to receive!")
}
}
Script.update.connect(update);
Messages.messageReceived.connect(getAction);
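
The master and agent scripts above coordinate over the "PlaybackChannel1" Messages channel using a JSON payload of id_key, action_key and clip_url_key, where an id_key of -1 broadcasts to every agent. A minimal sketch of that command protocol, using only the Messages calls that appear in the two scripts (the ATP URLs and the performance-file contents are hypothetical placeholders):

// Stripped-down sketch of the playback command protocol (illustrative only).
var channel = "PlaybackChannel1";
var LOAD = 6;

// Master side, as in playbackMaster.js sendCommand(): broadcast a LOAD to every agent.
Messages.subscribe(channel);
Messages.sendMessage(channel, JSON.stringify({
    id_key: -1,                                   // -1 means "all agents"
    action_key: LOAD,
    clip_url_key: "atp:/example-performance.json" // hypothetical performance-file URL
}));

// Agent side, as in playbackAgents.js getAction(): act only on its own id or a broadcast.
var myId = 0;
Messages.messageReceived.connect(function(receivedChannel, message, senderID) {
    if (receivedChannel !== channel) {
        return;
    }
    var command = JSON.parse(message);
    if (command.id_key === myId || command.id_key === -1) {
        print("agent " + myId + " received action " + command.action_key);
    }
});

// The broadcast LOAD path assumes a performance file of roughly this shape
// (inferred from myJSONObject.results[id].hashATP above; the values are made up):
// { "results": [ { "hashATP": "atp:/clipForAgent0" },
//                { "hashATP": "atp:/clipForAgent1" } ] }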


@ -0,0 +1,260 @@
//
// playbackMaster.js
// acScripts
//
// Created by Edgar Pironti on 11/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var ac_number = 1; // This is the default number of ACs. Their IDs need to be unique, between 0 (inclusive) and ac_number (exclusive).
var names = new Array(); // It is possible to specify the names of the ACs in this array, ordered by ID (default name is "ACx", x = ID + 1).
var channel = "PlaybackChannel1";
var subscribed = false;
var clip_url = null;
var input_text = null;
// Script. DO NOT MODIFY BEYOND THIS LINE.
Script.include("../libraries/toolBars.js");
var DO_NOTHING = 0;
var PLAY = 1;
var PLAY_LOOP = 2;
var STOP = 3;
var SHOW = 4;
var HIDE = 5;
var LOAD = 6;
var windowDimensions = Controller.getViewportDimensions();
var TOOL_ICON_URL = HIFI_PUBLIC_BUCKET + "images/tools/";
var ALPHA_ON = 1.0;
var ALPHA_OFF = 0.7;
var COLOR_TOOL_BAR = { red: 0, green: 0, blue: 0 };
var COLOR_MASTER = { red: 0, green: 0, blue: 0 };
var TEXT_HEIGHT = 12;
var TEXT_MARGIN = 3;
var toolBars = new Array();
var nameOverlays = new Array();
var onOffIcon = new Array();
var playIcon = new Array();
var playLoopIcon = new Array();
var stopIcon = new Array();
var loadIcon = new Array();
setupPlayback();
function setupPlayback() {
ac_number = Window.prompt("Insert number of agents: ","1");
if (ac_number === "" || ac_number === null)
ac_number = 1;
Messages.subscribe(channel);
subscribed = true;
setupToolBars();
}
function setupToolBars() {
if (toolBars.length > 0) {
print("Multiple calls to Recorder.js:setupToolBars()");
return;
}
Tool.IMAGE_HEIGHT /= 2;
Tool.IMAGE_WIDTH /= 2;
for (i = 0; i <= ac_number; i++) {
toolBars.push(new ToolBar(0, 0, ToolBar.HORIZONTAL));
toolBars[i].setBack((i == ac_number) ? COLOR_MASTER : COLOR_TOOL_BAR, ALPHA_OFF);
onOffIcon.push(toolBars[i].addTool({
imageURL: TOOL_ICON_URL + "ac-on-off.svg",
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
x: 0, y: 0,
width: Tool.IMAGE_WIDTH,
height: Tool.IMAGE_HEIGHT,
alpha: ALPHA_ON,
visible: true
}, true, true));
playIcon[i] = toolBars[i].addTool({
imageURL: TOOL_ICON_URL + "play.svg",
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
width: Tool.IMAGE_WIDTH,
height: Tool.IMAGE_HEIGHT,
alpha: ALPHA_OFF,
visible: true
}, false);
var playLoopWidthFactor = 1.65;
playLoopIcon[i] = toolBars[i].addTool({
imageURL: TOOL_ICON_URL + "play-and-loop.svg",
subImage: { x: 0, y: 0, width: playLoopWidthFactor * Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
width: playLoopWidthFactor * Tool.IMAGE_WIDTH,
height: Tool.IMAGE_HEIGHT,
alpha: ALPHA_OFF,
visible: true
}, false);
stopIcon[i] = toolBars[i].addTool({
imageURL: TOOL_ICON_URL + "recording-stop.svg",
width: Tool.IMAGE_WIDTH,
height: Tool.IMAGE_HEIGHT,
alpha: ALPHA_OFF,
visible: true
}, false);
loadIcon[i] = toolBars[i].addTool({
imageURL: TOOL_ICON_URL + "recording-upload.svg",
width: Tool.IMAGE_WIDTH,
height: Tool.IMAGE_HEIGHT,
alpha: ALPHA_OFF,
visible: true
}, false);
nameOverlays.push(Overlays.addOverlay("text", {
backgroundColor: { red: 0, green: 0, blue: 0 },
font: { size: TEXT_HEIGHT },
text: (i == ac_number) ? "Master" : i + ". " +
((i < names.length) ? names[i] :
"AC" + i),
x: 0, y: 0,
width: toolBars[i].width + ToolBar.SPACING,
height: TEXT_HEIGHT + TEXT_MARGIN,
leftMargin: TEXT_MARGIN,
topMargin: TEXT_MARGIN,
alpha: ALPHA_OFF,
backgroundAlpha: ALPHA_OFF,
visible: true
}));
}
}
function sendCommand(id, action) {
if (action === SHOW) {
toolBars[id].selectTool(onOffIcon[id], false);
toolBars[id].setAlpha(ALPHA_ON, playIcon[id]);
toolBars[id].setAlpha(ALPHA_ON, playLoopIcon[id]);
toolBars[id].setAlpha(ALPHA_ON, stopIcon[id]);
toolBars[id].setAlpha(ALPHA_ON, loadIcon[id]);
} else if (action === HIDE) {
toolBars[id].selectTool(onOffIcon[id], true);
toolBars[id].setAlpha(ALPHA_OFF, playIcon[id]);
toolBars[id].setAlpha(ALPHA_OFF, playLoopIcon[id]);
toolBars[id].setAlpha(ALPHA_OFF, stopIcon[id]);
toolBars[id].setAlpha(ALPHA_OFF, loadIcon[id]);
} else if (toolBars[id].toolSelected(onOffIcon[id])) {
return;
}
if (id == (toolBars.length - 1))
id = -1; // Master command becomes broadcast.
var message = {
id_key: id,
action_key: action,
clip_url_key: clip_url
};
if(subscribed){
Messages.sendMessage(channel, JSON.stringify(message));
print("Message sent!");
clip_url = null;
}
}
function mousePressEvent(event) {
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
// Check master control
var i = toolBars.length - 1;
if (onOffIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
if (toolBars[i].toolSelected(onOffIcon[i])) {
sendCommand(i, SHOW);
} else {
sendCommand(i, HIDE);
}
} else if (playIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, PLAY);
} else if (playLoopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, PLAY_LOOP);
} else if (stopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, STOP);
} else if (loadIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
input_text = Window.prompt("Insert the url of the clip: ","");
if (!(input_text === "" || input_text === null)) {
clip_url = input_text;
sendCommand(i, LOAD);
}
} else {
// Check individual controls
for (i = 0; i < ac_number; i++) {
if (onOffIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
if (toolBars[i].toolSelected(onOffIcon[i], false)) {
sendCommand(i, SHOW);
} else {
sendCommand(i, HIDE);
}
} else if (playIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, PLAY);
} else if (playLoopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, PLAY_LOOP);
} else if (stopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, STOP);
} else if (loadIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
input_text = Window.prompt("Insert the url of the clip: ","");
if (!(input_text === "" || input_text === null)) {
clip_url = input_text;
sendCommand(i, LOAD);
}
} else {
}
}
}
}
function moveUI() {
var textSize = TEXT_HEIGHT + 2 * TEXT_MARGIN;
var relative = { x: 70, y: 75 + (ac_number) * (Tool.IMAGE_HEIGHT + ToolBar.SPACING + textSize) };
for (i = 0; i <= ac_number; i++) {
toolBars[i].move(relative.x,
windowDimensions.y - relative.y +
i * (Tool.IMAGE_HEIGHT + ToolBar.SPACING + textSize));
Overlays.editOverlay(nameOverlays[i], {
x: toolBars[i].x - ToolBar.SPACING,
y: toolBars[i].y - textSize
});
}
}
function update() {
var newDimensions = Controller.getViewportDimensions();
if (windowDimensions.x != newDimensions.x ||
windowDimensions.y != newDimensions.y) {
windowDimensions = newDimensions;
moveUI();
}
}
function scriptEnding() {
for (i = 0; i <= ac_number; i++) {
toolBars[i].cleanup();
Overlays.deleteOverlay(nameOverlays[i]);
}
if(subscribed)
Messages.unsubscribe(channel);
}
Controller.mousePressEvent.connect(mousePressEvent);
Script.update.connect(update);
Script.scriptEnding.connect(scriptEnding);
moveUI();


@ -37,9 +37,21 @@ var BUMPER_ON_VALUE = 0.5;
var DISTANCE_HOLDING_RADIUS_FACTOR = 5; // multiplied by distance between hand and object
var DISTANCE_HOLDING_ACTION_TIMEFRAME = 0.1; // how quickly objects move to their new position
var DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR = 2.0; // object rotates this much more than hand did
var NO_INTERSECT_COLOR = { red: 10, green: 10, blue: 255}; // line color when pick misses
var INTERSECT_COLOR = { red: 250, green: 10, blue: 10}; // line color when pick hits
var LINE_ENTITY_DIMENSIONS = { x: 1000, y: 1000,z: 1000};
var NO_INTERSECT_COLOR = {
red: 10,
green: 10,
blue: 255
}; // line color when pick misses
var INTERSECT_COLOR = {
red: 250,
green: 10,
blue: 10
}; // line color when pick hits
var LINE_ENTITY_DIMENSIONS = {
x: 1000,
y: 1000,
z: 1000
};
var LINE_LENGTH = 500;
var PICK_MAX_DISTANCE = 500; // max length of pick-ray
@ -84,12 +96,13 @@ var ACTION_TTL_REFRESH = 5;
var PICKS_PER_SECOND_PER_HAND = 5;
var MSECS_PER_SEC = 1000.0;
var GRABBABLE_PROPERTIES = ["position",
"rotation",
"gravity",
"ignoreForCollisions",
"collisionsWillMove",
"locked",
"name"];
"rotation",
"gravity",
"ignoreForCollisions",
"collisionsWillMove",
"locked",
"name"
];
var GRABBABLE_DATA_KEY = "grabbableKey"; // shared with grab.js
@ -100,7 +113,7 @@ var DEFAULT_GRABBABLE_DATA = {
invertSolidWhileHeld: false
};
var disabledHand ='none';
var disabledHand = 'none';
// states for the state machine
@ -125,40 +138,40 @@ var STATE_EQUIP_SPRING = 16;
function stateToName(state) {
switch (state) {
case STATE_OFF:
return "off";
case STATE_SEARCHING:
return "searching";
case STATE_DISTANCE_HOLDING:
return "distance_holding";
case STATE_CONTINUE_DISTANCE_HOLDING:
return "continue_distance_holding";
case STATE_NEAR_GRABBING:
return "near_grabbing";
case STATE_CONTINUE_NEAR_GRABBING:
return "continue_near_grabbing";
case STATE_NEAR_TRIGGER:
return "near_trigger";
case STATE_CONTINUE_NEAR_TRIGGER:
return "continue_near_trigger";
case STATE_FAR_TRIGGER:
return "far_trigger";
case STATE_CONTINUE_FAR_TRIGGER:
return "continue_far_trigger";
case STATE_RELEASE:
return "release";
case STATE_EQUIP_SEARCHING:
return "equip_searching";
case STATE_EQUIP:
return "equip";
case STATE_CONTINUE_EQUIP_BD:
return "continue_equip_bd";
case STATE_CONTINUE_EQUIP:
return "continue_equip";
case STATE_WAITING_FOR_BUMPER_RELEASE:
return "waiting_for_bumper_release";
case STATE_EQUIP_SPRING:
return "state_equip_spring";
}
return "unknown";
@ -187,7 +200,6 @@ function entityIsGrabbedByOther(entityID) {
return false;
}
function MyController(hand) {
this.hand = hand;
if (this.hand === RIGHT_HAND) {
@ -211,8 +223,17 @@ function MyController(hand) {
this.rawTriggerValue = 0;
this.rawBumperValue = 0;
this.offsetPosition = { x: 0.0, y: 0.0, z: 0.0 };
this.offsetRotation = { x: 0.0, y: 0.0, z: 0.0, w: 1.0 };
this.offsetPosition = {
x: 0.0,
y: 0.0,
z: 0.0
};
this.offsetRotation = {
x: 0.0,
y: 0.0,
z: 0.0,
w: 1.0
};
var _this = this;
@ -277,7 +298,7 @@ function MyController(hand) {
this.state = newState;
}
this.debugLine = function(closePoint, farPoint, color){
this.debugLine = function(closePoint, farPoint, color) {
Entities.addEntity({
type: "Line",
name: "Grab Debug Entity",
@ -321,16 +342,16 @@ function MyController(hand) {
this.pointer = null;
};
this.triggerPress = function (value) {
this.triggerPress = function(value) {
_this.rawTriggerValue = value;
};
this.bumperPress = function (value) {
this.bumperPress = function(value) {
_this.rawBumperValue = value;
};
this.updateSmoothedTrigger = function () {
this.updateSmoothedTrigger = function() {
var triggerValue = this.rawTriggerValue;
// smooth out trigger value
this.triggerValue = (this.triggerValue * TRIGGER_SMOOTH_RATIO) +
@ -401,7 +422,7 @@ function MyController(hand) {
this.lastPickTime = now;
}
for (var index=0; index < pickRays.length; ++index) {
for (var index = 0; index < pickRays.length; ++index) {
var pickRay = pickRays[index];
var directionNormalized = Vec3.normalize(pickRay.direction);
var directionBacked = Vec3.multiply(directionNormalized, PICK_BACKOFF_DISTANCE);
@ -466,10 +487,9 @@ function MyController(hand) {
}
return;
}
} else if (! entityIsGrabbedByOther(intersection.entityID)) {
} else if (!entityIsGrabbedByOther(intersection.entityID)) {
// don't allow two people to distance grab the same object
if (intersection.properties.collisionsWillMove
&& !intersection.properties.locked) {
if (intersection.properties.collisionsWillMove && !intersection.properties.locked) {
// the hand is far from the intersected object. go into distance-holding mode
this.grabbedEntity = intersection.entityID;
if (typeof grabbableData.spatialKey !== 'undefined' && this.state == STATE_EQUIP_SEARCHING) {
@ -494,10 +514,18 @@ function MyController(hand) {
Entities.addEntity({
type: "Sphere",
name: "Grab Debug Entity",
dimensions: {x: GRAB_RADIUS, y: GRAB_RADIUS, z: GRAB_RADIUS},
dimensions: {
x: GRAB_RADIUS,
y: GRAB_RADIUS,
z: GRAB_RADIUS
},
visible: true,
position: handPosition,
color: { red: 0, green: 255, blue: 0},
color: {
red: 0,
green: 255,
blue: 0
},
lifetime: 0.1
});
}
@ -604,6 +632,7 @@ function MyController(hand) {
} else {
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
}
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
Entities.callEntityMethod(this.grabbedEntity, "startDistantGrab");
}
@ -639,7 +668,7 @@ function MyController(hand) {
// the action was set up on a previous call. update the targets.
var radius = Math.max(Vec3.distance(this.currentObjectPosition, handControllerPosition) *
DISTANCE_HOLDING_RADIUS_FACTOR, DISTANCE_HOLDING_RADIUS_FACTOR);
// how far did avatar move this timestep?
var currentPosition = MyAvatar.position;
var avatarDeltaPosition = Vec3.subtract(currentPosition, this.currentAvatarPosition);
@ -688,9 +717,9 @@ function MyController(hand) {
// this doubles hand rotation
var handChange = Quat.multiply(Quat.slerp(this.handPreviousRotation,
handRotation,
DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR),
Quat.inverse(this.handPreviousRotation));
this.handPreviousRotation = handRotation;
this.currentObjectRotation = Quat.multiply(handChange, this.currentObjectRotation);
@ -773,6 +802,8 @@ function MyController(hand) {
this.setState(STATE_CONTINUE_NEAR_GRABBING);
} else {
// equipping
Entities.callEntityMethod(this.grabbedEntity, "startEquip", [JSON.stringify(this.hand)]);
this.startHandGrasp();
this.setState(STATE_CONTINUE_EQUIP_BD);
}
@ -781,6 +812,9 @@ function MyController(hand) {
} else {
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
}
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
Entities.callEntityMethod(this.grabbedEntity, "startNearGrab");
}
@ -807,6 +841,7 @@ function MyController(hand) {
}
if (this.state == STATE_CONTINUE_NEAR_GRABBING && this.bumperSqueezed()) {
this.setState(STATE_CONTINUE_EQUIP_BD);
Entities.callEntityMethod(this.grabbedEntity, "startEquip", [JSON.stringify(this.hand)]);
return;
}
@ -827,6 +862,10 @@ function MyController(hand) {
this.currentObjectTime = now;
Entities.callEntityMethod(this.grabbedEntity, "continueNearGrab");
if (this.state === STATE_CONTINUE_EQUIP_BD) {
Entities.callEntityMethod(this.grabbedEntity, "continueEquip");
}
if (this.actionTimeout - now < ACTION_TTL_REFRESH * MSEC_PER_SEC) {
// if less than a 5 seconds left, refresh the actions ttl
Entities.updateAction(this.grabbedEntity, this.actionID, {
@ -846,6 +885,8 @@ function MyController(hand) {
if (this.bumperReleased()) {
this.setState(STATE_RELEASE);
Entities.callEntityMethod(this.grabbedEntity, "releaseGrab");
Entities.callEntityMethod(this.grabbedEntity, "unequip");
this.endHandGrasp();
}
};
@ -856,8 +897,17 @@ function MyController(hand) {
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, this.grabbedEntity, DEFAULT_GRABBABLE_DATA);
// use a spring to pull the object to where it will be when equipped
var relativeRotation = { x: 0.0, y: 0.0, z: 0.0, w: 1.0 };
var relativePosition = { x: 0.0, y: 0.0, z: 0.0 };
var relativeRotation = {
x: 0.0,
y: 0.0,
z: 0.0,
w: 1.0
};
var relativePosition = {
x: 0.0,
y: 0.0,
z: 0.0
};
if (grabbableData.spatialKey.relativePosition) {
relativePosition = grabbableData.spatialKey.relativePosition;
}
@ -913,6 +963,9 @@ function MyController(hand) {
} else {
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
}
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
Entities.callEntityMethod(this.grabbedEntity, "startNearTrigger");
this.setState(STATE_CONTINUE_NEAR_TRIGGER);
};
@ -929,6 +982,7 @@ function MyController(hand) {
} else {
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
}
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
Entities.callEntityMethod(this.grabbedEntity, "startFarTrigger");
this.setState(STATE_CONTINUE_FAR_TRIGGER);
};
@ -1040,7 +1094,7 @@ function MyController(hand) {
this.release = function() {
if(this.hand !== disabledHand){
if (this.hand !== disabledHand) {
//release the disabled hand when we let go with the main one
disabledHand = 'none';
}
@ -1061,6 +1115,7 @@ function MyController(hand) {
this.cleanup = function() {
this.release();
this.endHandGrasp();
};
this.activateEntity = function(entityID, grabbedProperties) {
@ -1075,9 +1130,15 @@ function MyController(hand) {
data["gravity"] = grabbedProperties.gravity;
data["ignoreForCollisions"] = grabbedProperties.ignoreForCollisions;
data["collisionsWillMove"] = grabbedProperties.collisionsWillMove;
var whileHeldProperties = {gravity: {x:0, y:0, z:0}};
var whileHeldProperties = {
gravity: {
x: 0,
y: 0,
z: 0
}
};
if (invertSolidWhileHeld) {
whileHeldProperties["ignoreForCollisions"] = ! grabbedProperties.ignoreForCollisions;
whileHeldProperties["ignoreForCollisions"] = !grabbedProperties.ignoreForCollisions;
}
Entities.editEntity(entityID, whileHeldProperties);
}
@ -1103,6 +1164,44 @@ function MyController(hand) {
}
setEntityCustomData(GRAB_USER_DATA_KEY, entityID, data);
};
//this is our handler, where we do the actual work of changing animation settings
this.graspHand = function(animationProperties) {
var result = {};
//full alpha on overlay for this hand
//set grab to true
//set idle to false
//full alpha on the blend btw open and grab
if (_this.hand === RIGHT_HAND) {
result['rightHandOverlayAlpha'] = 1.0;
result['isRightHandGrab'] = true;
result['isRightHandIdle'] = false;
result['rightHandGrabBlend'] = 1.0;
} else if (_this.hand === LEFT_HAND) {
result['leftHandOverlayAlpha'] = 1.0;
result['isLeftHandGrab'] = true;
result['isLeftHandIdle'] = false;
result['leftHandGrabBlend'] = 1.0;
}
//return an object with our updated settings
return result;
}
this.graspHandler = null
this.startHandGrasp = function() {
if (this.hand === RIGHT_HAND) {
this.graspHandler = MyAvatar.addAnimationStateHandler(this.graspHand, ['isRightHandGrab']);
} else if (this.hand === LEFT_HAND) {
this.graspHandler = MyAvatar.addAnimationStateHandler(this.graspHand, ['isLeftHandGrab']);
}
}
this.endHandGrasp = function() {
// Tell the animation system we don't need any more callbacks.
MyAvatar.removeAnimationStateHandler(this.graspHandler);
}
}
var rightController = new MyController(RIGHT_HAND);
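
The new Entities.callEntityMethod() calls above (setHand, startEquip, continueEquip, unequip, alongside the existing startNearGrab, releaseGrab and the trigger variants) are the hooks an entity script can implement to react to being grabbed or equipped. A hypothetical entity script sketching those hooks, written in the same constructor-plus-prototype style as wand.js below; the method names come from the calls above, everything else is illustrative:

(function() {
    function EquippableEntity() {
        return;
    }
    EquippableEntity.prototype = {
        preload: function(entityID) {
            this.entityID = entityID;
        },
        setHand: function() {
            // handControllerGrab.js passes [this.hand] so the entity knows which hand holds it
        },
        startNearGrab: function() {
            print("grabbed");
        },
        startEquip: function() {
            print("equipped");
        },
        continueEquip: function() {
            // called every update while the bumper keeps the entity equipped
        },
        unequip: function() {
            print("unequipped");
        },
        releaseGrab: function() {
            print("released");
        }
    };
    return new EquippableEntity();
});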


@ -19,4 +19,3 @@ Script.load("controllers/handControllerGrab.js");
Script.load("grab.js");
Script.load("directory.js");
Script.load("dialTone.js");
Script.load("libraries/omniTool.js");


@ -27,7 +27,6 @@ var toolBar = null;
var recordIcon;
var isRecording = false;
var channel = "groupRecordingChannel";
Messages.subscribe(channel);
setupToolBar();
function setupToolBar() {


@ -1,90 +0,0 @@
//
// synchronizerEntityScript.js
// examples/entityScripts
//
// Created by Alessandro Signa on 11/12/15.
// Copyright 2015 High Fidelity, Inc.
//
// This script shows how to create a synchronized event between avatars through an entity.
// It works using the entity's userData: the master changes its value and every client checks it every frame.
// This entity prints a message when the event starts and when it ends.
// The client running synchronizerMaster.js is the event master and it decides when the event starts/ends by pressing a button.
// All the avatars in the area when the master presses the button will receive a message.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
(function() {
var insideArea = false;
var isJoiningTheEvent = false;
var _this;
function ParamsEntity() {
_this = this;
return;
}
ParamsEntity.prototype = {
update: function(){
var userData = JSON.parse(Entities.getEntityProperties(_this.entityID, ["userData"]).userData);
var valueToCheck = userData.myKey.valueToCheck;
if(valueToCheck && !isJoiningTheEvent){
_this.sendMessage();
}else if((!valueToCheck && isJoiningTheEvent) || (isJoiningTheEvent && !insideArea)){
_this.stopMessage();
}
},
preload: function(entityID) {
print('entity loaded')
this.entityID = entityID;
Script.update.connect(_this.update);
},
enterEntity: function(entityID) {
print("enterEntity("+entityID+")");
var userData = JSON.parse(Entities.getEntityProperties(_this.entityID, ["userData"]).userData);
var valueToCheck = userData.myKey.valueToCheck;
if(!valueToCheck){
//i'm in the area in time (before the event starts)
insideArea = true;
}
change(entityID);
},
leaveEntity: function(entityID) {
print("leaveEntity("+entityID+")");
Entities.editEntity(entityID, { color: { red: 255, green: 190, blue: 20} });
insideArea = false;
},
sendMessage: function(myID){
if(insideArea && !isJoiningTheEvent){
print("The event started");
isJoiningTheEvent = true;
}
},
stopMessage: function(myID){
if(isJoiningTheEvent){
print("The event ended");
isJoiningTheEvent = false;
}
},
clean: function(entityID) {
Script.update.disconnect(_this.update);
}
}
function change(entityID) {
Entities.editEntity(entityID, { color: { red: 255, green: 100, blue: 220} });
}
return new ParamsEntity();
});


@ -1,117 +0,0 @@
//
// synchronizerMaster.js
// examples/entityScripts
//
// Created by Alessandro Signa on 11/12/15.
// Copyright 2015 High Fidelity, Inc.
//
// Run this script to spawn a box (synchronizer) and drive the start/end of the event for anyone who is inside the box
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
var PARAMS_SCRIPT_URL = Script.resolvePath('synchronizerEntityScript.js');
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
Script.include("../libraries/toolBars.js");
Script.include("../libraries/utils.js");
var rotation = Quat.safeEulerAngles(Camera.getOrientation());
rotation = Quat.fromPitchYawRollDegrees(0, rotation.y, 0);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(1, Quat.getFront(rotation)));
var TOOL_ICON_URL = HIFI_PUBLIC_BUCKET + "images/tools/";
var ALPHA_ON = 1.0;
var ALPHA_OFF = 0.7;
var COLOR_TOOL_BAR = { red: 0, green: 0, blue: 0 };
var toolBar = null;
var recordIcon;
var isHappening = false;
var testEntity = Entities.addEntity({
name: 'paramsTestEntity',
dimensions: {
x: 2,
y: 1,
z: 2
},
type: 'Box',
position: center,
color: {
red: 255,
green: 255,
blue: 255
},
visible: true,
ignoreForCollisions: true,
script: PARAMS_SCRIPT_URL,
userData: JSON.stringify({
myKey: {
valueToCheck: false
}
})
});
setupToolBar();
function setupToolBar() {
if (toolBar != null) {
print("Multiple calls to setupToolBar()");
return;
}
Tool.IMAGE_HEIGHT /= 2;
Tool.IMAGE_WIDTH /= 2;
toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL); // put the button in the upper-left corner
toolBar.setBack(COLOR_TOOL_BAR, ALPHA_OFF);
recordIcon = toolBar.addTool({
imageURL: TOOL_ICON_URL + "recording-record.svg",
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
x: 0, y: 0,
width: Tool.IMAGE_WIDTH,
height: Tool.IMAGE_HEIGHT,
alpha: MyAvatar.isPlaying() ? ALPHA_OFF : ALPHA_ON,
visible: true
}, true, isHappening);
}
function mousePressEvent(event) {
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
if (recordIcon === toolBar.clicked(clickedOverlay, false)) {
if (!isHappening) {
print("I'm the event master. I want the event starts");
isHappening = true;
setEntityCustomData("myKey", testEntity, {valueToCheck: true});
} else {
print("I want the event stops");
isHappening = false;
setEntityCustomData("myKey", testEntity, {valueToCheck: false});
}
}
}
function cleanup() {
toolBar.cleanup();
Entities.callEntityMethod(testEntity, 'clean'); //have to call this before deleting to avoid the JSON warnings
Entities.deleteEntity(testEntity);
}
Script.scriptEnding.connect(cleanup);
Controller.mousePressEvent.connect(mousePressEvent);


@ -0,0 +1,11 @@
var data = "this is some data";
var extension = "txt";
var uploadedFile;
Assets.uploadData(data, extension, function (url) {
print("data uploaded to:" + url);
uploadedFile = url;
Assets.downloadData(url, function (data) {
print("data downloaded from:" + url + " the data is:" + data);
});
});


@ -0,0 +1,68 @@
// graspHands.js
//
// Created by James B. Pollack @imgntn -- 11/19/2015
// Copyright 2015 High Fidelity, Inc.
//
// Shows how to use the animation API to grasp an Avatar's hands.
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// Choose a hand. Set it programmatically if you'd like.
var handToGrasp = 'LEFT_HAND';
//this is our handler, where we do the actual work of changing animation settings
function graspHand(animationProperties) {
var result = {};
//full alpha on overlay for this hand
//set grab to true
//set idle to false
//full alpha on the blend btw open and grab
if (handToGrasp === 'RIGHT_HAND') {
result['rightHandOverlayAlpha'] = 1.0;
result['isRightHandGrab'] = true;
result['isRightHandIdle'] = false;
result['rightHandGrabBlend'] = 1.0;
} else if (handToGrasp === 'LEFT_HAND') {
result['leftHandOverlayAlpha'] = 1.0;
result['isLeftHandGrab'] = true;
result['isLeftHandIdle'] = false;
result['leftHandGrabBlend'] = 1.0;
}
//return an object with our updated settings
return result;
}
//keep a reference to this so we can clear it
var handler;
//register our handler with the animation system
function startHandGrasp() {
if (handToGrasp === 'RIGHT_HAND') {
handler = MyAvatar.addAnimationStateHandler(graspHand, ['isRightHandGrab']);
} else if (handToGrasp === 'LEFT_HAND') {
handler = MyAvatar.addAnimationStateHandler(graspHand, ['isLeftHandGrab']);
}
}
function endHandGrasp() {
// Tell the animation system we don't need any more callbacks.
MyAvatar.removeAnimationStateHandler(handler);
}
//make sure to clean this up when the script ends so we don't get stuck.
Script.scriptEnding.connect(function() {
Script.clearInterval(graspInterval);
endHandGrasp();
})
//set an interval and toggle grasping
var isGrasping = false;
var graspInterval = Script.setInterval(function() {
if (isGrasping === false) {
startHandGrasp();
isGrasping = true;
} else {
endHandGrasp();
isGrasping = false
}
}, 1000)


@ -19,10 +19,11 @@ var MAX_LINE_LENGTH = 40; // This must be 2 or greater;
var DEFAULT_STROKE_WIDTH = 0.1;
var DEFAULT_LIFETIME = 20;
var DEFAULT_COLOR = { red: 255, green: 255, blue: 255 };
var PolyLine = function(position, color, lifetime) {
var PolyLine = function(position, color, lifetime, texture) {
this.position = position;
this.color = color;
this.lifetime = lifetime === undefined ? DEFAULT_LIFETIME : lifetime;
this.texture = texture ? texture : "";
this.points = [
];
this.strokeWidths = [
@ -37,7 +38,8 @@ var PolyLine = function(position, color, lifetime) {
strokeWidths: this.strokeWidths,
dimensions: LINE_DIMENSIONS,
color: color,
lifetime: lifetime
lifetime: lifetime,
textures: this.texture
});
};
@ -98,26 +100,29 @@ PolyLine.prototype.destroy = function() {
// InfiniteLine
InfiniteLine = function(position, color, lifetime) {
InfiniteLine = function(position, color, lifetime, textureBegin, textureMiddle) {
this.position = position;
this.color = color;
this.lifetime = lifetime === undefined ? DEFAULT_LIFETIME : lifetime;
this.lines = [];
this.size = 0;
this.textureBegin = textureBegin ? textureBegin : "";
this.textureMiddle = textureMiddle ? textureMiddle : "";
};
InfiniteLine.prototype.enqueuePoint = function(position, strokeWidth) {
var currentLine;
if (this.lines.length == 0) {
currentLine = new PolyLine(position, this.color, this.lifetime);
currentLine = new PolyLine(position, this.color, this.lifetime, this.textureBegin);
this.lines.push(currentLine);
} else {
currentLine = this.lines[this.lines.length - 1];
}
if (currentLine.isFull()) {
var newLine = new PolyLine(currentLine.getLastPoint(), this.color, this.lifetime);
var newLine = new PolyLine(currentLine.getLastPoint(), this.color, this.lifetime, this.textureMiddle);
newLine.enqueuePoint(currentLine.getLastPoint(), strokeWidth);
this.lines.push(newLine);
currentLine = newLine;
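
The PolyLine and InfiniteLine constructors above gain optional texture arguments. A hypothetical usage sketch, assuming this line library has been pulled in with Script.include; the constructor and enqueuePoint signatures are taken from the code above, while the texture URLs and offsets are placeholders:

var line = new InfiniteLine(
    MyAvatar.position,
    { red: 255, green: 255, blue: 255 },
    20,                                      // lifetime, as with DEFAULT_LIFETIME above
    "http://example.com/stroke-begin.png",   // texture for the first PolyLine segment
    "http://example.com/stroke-middle.png"   // texture for every following segment
);
line.enqueuePoint(Vec3.sum(MyAvatar.position, { x: 0, y: 0.1, z: 0 }), DEFAULT_STROKE_WIDTH);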


@ -12,8 +12,8 @@
Script.include("../../libraries/utils.js");
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/wand.fbx';
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/actual_no_top_collision_hull.obj';
var WAND_MODEL = 'http://hifi-content.s3.amazonaws.com/james/bubblewand/wand.fbx';
var WAND_COLLISION_SHAPE = 'http://hifi-content.s3.amazonaws.com/james/bubblewand/wand_collision_hull.obj';
var WAND_SCRIPT_URL = Script.resolvePath("wand.js");
@ -43,5 +43,18 @@ var wand = Entities.addEntity({
//must be enabled to be grabbable in the physics engine
collisionsWillMove: true,
compoundShapeURL: WAND_COLLISION_SHAPE,
script: WAND_SCRIPT_URL
script: WAND_SCRIPT_URL,
userData: JSON.stringify({
grabbableKey: {
invertSolidWhileHeld: true,
spatialKey: {
relativePosition: {
x: 0,
y: 0.1,
z: 0
},
relativeRotation: Quat.fromPitchYawRollDegrees(0, 0, 90)
}
}
})
});


@ -12,7 +12,7 @@
/*global MyAvatar, Entities, AnimationCache, SoundCache, Scene, Camera, Overlays, HMD, AvatarList, AvatarManager, Controller, UndoStack, Window, Account, GlobalServices, Script, ScriptDiscoveryService, LODManager, Menu, Vec3, Quat, AudioDevice, Paths, Clipboard, Settings, XMLHttpRequest, randFloat, randInt */
(function () {
(function() {
Script.include("../../libraries/utils.js");
@ -58,23 +58,23 @@
BubbleWand.prototype = {
timePassed: null,
currentBubble: null,
preload: function (entityID) {
preload: function(entityID) {
this.entityID = entityID;
},
getWandTipPosition: function (properties) {
getWandTipPosition: function(properties) {
//the tip of the wand is going to be in a different place than the center, so we move in space relative to the model to find that position
var upVector = Quat.getUp(properties.rotation);
var upOffset = Vec3.multiply(upVector, WAND_TIP_OFFSET);
var wandTipPosition = Vec3.sum(properties.position, upOffset);
return wandTipPosition;
},
addCollisionsToBubbleAfterCreation: function (bubble) {
addCollisionsToBubbleAfterCreation: function(bubble) {
//if the bubble collides immediately, we get weird effects, so we add collisions after release
Entities.editEntity(bubble, {
collisionsWillMove: true
});
},
randomizeBubbleGravity: function () {
randomizeBubbleGravity: function() {
//change up the gravity a little bit for variation in floating effects
var randomNumber = randFloat(BUBBLE_GRAVITY_MIN, BUBBLE_GRAVITY_MAX);
var gravity = {
@ -84,7 +84,7 @@
};
return gravity;
},
growBubbleWithWandVelocity: function (properties, deltaTime) {
growBubbleWithWandVelocity: function(properties, deltaTime) {
//get the wand and tip position for calculations
var wandPosition = properties.position;
this.getWandTipPosition(properties);
@ -145,7 +145,7 @@
dimensions: dimensions
});
},
createBubbleAtTipOfWand: function () {
createBubbleAtTipOfWand: function() {
//create a new bubble at the tip of the wand
var properties = Entities.getEntityProperties(this.entityID, ["position", "rotation"]);
@ -162,24 +162,23 @@
position: this.getWandTipPosition(properties),
dimensions: BUBBLE_INITIAL_DIMENSIONS,
collisionsWillMove: false,
ignoreForCollisions: false,
ignoreForCollisions: true,
linearDamping: BUBBLE_LINEAR_DAMPING,
shapeType: "sphere"
});
},
startNearGrab: function () {
startNearGrab: function() {
//create a bubble to grow at the start of the grab
if (this.currentBubble === null) {
this.createBubbleAtTipOfWand();
}
},
continueNearGrab: function () {
continueNearGrab: function() {
var deltaTime = checkInterval();
//only get the properties that we need
var properties = Entities.getEntityProperties(this.entityID, ["position", "rotation"]);
var wandTipPosition = this.getWandTipPosition(properties);
//update the bubble to stay with the wand tip
@ -189,7 +188,7 @@
this.growBubbleWithWandVelocity(properties, deltaTime);
},
releaseGrab: function () {
releaseGrab: function() {
//delete the current bubble and reset state when the wand is released
Entities.deleteEntity(this.currentBubble);
this.currentBubble = null;


@ -1187,7 +1187,7 @@ void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
// virtual
void Avatar::rebuildSkeletonBody() {
DependencyManager::get<AvatarManager>()->updateAvatarPhysicsShape(getSessionUUID());
DependencyManager::get<AvatarManager>()->updateAvatarPhysicsShape(this);
}
glm::vec3 Avatar::getLeftPalmPosition() {


@ -110,28 +110,34 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
}
void AvatarManager::updateOtherAvatars(float deltaTime) {
// lock the hash for read to check the size
QReadLocker lock(&_hashLock);
if (_avatarHash.size() < 2 && _avatarFades.isEmpty()) {
return;
}
lock.unlock();
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::updateAvatars()");
PerformanceTimer perfTimer("otherAvatars");
// simulate avatars
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
auto hashCopy = getHashCopy();
AvatarHash::iterator avatarIterator = hashCopy.begin();
while (avatarIterator != hashCopy.end()) {
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
if (avatar == _myAvatar || !avatar->isInitialized()) {
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
// DO NOT update or fade out uninitialized Avatars
++avatarIterator;
} else if (avatar->shouldDie()) {
removeAvatarMotionState(avatar);
_avatarFades.push_back(avatarIterator.value());
QWriteLocker locker(&_hashLock);
avatarIterator = _avatarHash.erase(avatarIterator);
removeAvatar(avatarIterator.key());
++avatarIterator;
} else {
avatar->startUpdate();
avatar->simulate(deltaTime);
@ -169,19 +175,21 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
}
AvatarSharedPointer AvatarManager::newSharedAvatar() {
return AvatarSharedPointer(std::make_shared<Avatar>(std::make_shared<AvatarRig>()));
return std::make_shared<Avatar>(std::make_shared<AvatarRig>());
}
// virtual
AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
auto avatar = std::dynamic_pointer_cast<Avatar>(AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer));
auto newAvatar = AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer);
auto rawRenderableAvatar = std::static_pointer_cast<Avatar>(newAvatar);
render::ScenePointer scene = qApp->getMain3DScene();
render::PendingChanges pendingChanges;
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
avatar->addToScene(avatar, scene, pendingChanges);
rawRenderableAvatar->addToScene(rawRenderableAvatar, scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
return avatar;
return newAvatar;
}
// protected
@ -200,20 +208,25 @@ void AvatarManager::removeAvatarMotionState(AvatarSharedPointer avatar) {
// virtual
void AvatarManager::removeAvatar(const QUuid& sessionUUID) {
AvatarHash::iterator avatarIterator = _avatarHash.find(sessionUUID);
if (avatarIterator != _avatarHash.end()) {
std::shared_ptr<Avatar> avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
if (avatar != _myAvatar && avatar->isInitialized()) {
removeAvatarMotionState(avatar);
_avatarFades.push_back(avatarIterator.value());
QWriteLocker locker(&_hashLock);
_avatarHash.erase(avatarIterator);
}
QWriteLocker locker(&_hashLock);
auto removedAvatar = _avatarHash.take(sessionUUID);
if (removedAvatar) {
handleRemovedAvatar(removedAvatar);
}
}
void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar) {
AvatarHashMap::handleRemovedAvatar(removedAvatar);
removeAvatarMotionState(removedAvatar);
_avatarFades.push_back(removedAvatar);
}
void AvatarManager::clearOtherAvatars() {
// clear any avatars that came from an avatar-mixer
QWriteLocker locker(&_hashLock);
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
@ -221,10 +234,10 @@ void AvatarManager::clearOtherAvatars() {
// don't remove myAvatar or uninitialized avatars from the list
++avatarIterator;
} else {
removeAvatarMotionState(avatar);
_avatarFades.push_back(avatarIterator.value());
QWriteLocker locker(&_hashLock);
auto removedAvatar = avatarIterator.value();
avatarIterator = _avatarHash.erase(avatarIterator);
handleRemovedAvatar(removedAvatar);
}
}
_myAvatar->clearLookAtTargetAvatar();
@ -252,6 +265,7 @@ QVector<QUuid> AvatarManager::getAvatarIdentifiers() {
QReadLocker locker(&_hashLock);
return _avatarHash.keys().toVector();
}
AvatarData* AvatarManager::getAvatar(QUuid avatarID) {
QReadLocker locker(&_hashLock);
return _avatarHash[avatarID].get(); // Non-obvious: A bogus avatarID answers your own avatar.
@ -317,23 +331,19 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
}
}
void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
AvatarHash::iterator avatarItr = _avatarHash.find(id);
if (avatarItr != _avatarHash.end()) {
auto avatar = std::static_pointer_cast<Avatar>(avatarItr.value());
AvatarMotionState* motionState = avatar->getMotionState();
if (motionState) {
motionState->addDirtyFlags(Simulation::DIRTY_SHAPE);
} else {
ShapeInfo shapeInfo;
avatar->computeShapeInfo(shapeInfo);
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
if (shape) {
AvatarMotionState* motionState = new AvatarMotionState(avatar.get(), shape);
avatar->setMotionState(motionState);
_motionStatesToAdd.insert(motionState);
_avatarMotionStates.insert(motionState);
}
void AvatarManager::updateAvatarPhysicsShape(Avatar* avatar) {
AvatarMotionState* motionState = avatar->getMotionState();
if (motionState) {
motionState->addDirtyFlags(Simulation::DIRTY_SHAPE);
} else {
ShapeInfo shapeInfo;
avatar->computeShapeInfo(shapeInfo);
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
if (shape) {
AvatarMotionState* motionState = new AvatarMotionState(avatar, shape);
avatar->setMotionState(motionState);
_motionStatesToAdd.insert(motionState);
_avatarMotionStates.insert(motionState);
}
}
}
@ -341,7 +351,7 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
for (auto avatarData : _avatarHash) {
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
auto avatar = std::static_pointer_cast<Avatar>(avatarData);
render::ScenePointer scene = qApp->getMain3DScene();
render::PendingChanges pendingChanges;
avatar->addToScene(avatar, scene, pendingChanges);
@ -349,7 +359,7 @@ void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
}
} else {
for (auto avatarData : _avatarHash) {
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
auto avatar = std::static_pointer_cast<Avatar>(avatarData);
render::ScenePointer scene = qApp->getMain3DScene();
render::PendingChanges pendingChanges;
avatar->removeFromScene(avatar, scene, pendingChanges);
@ -363,11 +373,6 @@ AvatarSharedPointer AvatarManager::getAvatarBySessionID(const QUuid& sessionID)
if (sessionID == _myAvatar->getSessionUUID()) {
return std::static_pointer_cast<Avatar>(_myAvatar);
}
QReadLocker locker(&_hashLock);
auto iter = _avatarHash.find(sessionID);
if (iter != _avatarHash.end()) {
return iter.value();
} else {
return AvatarSharedPointer();
}
return findAvatar(sessionID);
}
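The identifier and lookup accessors above (getAvatarIdentifiers() and getAvatar()) take a read lock on the hash, so they are safe to poll from a script. A minimal sketch of doing so, assuming the manager is exposed to interface scripts under an AvatarManager global and that the QUuid vector converts to a script array; neither registration is shown in this diff:

// Hypothetical script-side walk of the currently known avatars. Per the
// comment in getAvatar(), an unknown ID falls through to your own avatar.
var ids = AvatarManager.getAvatarIdentifiers();
for (var i = 0; i < ids.length; i++) {
    var avatar = AvatarManager.getAvatar(ids[i]);
    print("Avatar " + ids[i] + " is at " +
          avatar.position.x + ", " + avatar.position.y + ", " + avatar.position.z);
}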

View file

@ -63,7 +63,7 @@ public:
void handleOutgoingChanges(const VectorOfMotionStates& motionStates);
void handleCollisionEvents(const CollisionEvents& collisionEvents);
void updateAvatarPhysicsShape(const QUuid& id);
void updateAvatarPhysicsShape(Avatar* avatar);
public slots:
void setShouldShowReceiveStats(bool shouldShowReceiveStats) { _shouldShowReceiveStats = shouldShowReceiveStats; }
@ -79,7 +79,9 @@ private:
virtual AvatarSharedPointer newSharedAvatar();
virtual AvatarSharedPointer addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer);
void removeAvatarMotionState(AvatarSharedPointer avatar);
virtual void removeAvatar(const QUuid& sessionUUID);
virtual void handleRemovedAvatar(const AvatarSharedPointer& removedAvatar);
QVector<AvatarSharedPointer> _avatarFades;
std::shared_ptr<MyAvatar> _myAvatar;

View file

@ -428,7 +428,7 @@ void MyAvatar::updateHMDFollowVelocity() {
}
if (_followSpeed > 0.0f) {
// to compute new velocity we must rotate offset into the world-frame
glm::quat sensorToWorldRotation = extractRotation(_sensorToWorldMatrix);
glm::quat sensorToWorldRotation = glm::normalize(glm::quat_cast(_sensorToWorldMatrix));
_followVelocity = _followSpeed * glm::normalize(sensorToWorldRotation * offset);
}
}
@ -982,10 +982,8 @@ void MyAvatar::updateLookAtTargetAvatar() {
const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;
AvatarHash hash;
DependencyManager::get<AvatarManager>()->withAvatarHash([&] (const AvatarHash& locked) {
hash = locked; // make a shallow copy and operate on that, to minimize lock time
});
AvatarHash hash = DependencyManager::get<AvatarManager>()->getHashCopy();
foreach (const AvatarSharedPointer& avatarPointer, hash) {
auto avatar = static_pointer_cast<Avatar>(avatarPointer);
bool isCurrentTarget = avatar->getIsLookAtTarget();

View file

@ -22,13 +22,9 @@ AvatarHashMap::AvatarHashMap() {
connect(DependencyManager::get<NodeList>().data(), &NodeList::uuidChanged, this, &AvatarHashMap::sessionUUIDChanged);
}
void AvatarHashMap::withAvatarHash(std::function<void(const AvatarHash& hash)> callback) {
QReadLocker locker(&_hashLock);
callback(_avatarHash);
}
bool AvatarHashMap::isAvatarInRange(const glm::vec3& position, const float range) {
QReadLocker locker(&_hashLock);
foreach(const AvatarSharedPointer& sharedAvatar, _avatarHash) {
auto hashCopy = getHashCopy();
foreach(const AvatarSharedPointer& sharedAvatar, hashCopy) {
glm::vec3 avatarPosition = sharedAvatar->getPosition();
float distance = glm::distance(avatarPosition, position);
if (distance < range) {
@ -44,16 +40,34 @@ AvatarSharedPointer AvatarHashMap::newSharedAvatar() {
AvatarSharedPointer AvatarHashMap::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
qCDebug(avatars) << "Adding avatar with sessionUUID " << sessionUUID << "to AvatarHashMap.";
AvatarSharedPointer avatar = newSharedAvatar();
auto avatar = newSharedAvatar();
avatar->setSessionUUID(sessionUUID);
avatar->setOwningAvatarMixer(mixerWeakPointer);
QWriteLocker locker(&_hashLock);
_avatarHash.insert(sessionUUID, avatar);
emit avatarAddedEvent(sessionUUID);
return avatar;
}
AvatarSharedPointer AvatarHashMap::newOrExistingAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
QWriteLocker locker(&_hashLock);
auto avatar = _avatarHash.value(sessionUUID);
if (!avatar) {
avatar = addAvatar(sessionUUID, mixerWeakPointer);
}
return avatar;
}
AvatarSharedPointer AvatarHashMap::findAvatar(const QUuid& sessionUUID) {
QReadLocker locker(&_hashLock);
return _avatarHash.value(sessionUUID);
}
void AvatarHashMap::processAvatarDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
// enumerate over all of the avatars in this packet
@ -66,10 +80,7 @@ void AvatarHashMap::processAvatarDataPacket(QSharedPointer<NLPacket> packet, Sha
QByteArray byteArray = packet->readWithoutCopy(packet->bytesLeftToRead());
if (sessionUUID != _lastOwnerSessionUUID) {
AvatarSharedPointer avatar = _avatarHash.value(sessionUUID);
if (!avatar) {
avatar = addAvatar(sessionUUID, sendingNode);
}
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
// have the matching (or new) avatar parse the data from the packet
int bytesRead = avatar->parseDataFromBuffer(byteArray);
@ -97,10 +108,8 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<NLPacket> packet,
identityStream >> sessionUUID >> faceMeshURL >> skeletonURL >> attachmentData >> displayName;
// mesh URL for a UUID, find avatar in our list
AvatarSharedPointer avatar = _avatarHash.value(sessionUUID);
if (!avatar) {
avatar = addAvatar(sessionUUID, sendingNode);
}
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
if (avatar->getFaceModelURL() != faceMeshURL) {
avatar->setFaceModelURL(faceMeshURL);
}
@ -122,10 +131,7 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<NLPacket> packet,
void AvatarHashMap::processAvatarBillboardPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
QUuid sessionUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
AvatarSharedPointer avatar = _avatarHash.value(sessionUUID);
if (!avatar) {
avatar = addAvatar(sessionUUID, sendingNode);
}
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
QByteArray billboard = packet->read(packet->bytesLeftToRead());
if (avatar->getBillboard() != billboard) {
@ -137,13 +143,22 @@ void AvatarHashMap::processKillAvatar(QSharedPointer<NLPacket> packet, SharedNod
// read the node id
QUuid sessionUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
removeAvatar(sessionUUID);
}
void AvatarHashMap::removeAvatar(const QUuid& sessionUUID) {
QWriteLocker locker(&_hashLock);
_avatarHash.remove(sessionUUID);
emit avatarRemovedEvent(sessionUUID);
auto removedAvatar = _avatarHash.take(sessionUUID);
if (removedAvatar) {
handleRemovedAvatar(removedAvatar);
}
}
void AvatarHashMap::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar) {
qDebug() << "Removed avatar with UUID" << uuidStringWithoutCurlyBraces(removedAvatar->getSessionUUID())
<< "from AvatarHashMap";
emit avatarRemovedEvent(removedAvatar->getSessionUUID());
}
void AvatarHashMap::sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& oldUUID) {

View file

@ -31,7 +31,7 @@ class AvatarHashMap : public QObject, public Dependency {
SINGLETON_DEPENDENCY
public:
void withAvatarHash(std::function<void(const AvatarHash& hash)>);
AvatarHash getHashCopy() { QReadLocker lock(&_hashLock); return _avatarHash; }
int size() { return _avatarHash.size(); }
signals:
@ -55,7 +55,11 @@ protected:
virtual AvatarSharedPointer newSharedAvatar();
virtual AvatarSharedPointer addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer);
AvatarSharedPointer newOrExistingAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer);
virtual AvatarSharedPointer findAvatar(const QUuid& sessionUUID); // uses a QReadLocker on the hashLock
virtual void removeAvatar(const QUuid& sessionUUID);
virtual void handleRemovedAvatar(const AvatarSharedPointer& removedAvatar);
AvatarHash _avatarHash;
// "Case-based safety": Most access to the _avatarHash is on the same thread. Write access is protected by a write-lock.

View file

@ -36,7 +36,6 @@ PolyLineEntityItem(entityItemID, properties) {
gpu::PipelinePointer RenderablePolyLineEntityItem::_pipeline;
gpu::Stream::FormatPointer RenderablePolyLineEntityItem::_format;
gpu::TexturePointer RenderablePolyLineEntityItem::_texture;
int32_t RenderablePolyLineEntityItem::PAINTSTROKE_GPU_SLOT;
void RenderablePolyLineEntityItem::createPipeline() {
@ -44,9 +43,6 @@ void RenderablePolyLineEntityItem::createPipeline() {
static const int COLOR_OFFSET = 24;
static const int TEXTURE_OFFSET = 28;
auto textureCache = DependencyManager::get<TextureCache>();
QString path = PathUtils::resourcesPath() + "images/paintStroke.png";
_texture = textureCache->getImageTexture(path);
_format.reset(new gpu::Stream::Format());
_format->setAttribute(gpu::Stream::POSITION, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
_format->setAttribute(gpu::Stream::NORMAL, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), NORMAL_OFFSET);
@ -132,6 +128,13 @@ void RenderablePolyLineEntityItem::render(RenderArgs* args) {
createPipeline();
}
if (!_texture || _texturesChangedFlag) {
auto textureCache = DependencyManager::get<TextureCache>();
QString path = _textures.isEmpty() ? PathUtils::resourcesPath() + "images/paintStroke.png" : _textures;
_texture = textureCache->getTexture(QUrl(path));
_texturesChangedFlag = false;
}
PerformanceTimer perfTimer("RenderablePolyLineEntityItem::render");
Q_ASSERT(getType() == EntityTypes::PolyLine);
@ -147,7 +150,11 @@ void RenderablePolyLineEntityItem::render(RenderArgs* args) {
batch.setModelTransform(transform);
batch.setPipeline(_pipeline);
batch.setResourceTexture(PAINTSTROKE_GPU_SLOT, _texture);
if (_texture->isLoaded()) {
batch.setResourceTexture(PAINTSTROKE_GPU_SLOT, _texture->getGPUTexture());
} else {
batch.setResourceTexture(PAINTSTROKE_GPU_SLOT, args->_whiteTexture);
}
batch.setInputFormat(_format);
batch.setInputBuffer(0, _verticesBuffer, 0, _format->getChannels().at(0)._stride);

View file

@ -12,10 +12,13 @@
#ifndef hifi_RenderablePolyLineEntityItem_h
#define hifi_RenderablePolyLineEntityItem_h
#include <gpu/Batch.h>
#include <GeometryCache.h>
#include <PolyLineEntityItem.h>
#include "RenderableEntityItem.h"
#include <GeometryCache.h>
#include <TextureCache.h>
#include <QReadWriteLock>
@ -29,9 +32,10 @@ public:
SIMPLE_RENDERABLE();
NetworkTexturePointer _texture;
static gpu::PipelinePointer _pipeline;
static gpu::Stream::FormatPointer _format;
static gpu::TexturePointer _texture;
static int32_t PAINTSTROKE_GPU_SLOT;
protected:

View file

@ -432,6 +432,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LINE_POINTS, linePoints);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_NORMALS, normals);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_STROKE_WIDTHS, strokeWidths);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_TEXTURES, textures);
}
// Sitting properties support
@ -1011,6 +1012,7 @@ bool EntityItemProperties::encodeEntityEditPacket(PacketType command, EntityItem
APPEND_ENTITY_PROPERTY(PROP_LINE_POINTS, properties.getLinePoints());
APPEND_ENTITY_PROPERTY(PROP_NORMALS, properties.getNormals());
APPEND_ENTITY_PROPERTY(PROP_STROKE_WIDTHS, properties.getStrokeWidths());
APPEND_ENTITY_PROPERTY(PROP_TEXTURES, properties.getTextures());
}
APPEND_ENTITY_PROPERTY(PROP_MARKETPLACE_ID, properties.getMarketplaceID());
@ -1287,6 +1289,7 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_LINE_POINTS, QVector<glm::vec3>, setLinePoints);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_NORMALS, QVector<glm::vec3>, setNormals);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_STROKE_WIDTHS, QVector<float>, setStrokeWidths);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_TEXTURES, QString, setTextures);
}
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_MARKETPLACE_ID, QString, setMarketplaceID);

View file

@ -37,7 +37,8 @@ _pointsChanged(true),
_points(QVector<glm::vec3>(0.0f)),
_vertices(QVector<glm::vec3>(0.0f)),
_normals(QVector<glm::vec3>(0.0f)),
_strokeWidths(QVector<float>(0.0f))
_strokeWidths(QVector<float>(0.0f)),
_textures("")
{
_type = EntityTypes::PolyLine;
_created = properties.getCreated();
@ -56,6 +57,7 @@ EntityItemProperties PolyLineEntityItem::getProperties(EntityPropertyFlags desir
COPY_ENTITY_PROPERTY_TO_PROPERTIES(linePoints, getLinePoints);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(normals, getNormals);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(strokeWidths, getStrokeWidths);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(textures, getTextures);
properties._glowLevel = getGlowLevel();
properties._glowLevelChanged = false;
@ -72,6 +74,7 @@ bool PolyLineEntityItem::setProperties(const EntityItemProperties& properties) {
SET_ENTITY_PROPERTY_FROM_PROPERTIES(linePoints, setLinePoints);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(normals, setNormals);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(strokeWidths, setStrokeWidths);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(textures, setTextures);
if (somethingChanged) {
bool wantDebug = false;
@ -196,6 +199,7 @@ int PolyLineEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* da
READ_ENTITY_PROPERTY(PROP_LINE_POINTS, QVector<glm::vec3>, setLinePoints);
READ_ENTITY_PROPERTY(PROP_NORMALS, QVector<glm::vec3>, setNormals);
READ_ENTITY_PROPERTY(PROP_STROKE_WIDTHS, QVector<float>, setStrokeWidths);
READ_ENTITY_PROPERTY(PROP_TEXTURES, QString, setTextures);
return bytesRead;
}
@ -209,6 +213,7 @@ EntityPropertyFlags PolyLineEntityItem::getEntityProperties(EncodeBitstreamParam
requestedProperties += PROP_LINE_POINTS;
requestedProperties += PROP_NORMALS;
requestedProperties += PROP_STROKE_WIDTHS;
requestedProperties += PROP_TEXTURES;
return requestedProperties;
}
@ -228,6 +233,7 @@ void PolyLineEntityItem::appendSubclassData(OctreePacketData* packetData, Encode
APPEND_ENTITY_PROPERTY(PROP_LINE_POINTS, getLinePoints());
APPEND_ENTITY_PROPERTY(PROP_NORMALS, getNormals());
APPEND_ENTITY_PROPERTY(PROP_STROKE_WIDTHS, getStrokeWidths());
APPEND_ENTITY_PROPERTY(PROP_TEXTURES, getTextures());
}
void PolyLineEntityItem::debugDump() const {

View file

@ -67,7 +67,14 @@ class PolyLineEntityItem : public EntityItem {
bool setStrokeWidths(const QVector<float>& strokeWidths);
const QVector<float>& getStrokeWidths() const{ return _strokeWidths; }
const QString& getTextures() const { return _textures; }
void setTextures(const QString& textures) {
if (_textures != textures) {
_textures = textures;
_texturesChangedFlag = true;
}
}
virtual ShapeType getShapeType() const { return SHAPE_TYPE_LINE; }
@ -90,6 +97,8 @@ class PolyLineEntityItem : public EntityItem {
QVector<glm::vec3> _vertices;
QVector<glm::vec3> _normals;
QVector<float> _strokeWidths;
QString _textures;
bool _texturesChangedFlag { false };
mutable QReadWriteLock _quadReadWriteLock;
};
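With PROP_TEXTURES carried through EntityItemProperties and the wire protocol above, a script can assign a texture to a PolyLine at creation time, and the renderer picks it up via the _texturesChangedFlag path. A minimal usage sketch, assuming a running Interface with the Entities API; the texture URL is purely illustrative:

// Create a short vertical PolyLine and point its new "textures" property at an
// image; when the property is left empty the renderer falls back to paintStroke.png.
var polyLine = Entities.addEntity({
    type: "PolyLine",
    position: MyAvatar.position,
    dimensions: { x: 2, y: 2, z: 2 },
    linePoints: [{ x: 0, y: 0, z: 0 }, { x: 0, y: 0.5, z: 0 }, { x: 0, y: 1, z: 0 }],
    normals: [{ x: 0, y: 0, z: 1 }, { x: 0, y: 0, z: 1 }, { x: 0, y: 0, z: 1 }],
    strokeWidths: [0.1, 0.1, 0.1],
    textures: "http://example.com/textures/brushStroke.png"  // illustrative URL
});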

View file

@ -365,3 +365,65 @@ void AssetClient::handleNodeKilled(SharedNodePointer node) {
}
}
}
void AssetScriptingInterface::uploadData(QString data, QString extension, QScriptValue callback) {
QByteArray dataByteArray = data.toUtf8();
auto upload = DependencyManager::get<AssetClient>()->createUpload(dataByteArray, extension);
if (!upload) {
qCWarning(asset_client) << "Error uploading file to asset server";
return;
}
QObject::connect(upload, &AssetUpload::finished, this, [callback, extension](AssetUpload* upload, const QString& hash) mutable {
if (callback.isFunction()) {
QString url = "atp://" + hash + "." + extension;
QScriptValueList args { url };
callback.call(QScriptValue(), args);
}
});
upload->start();
}
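ScriptEngine registers this interface under the Assets global (see the ScriptEngine change later in this diff), so the upload path is reachable from scripts directly. A minimal sketch; the payload is arbitrary text:

// Upload a small text blob; the callback receives the "atp://<hash>.<extension>"
// URL that uploadData assembles from the hash returned by the asset server.
Assets.uploadData("Hello asset server", "txt", function(url) {
    print("Uploaded to " + url);
});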
void AssetScriptingInterface::downloadData(QString urlString, QScriptValue callback) {
const QString ATP_SCHEME { "atp://" };
if (!urlString.startsWith(ATP_SCHEME)) {
return;
}
// Make request to atp
auto path = urlString.right(urlString.length() - ATP_SCHEME.length());
auto parts = path.split(".", QString::SkipEmptyParts);
auto hash = parts.length() > 0 ? parts[0] : "";
auto extension = parts.length() > 1 ? parts[1] : "";
if (hash.length() != SHA256_HASH_HEX_LENGTH) {
return;
}
auto assetClient = DependencyManager::get<AssetClient>();
auto assetRequest = assetClient->createRequest(hash, extension);
if (!assetRequest) {
return;
}
_pendingRequests << assetRequest;
connect(assetRequest, &AssetRequest::finished, [this, callback](AssetRequest* request) mutable {
Q_ASSERT(request->getState() == AssetRequest::Finished);
if (request->getError() == AssetRequest::Error::NoError) {
if (callback.isFunction()) {
QString data = QString::fromUtf8(request->getData());
QScriptValueList args { data };
callback.call(QScriptValue(), args);
}
}
request->deleteLater();
_pendingRequests.remove(request);
});
assetRequest->start();
}
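downloadData() only accepts atp:// URLs whose hash part is a full SHA-256 hex string and returns silently otherwise, so the URL handed back by uploadData is the natural input. A round-trip sketch under the same assumptions as above:

// Upload, then immediately fetch the data back through the asset server;
// the download callback only fires when the request finishes without error.
Assets.uploadData("Round-trip test", "txt", function(url) {
    Assets.downloadData(url, function(data) {
        print("Downloaded back: " + data);
    });
});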

View file

@ -14,6 +14,7 @@
#define hifi_AssetClient_h
#include <QString>
#include <QScriptValue>
#include <DependencyManager.h>
@ -21,6 +22,7 @@
#include "LimitedNodeList.h"
#include "NLPacket.h"
#include "Node.h"
#include "ResourceCache.h"
class AssetRequest;
class AssetUpload;
@ -68,4 +70,15 @@ private:
friend class AssetUpload;
};
class AssetScriptingInterface : public QObject {
Q_OBJECT
public:
Q_INVOKABLE void uploadData(QString data, QString extension, QScriptValue callback);
Q_INVOKABLE void downloadData(QString url, QScriptValue downloadComplete);
protected:
QSet<AssetRequest*> _pendingRequests;
};
#endif

View file

@ -36,7 +36,7 @@ void MessagesClient::init() {
}
}
void MessagesClient::handleMessagesPacket(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
void MessagesClient::decodeMessagesPacket(QSharedPointer<NLPacketList> packetList, QString& channel, QString& message, QUuid& senderID) {
QByteArray packetData = packetList->getMessage();
QBuffer packet{ &packetData };
packet.open(QIODevice::ReadOnly);
@ -44,38 +44,60 @@ void MessagesClient::handleMessagesPacket(QSharedPointer<NLPacketList> packetLis
quint16 channelLength;
packet.read(reinterpret_cast<char*>(&channelLength), sizeof(channelLength));
auto channelData = packet.read(channelLength);
QString channel = QString::fromUtf8(channelData);
channel = QString::fromUtf8(channelData);
quint16 messageLength;
packet.read(reinterpret_cast<char*>(&messageLength), sizeof(messageLength));
auto messageData = packet.read(messageLength);
QString message = QString::fromUtf8(messageData);
message = QString::fromUtf8(messageData);
emit messageReceived(channel, message, senderNode->getUUID());
QByteArray bytesSenderID = packet.read(NUM_BYTES_RFC4122_UUID);
if (bytesSenderID.length() == NUM_BYTES_RFC4122_UUID) {
senderID = QUuid::fromRfc4122(bytesSenderID);
} else {
QUuid emptyUUID;
senderID = emptyUUID; // packet was missing the sender UUID; use the default (null) UUID instead
}
}
void MessagesClient::sendMessage(const QString& channel, const QString& message) {
std::unique_ptr<NLPacketList> MessagesClient::encodeMessagesPacket(QString channel, QString message, QUuid senderID) {
auto packetList = NLPacketList::create(PacketType::MessagesData, QByteArray(), true, true);
auto channelUtf8 = channel.toUtf8();
quint16 channelLength = channelUtf8.length();
packetList->writePrimitive(channelLength);
packetList->write(channelUtf8);
auto messageUtf8 = message.toUtf8();
quint16 messageLength = messageUtf8.length();
packetList->writePrimitive(messageLength);
packetList->write(messageUtf8);
packetList->write(senderID.toRfc4122());
return packetList;
}
void MessagesClient::handleMessagesPacket(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
QString channel, message;
QUuid senderID;
decodeMessagesPacket(packetList, channel, message, senderID);
emit messageReceived(channel, message, senderID);
}
void MessagesClient::sendMessage(QString channel, QString message) {
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer messagesMixer = nodeList->soloNodeOfType(NodeType::MessagesMixer);
if (messagesMixer) {
auto packetList = NLPacketList::create(PacketType::MessagesData, QByteArray(), true, true);
auto channelUtf8 = channel.toUtf8();
quint16 channelLength = channelUtf8.length();
packetList->writePrimitive(channelLength);
packetList->write(channelUtf8);
auto messageUtf8 = message.toUtf8();
quint16 messageLength = messageUtf8.length();
packetList->writePrimitive(messageLength);
packetList->write(messageUtf8);
QUuid senderID = nodeList->getSessionUUID();
auto packetList = encodeMessagesPacket(channel, message, senderID);
nodeList->sendPacketList(std::move(packetList), *messagesMixer);
}
}
void MessagesClient::subscribe(const QString& channel) {
void MessagesClient::subscribe(QString channel) {
_subscribedChannels << channel;
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer messagesMixer = nodeList->soloNodeOfType(NodeType::MessagesMixer);
@ -87,7 +109,7 @@ void MessagesClient::subscribe(const QString& channel) {
}
}
void MessagesClient::unsubscribe(const QString& channel) {
void MessagesClient::unsubscribe(QString channel) {
_subscribedChannels.remove(channel);
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer messagesMixer = nodeList->soloNodeOfType(NodeType::MessagesMixer);

View file

@ -28,12 +28,16 @@ public:
Q_INVOKABLE void init();
Q_INVOKABLE void sendMessage(const QString& channel, const QString& message);
Q_INVOKABLE void subscribe(const QString& channel);
Q_INVOKABLE void unsubscribe(const QString& channel);
Q_INVOKABLE void sendMessage(QString channel, QString message);
Q_INVOKABLE void subscribe(QString channel);
Q_INVOKABLE void unsubscribe(QString channel);
static void decodeMessagesPacket(QSharedPointer<NLPacketList> packetList, QString& channel, QString& message, QUuid& senderID);
static std::unique_ptr<NLPacketList> encodeMessagesPacket(QString channel, QString message, QUuid senderID);
signals:
void messageReceived(const QString& channel, const QString& message, const QUuid& senderUUID);
void messageReceived(QString channel, QString message, QUuid senderUUID);
private slots:
void handleMessagesPacket(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode);
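For scripts, the visible effect of this refactor is that messageReceived now carries the sender's session UUID decoded from the packet. A usage sketch, assuming the client is exposed to scripts under a Messages global; that registration is not part of this diff:

// Subscribe to a channel and log who sent each message; senderUUID is the
// third argument, populated from the UUID appended by encodeMessagesPacket.
Messages.subscribe("example-channel");
Messages.messageReceived.connect(function(channel, message, senderUUID) {
    print("[" + channel + "] " + senderUUID + ": " + message);
});
Messages.sendMessage("example-channel", "hello");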

View file

@ -41,7 +41,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::EntityAdd:
case PacketType::EntityEdit:
case PacketType::EntityData:
return VERSION_ENTITIES_PARTICLES_ADDITIVE_BLENDING;
return VERSION_ENTITIES_POLYLINE_TEXTURE;
case PacketType::AvatarData:
case PacketType::BulkAvatarData:
default:

View file

@ -159,5 +159,6 @@ const PacketVersion VERSION_ENTITIES_ANIMATION_PROPERTIES_GROUP = 46;
const PacketVersion VERSION_ENTITIES_KEYLIGHT_PROPERTIES_GROUP = 47;
const PacketVersion VERSION_ENTITIES_KEYLIGHT_PROPERTIES_GROUP_BIS = 48;
const PacketVersion VERSION_ENTITIES_PARTICLES_ADDITIVE_BLENDING = 49;
const PacketVersion VERSION_ENTITIES_POLYLINE_TEXTURE = 50;
#endif // hifi_PacketHeaders_h

View file

@ -1841,6 +1841,7 @@ bool Octree::readFromStream(unsigned long streamLength, QDataStream& inputStream
bool Octree::readSVOFromStream(unsigned long streamLength, QDataStream& inputStream) {
qWarning() << "SVO file format depricated. Support for reading SVO files is no longer support and will be removed soon.";
bool fileOk = false;
@ -2062,6 +2063,8 @@ void Octree::writeToJSONFile(const char* fileName, OctreeElementPointer element,
}
void Octree::writeToSVOFile(const char* fileName, OctreeElementPointer element) {
qWarning() << "SVO file format depricated. Support for reading SVO files is no longer support and will be removed soon.";
std::ofstream file(fileName, std::ios::out|std::ios::binary);
if(file.is_open()) {

View file

@ -4,6 +4,6 @@ set(TARGET_NAME recording)
setup_hifi_library(Script)
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
link_hifi_libraries(shared)
link_hifi_libraries(shared networking)
GroupSources("src/recording")

View file

@ -13,6 +13,11 @@
#include "impl/FileClip.h"
#include "impl/BufferClip.h"
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>
#include <QtCore/QBuffer>
#include <QtCore/QDebug>
using namespace recording;
Clip::Pointer Clip::fromFile(const QString& filePath) {
@ -27,6 +32,15 @@ void Clip::toFile(const QString& filePath, const Clip::ConstPointer& clip) {
FileClip::write(filePath, clip->duplicate());
}
QByteArray Clip::toBuffer(const Clip::ConstPointer& clip) {
QBuffer buffer;
if (buffer.open(QFile::Truncate | QFile::WriteOnly)) {
clip->duplicate()->write(buffer);
buffer.close();
}
return buffer.data();
}
Clip::Pointer Clip::newClip() {
return std::make_shared<BufferClip>();
}
@ -37,4 +51,70 @@ void Clip::seek(float offset) {
float Clip::position() const {
return Frame::frameTimeToSeconds(positionFrameTime());
};
}
// FIXME move to frame?
bool writeFrame(QIODevice& output, const Frame& frame, bool compressed = true) {
if (frame.type == Frame::TYPE_INVALID) {
qWarning() << "Attempting to write invalid frame";
return true;
}
auto written = output.write((char*)&(frame.type), sizeof(FrameType));
if (written != sizeof(FrameType)) {
return false;
}
//qDebug() << "Writing frame with time offset " << frame.timeOffset;
written = output.write((char*)&(frame.timeOffset), sizeof(Frame::Time));
if (written != sizeof(Frame::Time)) {
return false;
}
QByteArray frameData = frame.data;
if (compressed) {
frameData = qCompress(frameData);
}
uint16_t dataSize = frameData.size();
written = output.write((char*)&dataSize, sizeof(FrameSize));
if (written != sizeof(uint16_t)) {
return false;
}
if (dataSize != 0) {
written = output.write(frameData);
if (written != dataSize) {
return false;
}
}
return true;
}
const QString Clip::FRAME_TYPE_MAP = QStringLiteral("frameTypes");
const QString Clip::FRAME_COMREPSSION_FLAG = QStringLiteral("compressed");
bool Clip::write(QIODevice& output) {
auto frameTypes = Frame::getFrameTypes();
QJsonObject frameTypeObj;
for (const auto& frameTypeName : frameTypes.keys()) {
frameTypeObj[frameTypeName] = frameTypes[frameTypeName];
}
QJsonObject rootObject;
rootObject.insert(FRAME_TYPE_MAP, frameTypeObj);
// Always mark new files as compressed
rootObject.insert(FRAME_COMREPSSION_FLAG, true);
QByteArray headerFrameData = QJsonDocument(rootObject).toBinaryData();
// Never compress the header frame
if (!writeFrame(output, Frame({ Frame::TYPE_HEADER, 0, headerFrameData }), false)) {
return false;
}
seek(0);
for (auto frame = nextFrame(); frame; frame = nextFrame()) {
if (!writeFrame(output, *frame)) {
return false;
}
}
return true;
}

View file

@ -47,10 +47,16 @@ public:
virtual void skipFrame() = 0;
virtual void addFrame(FrameConstPointer) = 0;
bool write(QIODevice& output);
static Pointer fromFile(const QString& filePath);
static void toFile(const QString& filePath, const ConstPointer& clip);
static QByteArray toBuffer(const ConstPointer& clip);
static Pointer newClip();
static const QString FRAME_TYPE_MAP;
static const QString FRAME_COMREPSSION_FLAG;
protected:
friend class WrapperClip;
using Mutex = std::recursive_mutex;

View file

@ -0,0 +1,40 @@
//
// Created by Bradley Austin Davis on 2015/11/19
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ClipCache.h"
#include "impl/PointerClip.h"
using namespace recording;
NetworkClipLoader::NetworkClipLoader(const QUrl& url, bool delayLoad)
: Resource(url, delayLoad), _clip(std::make_shared<NetworkClip>(url))
{
}
void NetworkClip::init(const QByteArray& clipData) {
_clipData = clipData;
PointerClip::init((uchar*)_clipData.data(), _clipData.size());
}
void NetworkClipLoader::downloadFinished(const QByteArray& data) {
_clip->init(data);
}
ClipCache& ClipCache::instance() {
static ClipCache _instance;
return _instance;
}
NetworkClipLoaderPointer ClipCache::getClipLoader(const QUrl& url) {
return ResourceCache::getResource(url, QUrl(), false, nullptr).staticCast<NetworkClipLoader>();
}
QSharedPointer<Resource> ClipCache::createResource(const QUrl& url, const QSharedPointer<Resource>& fallback, bool delayLoad, const void* extra) {
return QSharedPointer<Resource>(new NetworkClipLoader(url, delayLoad), &Resource::allReferencesCleared);
}

View file

@ -0,0 +1,57 @@
//
// Created by Bradley Austin Davis on 2015/11/19
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Recording_ClipCache_h
#define hifi_Recording_ClipCache_h
#include <ResourceCache.h>
#include "Forward.h"
#include "impl/PointerClip.h"
namespace recording {
class NetworkClip : public PointerClip {
public:
using Pointer = std::shared_ptr<NetworkClip>;
NetworkClip(const QUrl& url) : _url(url) {}
virtual void init(const QByteArray& clipData);
virtual QString getName() const override { return _url.toString(); }
private:
QByteArray _clipData;
QUrl _url;
};
class NetworkClipLoader : public Resource {
public:
NetworkClipLoader(const QUrl& url, bool delayLoad);
virtual void downloadFinished(const QByteArray& data) override;
ClipPointer getClip() { return _clip; }
bool completed() { return _failedToLoad || isLoaded(); }
private:
const NetworkClip::Pointer _clip;
};
using NetworkClipLoaderPointer = QSharedPointer<NetworkClipLoader>;
class ClipCache : public ResourceCache {
public:
static ClipCache& instance();
NetworkClipLoaderPointer getClipLoader(const QUrl& url);
protected:
virtual QSharedPointer<Resource> createResource(const QUrl& url, const QSharedPointer<Resource>& fallback, bool delayLoad, const void* extra) override;
};
}
#endif

View file

@ -11,8 +11,6 @@
#include <algorithm>
#include <QtCore/QDebug>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>
#include <Finally.h>
@ -23,63 +21,6 @@
using namespace recording;
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(Frame::Time) + sizeof(FrameSize);
static const QString FRAME_TYPE_MAP = QStringLiteral("frameTypes");
static const QString FRAME_COMREPSSION_FLAG = QStringLiteral("compressed");
using FrameTranslationMap = QMap<FrameType, FrameType>;
FrameTranslationMap parseTranslationMap(const QJsonDocument& doc) {
FrameTranslationMap results;
auto headerObj = doc.object();
if (headerObj.contains(FRAME_TYPE_MAP)) {
auto frameTypeObj = headerObj[FRAME_TYPE_MAP].toObject();
auto currentFrameTypes = Frame::getFrameTypes();
for (auto frameTypeName : frameTypeObj.keys()) {
qDebug() << frameTypeName;
if (!currentFrameTypes.contains(frameTypeName)) {
continue;
}
FrameType currentTypeEnum = currentFrameTypes[frameTypeName];
FrameType storedTypeEnum = static_cast<FrameType>(frameTypeObj[frameTypeName].toInt());
results[storedTypeEnum] = currentTypeEnum;
}
}
return results;
}
FileFrameHeaderList parseFrameHeaders(uchar* const start, const qint64& size) {
FileFrameHeaderList results;
auto current = start;
auto end = current + size;
// Read all the frame headers
// FIXME move to Frame::readHeader?
while (end - current >= MINIMUM_FRAME_SIZE) {
FileFrameHeader header;
memcpy(&(header.type), current, sizeof(FrameType));
current += sizeof(FrameType);
memcpy(&(header.timeOffset), current, sizeof(Frame::Time));
current += sizeof(Frame::Time);
memcpy(&(header.size), current, sizeof(FrameSize));
current += sizeof(FrameSize);
header.fileOffset = current - start;
if (end - current < header.size) {
current = end;
break;
}
current += header.size;
results.push_back(header);
}
qDebug() << "Parsed source data into " << results.size() << " frames";
// int i = 0;
// for (const auto& frameHeader : results) {
// qDebug() << "Frame " << i++ << " time " << frameHeader.timeOffset << " Type " << frameHeader.type;
// }
return results;
}
FileClip::FileClip(const QString& fileName) : _file(fileName) {
auto size = _file.size();
qDebug() << "Opening file of size: " << size;
@ -88,58 +29,8 @@ FileClip::FileClip(const QString& fileName) : _file(fileName) {
qCWarning(recordingLog) << "Unable to open file " << fileName;
return;
}
_map = _file.map(0, size, QFile::MapPrivateOption);
if (!_map) {
qCWarning(recordingLog) << "Unable to map file " << fileName;
return;
}
auto parsedFrameHeaders = parseFrameHeaders(_map, size);
// Verify that at least one frame exists and that the first frame is a header
if (0 == parsedFrameHeaders.size()) {
qWarning() << "No frames found, invalid file";
return;
}
// Grab the file header
{
auto fileHeaderFrameHeader = *parsedFrameHeaders.begin();
parsedFrameHeaders.pop_front();
if (fileHeaderFrameHeader.type != Frame::TYPE_HEADER) {
qWarning() << "Missing header frame, invalid file";
return;
}
QByteArray fileHeaderData((char*)_map + fileHeaderFrameHeader.fileOffset, fileHeaderFrameHeader.size);
_fileHeader = QJsonDocument::fromBinaryData(fileHeaderData);
}
// Check for compression
{
_compressed = _fileHeader.object()[FRAME_COMREPSSION_FLAG].toBool();
}
// Find the type enum translation map and fix up the frame headers
{
FrameTranslationMap translationMap = parseTranslationMap(_fileHeader);
if (translationMap.empty()) {
qWarning() << "Header missing frame type map, invalid file";
return;
}
qDebug() << translationMap;
// Update the loaded headers with the frame data
_frames.reserve(parsedFrameHeaders.size());
for (auto& frameHeader : parsedFrameHeaders) {
if (!translationMap.contains(frameHeader.type)) {
continue;
}
frameHeader.type = translationMap[frameHeader.type];
_frames.push_back(frameHeader);
}
}
auto mappedFile = _file.map(0, size, QFile::MapPrivateOption);
init(mappedFile, size);
}
@ -147,41 +38,7 @@ QString FileClip::getName() const {
return _file.fileName();
}
// FIXME move to frame?
bool writeFrame(QIODevice& output, const Frame& frame, bool compressed = true) {
if (frame.type == Frame::TYPE_INVALID) {
qWarning() << "Attempting to write invalid frame";
return true;
}
auto written = output.write((char*)&(frame.type), sizeof(FrameType));
if (written != sizeof(FrameType)) {
return false;
}
//qDebug() << "Writing frame with time offset " << frame.timeOffset;
written = output.write((char*)&(frame.timeOffset), sizeof(Frame::Time));
if (written != sizeof(Frame::Time)) {
return false;
}
QByteArray frameData = frame.data;
if (compressed) {
frameData = qCompress(frameData);
}
uint16_t dataSize = frameData.size();
written = output.write((char*)&dataSize, sizeof(FrameSize));
if (written != sizeof(uint16_t)) {
return false;
}
if (dataSize != 0) {
written = output.write(frameData);
if (written != dataSize) {
return false;
}
}
return true;
}
bool FileClip::write(const QString& fileName, Clip::Pointer clip) {
// FIXME need to move this to a different thread
@ -197,62 +54,14 @@ bool FileClip::write(const QString& fileName, Clip::Pointer clip) {
}
Finally closer([&] { outputFile.close(); });
{
auto frameTypes = Frame::getFrameTypes();
QJsonObject frameTypeObj;
for (const auto& frameTypeName : frameTypes.keys()) {
frameTypeObj[frameTypeName] = frameTypes[frameTypeName];
}
QJsonObject rootObject;
rootObject.insert(FRAME_TYPE_MAP, frameTypeObj);
// Always mark new files as compressed
rootObject.insert(FRAME_COMREPSSION_FLAG, true);
QByteArray headerFrameData = QJsonDocument(rootObject).toBinaryData();
// Never compress the header frame
if (!writeFrame(outputFile, Frame({ Frame::TYPE_HEADER, 0, headerFrameData }), false)) {
return false;
}
}
clip->seek(0);
for (auto frame = clip->nextFrame(); frame; frame = clip->nextFrame()) {
if (!writeFrame(outputFile, *frame)) {
return false;
}
}
outputFile.close();
return true;
return clip->write(outputFile);
}
FileClip::~FileClip() {
Locker lock(_mutex);
_file.unmap(_map);
_map = nullptr;
_file.unmap(_data);
if (_file.isOpen()) {
_file.close();
}
}
// Internal only function, needs no locking
FrameConstPointer FileClip::readFrame(size_t frameIndex) const {
FramePointer result;
if (frameIndex < _frames.size()) {
result = std::make_shared<Frame>();
const auto& header = _frames[frameIndex];
result->type = header.type;
result->timeOffset = header.timeOffset;
if (header.size) {
result->data.insert(0, reinterpret_cast<char*>(_map)+header.fileOffset, header.size);
if (_compressed) {
result->data = qUncompress(result->data);
}
}
}
return result;
}
void FileClip::addFrame(FrameConstPointer) {
throw std::runtime_error("File clips are read only");
reset();
}

View file

@ -10,27 +10,13 @@
#ifndef hifi_Recording_Impl_FileClip_h
#define hifi_Recording_Impl_FileClip_h
#include "ArrayClip.h"
#include <mutex>
#include "PointerClip.h"
#include <QtCore/QFile>
#include <QtCore/QJsonDocument>
#include "../Frame.h"
namespace recording {
struct FileFrameHeader : public FrameHeader {
FrameType type;
Frame::Time timeOffset;
uint16_t size;
quint64 fileOffset;
};
using FileFrameHeaderList = std::list<FileFrameHeader>;
class FileClip : public ArrayClip<FileFrameHeader> {
class FileClip : public PointerClip {
public:
using Pointer = std::shared_ptr<FileClip>;
@ -38,20 +24,11 @@ public:
virtual ~FileClip();
virtual QString getName() const override;
virtual void addFrame(FrameConstPointer) override;
const QJsonDocument& getHeader() {
return _fileHeader;
}
static bool write(const QString& filePath, Clip::Pointer clip);
private:
virtual FrameConstPointer readFrame(size_t index) const override;
QJsonDocument _fileHeader;
QFile _file;
uchar* _map { nullptr };
bool _compressed { true };
};
}

View file

@ -0,0 +1,160 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "PointerClip.h"
#include <algorithm>
#include <QtCore/QDebug>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>
#include <Finally.h>
#include "../Frame.h"
#include "../Logging.h"
#include "BufferClip.h"
using namespace recording;
using FrameTranslationMap = QMap<FrameType, FrameType>;
FrameTranslationMap parseTranslationMap(const QJsonDocument& doc) {
FrameTranslationMap results;
auto headerObj = doc.object();
if (headerObj.contains(Clip::FRAME_TYPE_MAP)) {
auto frameTypeObj = headerObj[Clip::FRAME_TYPE_MAP].toObject();
auto currentFrameTypes = Frame::getFrameTypes();
for (auto frameTypeName : frameTypeObj.keys()) {
qDebug() << frameTypeName;
if (!currentFrameTypes.contains(frameTypeName)) {
continue;
}
FrameType currentTypeEnum = currentFrameTypes[frameTypeName];
FrameType storedTypeEnum = static_cast<FrameType>(frameTypeObj[frameTypeName].toInt());
results[storedTypeEnum] = currentTypeEnum;
}
}
return results;
}
PointerFrameHeaderList parseFrameHeaders(uchar* const start, const size_t& size) {
PointerFrameHeaderList results;
auto current = start;
auto end = current + size;
// Read all the frame headers
// FIXME move to Frame::readHeader?
while (end - current >= PointerClip::MINIMUM_FRAME_SIZE) {
PointerFrameHeader header;
memcpy(&(header.type), current, sizeof(FrameType));
current += sizeof(FrameType);
memcpy(&(header.timeOffset), current, sizeof(Frame::Time));
current += sizeof(Frame::Time);
memcpy(&(header.size), current, sizeof(FrameSize));
current += sizeof(FrameSize);
header.fileOffset = current - start;
if (end - current < header.size) {
current = end;
break;
}
current += header.size;
results.push_back(header);
}
qDebug() << "Parsed source data into " << results.size() << " frames";
// int i = 0;
// for (const auto& frameHeader : results) {
// qDebug() << "Frame " << i++ << " time " << frameHeader.timeOffset << " Type " << frameHeader.type;
// }
return results;
}
void PointerClip::reset() {
_frames.clear();
_data = nullptr;
_size = 0;
_header = QJsonDocument();
}
void PointerClip::init(uchar* data, size_t size) {
reset();
_data = data;
_size = size;
auto parsedFrameHeaders = parseFrameHeaders(data, size);
// Verify that at least one frame exists and that the first frame is a header
if (0 == parsedFrameHeaders.size()) {
qWarning() << "No frames found, invalid file";
reset();
return;
}
// Grab the file header
{
auto fileHeaderFrameHeader = *parsedFrameHeaders.begin();
parsedFrameHeaders.pop_front();
if (fileHeaderFrameHeader.type != Frame::TYPE_HEADER) {
qWarning() << "Missing header frame, invalid file";
reset();
return;
}
QByteArray fileHeaderData((char*)_data + fileHeaderFrameHeader.fileOffset, fileHeaderFrameHeader.size);
_header = QJsonDocument::fromBinaryData(fileHeaderData);
}
// Check for compression
{
_compressed = _header.object()[FRAME_COMREPSSION_FLAG].toBool();
}
// Find the type enum translation map and fix up the frame headers
{
FrameTranslationMap translationMap = parseTranslationMap(_header);
if (translationMap.empty()) {
qWarning() << "Header missing frame type map, invalid file";
reset();
return;
}
// Update the loaded headers with the frame data
_frames.reserve(parsedFrameHeaders.size());
for (auto& frameHeader : parsedFrameHeaders) {
if (!translationMap.contains(frameHeader.type)) {
continue;
}
frameHeader.type = translationMap[frameHeader.type];
_frames.push_back(frameHeader);
}
}
}
// Internal only function, needs no locking
FrameConstPointer PointerClip::readFrame(size_t frameIndex) const {
FramePointer result;
if (frameIndex < _frames.size()) {
result = std::make_shared<Frame>();
const auto& header = _frames[frameIndex];
result->type = header.type;
result->timeOffset = header.timeOffset;
if (header.size) {
result->data.insert(0, reinterpret_cast<char*>(_data)+header.fileOffset, header.size);
if (_compressed) {
result->data = qUncompress(result->data);
}
}
}
return result;
}
void PointerClip::addFrame(FrameConstPointer) {
throw std::runtime_error("Pointer clips are read only, use duplicate to create a read/write clip");
}

View file

@ -0,0 +1,58 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Recording_Impl_PointerClip_h
#define hifi_Recording_Impl_PointerClip_h
#include "ArrayClip.h"
#include <mutex>
#include <QtCore/QJsonDocument>
#include "../Frame.h"
namespace recording {
struct PointerFrameHeader : public FrameHeader {
FrameType type;
Frame::Time timeOffset;
uint16_t size;
quint64 fileOffset;
};
using PointerFrameHeaderList = std::list<PointerFrameHeader>;
class PointerClip : public ArrayClip<PointerFrameHeader> {
public:
using Pointer = std::shared_ptr<PointerClip>;
PointerClip() {};
PointerClip(uchar* data, size_t size) { init(data, size); }
void init(uchar* data, size_t size);
virtual void addFrame(FrameConstPointer) override;
const QJsonDocument& getHeader() const {
return _header;
}
// FIXME move to frame?
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(Frame::Time) + sizeof(FrameSize);
protected:
void reset();
virtual FrameConstPointer readFrame(size_t index) const override;
QJsonDocument _header;
uchar* _data { nullptr };
size_t _size { 0 };
bool _compressed { true };
};
}
#endif

View file

@ -8,14 +8,15 @@
#include "RecordingScriptingInterface.h"
#include <QThread>
#include <QtCore/QThread>
#include <NumericalConstants.h>
#include <Transform.h>
#include <recording/Deck.h>
#include <recording/Recorder.h>
#include <recording/Clip.h>
#include <recording/Frame.h>
#include <NumericalConstants.h>
#include <Transform.h>
#include <recording/ClipCache.h>
#include "ScriptEngineLogging.h"
@ -43,22 +44,25 @@ float RecordingScriptingInterface::playerLength() const {
return _player->length();
}
void RecordingScriptingInterface::loadRecording(const QString& filename) {
bool RecordingScriptingInterface::loadRecording(const QString& url) {
using namespace recording;
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection,
Q_ARG(QString, filename));
return;
auto loader = ClipCache::instance().getClipLoader(url);
QEventLoop loop;
QObject::connect(loader.data(), &Resource::loaded, &loop, &QEventLoop::quit);
QObject::connect(loader.data(), &Resource::failed, &loop, &QEventLoop::quit);
loop.exec();
if (!loader->isLoaded()) {
qWarning() << "Clip failed to load from " << url;
return false;
}
ClipPointer clip = Clip::fromFile(filename);
if (!clip) {
qWarning() << "Unable to load clip data from " << filename;
}
_player->queueClip(clip);
_player->queueClip(loader->getClip());
return true;
}
void RecordingScriptingInterface::startPlaying() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection);
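Because loadRecording() above now resolves its argument through ClipCache and reports success, a script can pass a network URL instead of a local file path and branch on the result. A sketch using the Recording global registered in ScriptEngine; the clip URL is illustrative:

// Load a clip over the network and only start playback if the cache
// reports a successful load (loadRecording now returns a bool).
var clipUrl = "http://example.com/recordings/demo.hfr";  // illustrative URL
if (Recording.loadRecording(clipUrl)) {
    Recording.startPlaying();
} else {
    print("Could not load clip from " + clipUrl);
}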

View file

@ -12,7 +12,7 @@
#include <atomic>
#include <mutex>
#include <QObject>
#include <QtCore/QObject>
#include <DependencyManager.h>
#include <recording/Forward.h>
@ -25,7 +25,7 @@ public:
RecordingScriptingInterface();
public slots:
void loadRecording(const QString& filename);
bool loadRecording(const QString& url);
void startPlaying();
void pausePlayer();

View file

@ -381,6 +381,9 @@ void ScriptEngine::init() {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
registerGlobalObject("Recording", recordingInterface.data());
registerGlobalObject("Assets", &_assetScriptingInterface);
}
void ScriptEngine::registerValue(const QString& valueName, QScriptValue value) {

View file

@ -23,6 +23,7 @@
#include <AnimationCache.h>
#include <AnimVariant.h>
#include <AssetClient.h>
#include <AvatarData.h>
#include <AvatarHashMap.h>
#include <LimitedNodeList.h>
@ -195,6 +196,8 @@ private:
ArrayBufferClass* _arrayBufferClass;
AssetScriptingInterface _assetScriptingInterface;
QHash<EntityItemID, RegisteredEventHandlers> _registeredHandlers;
void forwardHandlerCall(const EntityItemID& entityID, const QString& eventName, QScriptValueList eventHanderArgs);
Q_INVOKABLE void entityScriptContentAvailable(const EntityItemID& entityID, const QString& scriptOrURL, const QString& contents, bool isURL, bool success);

View file

@ -29,6 +29,7 @@ glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseP
void OculusBaseDisplayPlugin::resetSensors() {
#if (OVR_MAJOR_VERSION >= 6)
ovr_RecenterPose(_hmd);
preRender();
#endif
}