Mirror of https://github.com/lubosz/overte.git (synced 2025-04-23 20:34:07 +02:00)
Merge branch 'master' of https://github.com/highfidelity/hifi into audio-reverb
Commit a725c8b12e
259 changed files with 9442 additions and 7927 deletions
BUILD_OSX.md (13 changes)
@ -7,11 +7,18 @@ Please read the [general build guide](BUILD.md) for information on dependencies

We no longer require installing Qt 5 via our [homebrew formulas repository](https://github.com/highfidelity/homebrew-formulas). Versions of Qt that are 5.5.x and above provide a mechanism to disable the wireless scanning we previously had a custom patch for.

###Qt
###OpenSSL and Qt

Assuming you've installed Qt 5 using the homebrew instructions above, you'll need to set QT_CMAKE_PREFIX_PATH so CMake can find your installation of Qt. For Qt 5.5.1 installed via homebrew, set QT_CMAKE_PREFIX_PATH as follows.
Assuming you've installed OpenSSL or Qt 5 using the homebrew instructions above, you'll need to set OPENSSL_ROOT_DIR and QT_CMAKE_PREFIX_PATH so CMake can find your installations.
For OpenSSL installed via homebrew, set OPENSSL_ROOT_DIR:

export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.5.1/lib/cmake
export OPENSSL_ROOT_DIR=/usr/local/Cellar/openssl/1.0.2d_1

For Qt 5.5.1 installed via homebrew, set QT_CMAKE_PREFIX_PATH as follows.

export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.5.1_2/lib/cmake

Note that these use the versions from homebrew formulae at the time of this writing, and the version in the path will likely change.

###Xcode
If Xcode is your editor of choice, you can ask CMake to generate Xcode project files instead of Unix Makefiles.
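To tie the updated instructions together, here is a minimal shell sketch; the `brew --prefix` calls and the trailing cmake invocation are illustrative assumptions rather than part of the diff, and the prefixes printed on your own machine are authoritative:

    # query Homebrew for the current install prefixes instead of copying versioned Cellar paths
    export OPENSSL_ROOT_DIR=$(brew --prefix openssl)
    export QT_CMAKE_PREFIX_PATH=$(brew --prefix qt5)/lib/cmake
    # then configure from a build directory; add -G Xcode if you want Xcode project files
    mkdir -p build && cd build
    cmake ..

Using `brew --prefix` sidesteps the version drift the note above warns about, since it always points at the currently linked keg.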
@ -202,6 +202,8 @@ if (NOT ANDROID)
|
|||
set_target_properties(ice-server PROPERTIES FOLDER "Apps")
|
||||
add_subdirectory(interface)
|
||||
set_target_properties(interface PROPERTIES FOLDER "Apps")
|
||||
add_subdirectory(stack-manager)
|
||||
set_target_properties(stack-manager PROPERTIES FOLDER "Apps")
|
||||
add_subdirectory(tests)
|
||||
add_subdirectory(plugins)
|
||||
add_subdirectory(tools)
|
||||
|
|
|
@ -10,5 +10,4 @@ link_hifi_libraries(
|
|||
)
|
||||
|
||||
include_application_version()
|
||||
|
||||
copy_dlls_beside_windows_executable()
|
||||
package_libraries_for_deployment()
|
||||
|
|
|
@ -17,11 +17,14 @@
|
|||
#include <QtNetwork/QNetworkReply>
|
||||
|
||||
#include <AvatarHashMap.h>
|
||||
#include <AudioInjectorManager.h>
|
||||
#include <AssetClient.h>
|
||||
#include <MessagesClient.h>
|
||||
#include <NetworkAccessManager.h>
|
||||
#include <NodeList.h>
|
||||
#include <udt/PacketHeaders.h>
|
||||
#include <ResourceCache.h>
|
||||
#include <ScriptCache.h>
|
||||
#include <SoundCache.h>
|
||||
#include <UUID.h>
|
||||
|
||||
|
@ -50,14 +53,23 @@ Agent::Agent(NLPacket& packet) :
|
|||
{
|
||||
DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
|
||||
|
||||
auto assetClient = DependencyManager::set<AssetClient>();
|
||||
|
||||
QThread* assetThread = new QThread;
|
||||
assetThread->setObjectName("Asset Thread");
|
||||
assetClient->moveToThread(assetThread);
|
||||
connect(assetThread, &QThread::started, assetClient.data(), &AssetClient::init);
|
||||
assetThread->start();
|
||||
|
||||
DependencyManager::set<ResourceCacheSharedItems>();
|
||||
DependencyManager::set<SoundCache>();
|
||||
DependencyManager::set<AudioInjectorManager>();
|
||||
DependencyManager::set<recording::Deck>();
|
||||
DependencyManager::set<recording::Recorder>();
|
||||
DependencyManager::set<RecordingScriptingInterface>();
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
|
||||
|
||||
packetReceiver.registerListenerForTypes(
|
||||
{ PacketType::MixedAudio, PacketType::SilentAudioFrame },
|
||||
this, "handleAudioPacket");
|
||||
|
@ -76,7 +88,7 @@ void Agent::handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointe
|
|||
if (packet->getPayloadSize() > statsMessageLength) {
|
||||
// pull out the piggybacked packet and create a new QSharedPointer<NLPacket> for it
|
||||
int piggyBackedSizeWithHeader = packet->getPayloadSize() - statsMessageLength;
|
||||
|
||||
|
||||
auto buffer = std::unique_ptr<char[]>(new char[piggyBackedSizeWithHeader]);
|
||||
memcpy(buffer.get(), packet->getPayload() + statsMessageLength, piggyBackedSizeWithHeader);
|
||||
|
||||
|
@ -116,6 +128,11 @@ void Agent::handleAudioPacket(QSharedPointer<NLPacket> packet) {
|
|||
const QString AGENT_LOGGING_NAME = "agent";
|
||||
|
||||
void Agent::run() {
|
||||
|
||||
// make sure we request our script once the agent connects to the domain
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
connect(&nodeList->getDomainHandler(), &DomainHandler::connectedToDomain, this, &Agent::requestScript);
|
||||
|
||||
ThreadedAssignment::commonInit(AGENT_LOGGING_NAME, NodeType::Agent);
|
||||
|
||||
// Setup MessagesClient
|
||||
|
@ -126,49 +143,76 @@ void Agent::run() {
|
|||
connect(messagesThread, &QThread::started, messagesClient.data(), &MessagesClient::init);
|
||||
messagesThread->start();
|
||||
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet({
|
||||
NodeType::AudioMixer, NodeType::AvatarMixer, NodeType::EntityServer, NodeType::MessagesMixer, NodeType::AssetServer
|
||||
});
|
||||
}
|
||||
|
||||
void Agent::requestScript() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->addSetOfNodeTypesToNodeInterestSet(NodeSet()
|
||||
<< NodeType::AudioMixer
|
||||
<< NodeType::AvatarMixer
|
||||
<< NodeType::EntityServer
|
||||
<< NodeType::MessagesMixer
|
||||
);
|
||||
disconnect(&nodeList->getDomainHandler(), &DomainHandler::connectedToDomain, this, &Agent::requestScript);
|
||||
|
||||
// figure out the URL for the script for this agent assignment
|
||||
QUrl scriptURL;
|
||||
if (_payload.isEmpty()) {
|
||||
scriptURL = QUrl(QString("http://%1:%2/assignment/%3")
|
||||
.arg(DependencyManager::get<NodeList>()->getDomainHandler().getIP().toString())
|
||||
.arg(DOMAIN_SERVER_HTTP_PORT)
|
||||
.arg(uuidStringWithoutCurlyBraces(_uuid)));
|
||||
scriptURL = QUrl(QString("http://%1:%2/assignment/%3/")
|
||||
.arg(nodeList->getDomainHandler().getIP().toString())
|
||||
.arg(DOMAIN_SERVER_HTTP_PORT)
|
||||
.arg(uuidStringWithoutCurlyBraces(nodeList->getSessionUUID())));
|
||||
} else {
|
||||
scriptURL = QUrl(_payload);
|
||||
}
|
||||
|
||||
// setup a network access manager and
|
||||
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
|
||||
QNetworkRequest networkRequest = QNetworkRequest(scriptURL);
|
||||
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
||||
QNetworkReply* reply = networkAccessManager.get(networkRequest);
|
||||
|
||||
QNetworkDiskCache* cache = new QNetworkDiskCache();
|
||||
QString cachePath = QStandardPaths::writableLocation(QStandardPaths::DataLocation);
|
||||
cache->setCacheDirectory(!cachePath.isEmpty() ? cachePath : "agentCache");
|
||||
networkAccessManager.setCache(cache);
|
||||
|
||||
QNetworkRequest networkRequest = QNetworkRequest(scriptURL);
|
||||
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
||||
|
||||
// setup a timeout for script request
|
||||
static const int SCRIPT_TIMEOUT_MS = 10000;
|
||||
_scriptRequestTimeout = new QTimer(this);
|
||||
connect(_scriptRequestTimeout, &QTimer::timeout, this, &Agent::scriptRequestFinished);
|
||||
_scriptRequestTimeout->start(SCRIPT_TIMEOUT_MS);
|
||||
|
||||
qDebug() << "Downloading script at" << scriptURL.toString();
|
||||
QNetworkReply* reply = networkAccessManager.get(networkRequest);
|
||||
connect(reply, &QNetworkReply::finished, this, &Agent::scriptRequestFinished);
|
||||
}
|
||||
|
||||
QEventLoop loop;
|
||||
QObject::connect(reply, SIGNAL(finished()), &loop, SLOT(quit()));
|
||||
void Agent::scriptRequestFinished() {
|
||||
auto reply = qobject_cast<QNetworkReply*>(sender());
|
||||
|
||||
loop.exec();
|
||||
_scriptRequestTimeout->stop();
|
||||
|
||||
QString scriptContents(reply->readAll());
|
||||
delete reply;
|
||||
if (reply && reply->error() == QNetworkReply::NoError) {
|
||||
_scriptContents = reply->readAll();
|
||||
qDebug() << "Downloaded script:" << _scriptContents;
|
||||
|
||||
qDebug() << "Downloaded script:" << scriptContents;
|
||||
// we could just call executeScript directly - we use a QueuedConnection to allow scriptRequestFinished
|
||||
// to return before calling executeScript
|
||||
QMetaObject::invokeMethod(this, "executeScript", Qt::QueuedConnection);
|
||||
} else {
|
||||
if (reply) {
|
||||
qDebug() << "Failed to download script at" << reply->url().toString() << " - bailing on assignment.";
|
||||
qDebug() << "QNetworkReply error was" << reply->errorString();
|
||||
} else {
|
||||
qDebug() << "Failed to download script - request timed out. Bailing on assignment.";
|
||||
}
|
||||
|
||||
_scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(scriptContents, _payload));
|
||||
setFinished(true);
|
||||
}
|
||||
|
||||
reply->deleteLater();
|
||||
}
|
||||
|
||||
void Agent::executeScript() {
|
||||
_scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(_scriptContents, _payload));
|
||||
_scriptEngine->setParent(this); // be the parent of the script engine so it gets moved when we do
|
||||
|
||||
// setup an Avatar for the script to use
|
||||
|
@ -185,14 +229,13 @@ void Agent::run() {
|
|||
|
||||
using namespace recording;
|
||||
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
|
||||
// FIXME how to deal with driving multiple avatars locally?
|
||||
// FIXME how to deal with driving multiple avatars locally?
|
||||
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this, scriptedAvatar](Frame::ConstPointer frame) {
|
||||
AvatarData::fromFrame(frame->data, *scriptedAvatar);
|
||||
});
|
||||
|
||||
|
||||
using namespace recording;
|
||||
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::AUDIO_FRAME_NAME);
|
||||
static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
|
||||
Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
|
||||
const QByteArray& audio = frame->data;
|
||||
static quint16 audioSequenceNumber{ 0 };
|
||||
|
@ -202,8 +245,6 @@ void Agent::run() {
|
|||
AbstractAudioInterface::emitAudioPacket(audio.data(), audio.size(), audioSequenceNumber, audioTransform, PacketType::MicrophoneAudioNoEcho);
|
||||
});
|
||||
|
||||
|
||||
|
||||
auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
|
||||
_scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());
|
||||
|
||||
|
@ -216,7 +257,7 @@ void Agent::run() {
|
|||
// register ourselves to the script engine
|
||||
_scriptEngine->registerGlobalObject("Agent", this);
|
||||
|
||||
// FIXME -we shouldn't be calling this directly, it's normally called by run(), not sure why
|
||||
// FIXME -we shouldn't be calling this directly, it's normally called by run(), not sure why
|
||||
// viewers would need this called.
|
||||
//_scriptEngine->init(); // must be done before we set up the viewers
|
||||
|
||||
|
@ -228,14 +269,14 @@ void Agent::run() {
|
|||
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
|
||||
|
||||
_scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);
|
||||
|
||||
|
||||
// we need to make sure that init has been called for our EntityScriptingInterface
|
||||
// so that it actually has a jurisdiction listener when we ask it for it next
|
||||
entityScriptingInterface->init();
|
||||
_entityViewer.setJurisdictionListener(entityScriptingInterface->getJurisdictionListener());
|
||||
|
||||
|
||||
_entityViewer.init();
|
||||
|
||||
|
||||
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
|
||||
|
||||
// wire up our additional agent related processing to the update signal
|
||||
|
@ -249,6 +290,11 @@ void Agent::run() {
|
|||
setFinished(true);
|
||||
}
|
||||
|
||||
QUuid Agent::getSessionUUID() const {
|
||||
return DependencyManager::get<NodeList>()->getSessionUUID();
|
||||
}
|
||||
|
||||
|
||||
void Agent::setIsAvatar(bool isAvatar) {
|
||||
_isAvatar = isAvatar;
|
||||
|
||||
|
@ -273,7 +319,7 @@ void Agent::setIsAvatar(bool isAvatar) {
|
|||
delete _avatarIdentityTimer;
|
||||
_avatarIdentityTimer = nullptr;
|
||||
}
|
||||
|
||||
|
||||
if (_avatarBillboardTimer) {
|
||||
_avatarBillboardTimer->stop();
|
||||
delete _avatarBillboardTimer;
|
||||
|
@ -374,7 +420,7 @@ void Agent::processAgentAvatarAndAudio(float deltaTime) {
|
|||
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
|
||||
audioPacket->writePrimitive(headOrientation);
|
||||
|
||||
}else if (nextSoundOutput) {
|
||||
} else if (nextSoundOutput) {
|
||||
// assume scripted avatar audio is mono and set channel flag to zero
|
||||
audioPacket->writePrimitive((quint8)0);
|
||||
|
||||
|
@ -407,10 +453,20 @@ void Agent::processAgentAvatarAndAudio(float deltaTime) {
|
|||
|
||||
void Agent::aboutToFinish() {
|
||||
setIsAvatar(false);// will stop timers for sending billboards and identity packets
|
||||
|
||||
if (_scriptEngine) {
|
||||
_scriptEngine->stop();
|
||||
}
|
||||
|
||||
// our entity tree is going to go away so tell that to the EntityScriptingInterface
|
||||
DependencyManager::get<EntityScriptingInterface>()->setEntityTree(NULL);
|
||||
|
||||
// cleanup the AssetClient thread
|
||||
QThread* assetThread = DependencyManager::get<AssetClient>()->thread();
|
||||
DependencyManager::destroy<AssetClient>();
|
||||
assetThread->quit();
|
||||
assetThread->wait();
|
||||
|
||||
// cleanup the AudioInjectorManager (and any still running injectors)
|
||||
DependencyManager::destroy<AudioInjectorManager>();
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
#include <QtScript/QScriptEngine>
|
||||
#include <QtCore/QObject>
|
||||
#include <QtCore/QUrl>
|
||||
#include <QUuid>
|
||||
|
||||
#include <EntityEditPacketSender.h>
|
||||
#include <EntityTree.h>
|
||||
|
@ -30,14 +31,16 @@
|
|||
|
||||
class Agent : public ThreadedAssignment {
|
||||
Q_OBJECT
|
||||
|
||||
|
||||
Q_PROPERTY(bool isAvatar READ isAvatar WRITE setIsAvatar)
|
||||
Q_PROPERTY(bool isPlayingAvatarSound READ isPlayingAvatarSound)
|
||||
Q_PROPERTY(bool isListeningToAudioStream READ isListeningToAudioStream WRITE setIsListeningToAudioStream)
|
||||
Q_PROPERTY(float lastReceivedAudioLoudness READ getLastReceivedAudioLoudness)
|
||||
Q_PROPERTY(QUuid sessionUUID READ getSessionUUID)
|
||||
|
||||
public:
|
||||
Agent(NLPacket& packet);
|
||||
|
||||
|
||||
void setIsAvatar(bool isAvatar);
|
||||
bool isAvatar() const { return _isAvatar; }
|
||||
|
||||
|
@ -47,14 +50,19 @@ public:
|
|||
void setIsListeningToAudioStream(bool isListeningToAudioStream) { _isListeningToAudioStream = isListeningToAudioStream; }
|
||||
|
||||
float getLastReceivedAudioLoudness() const { return _lastReceivedAudioLoudness; }
|
||||
QUuid getSessionUUID() const;
|
||||
|
||||
virtual void aboutToFinish();
|
||||
|
||||
|
||||
public slots:
|
||||
void run();
|
||||
void playAvatarSound(Sound* avatarSound) { setAvatarSound(avatarSound); }
|
||||
|
||||
private slots:
|
||||
void requestScript();
|
||||
void scriptRequestFinished();
|
||||
void executeScript();
|
||||
|
||||
void handleAudioPacket(QSharedPointer<NLPacket> packet);
|
||||
void handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
|
||||
void handleJurisdictionPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
|
||||
|
@ -64,7 +72,7 @@ private:
|
|||
std::unique_ptr<ScriptEngine> _scriptEngine;
|
||||
EntityEditPacketSender _entityEditSender;
|
||||
EntityTreeHeadlessViewer _entityViewer;
|
||||
|
||||
|
||||
MixedAudioStream _receivedAudioStream;
|
||||
float _lastReceivedAudioLoudness;
|
||||
|
||||
|
@ -73,6 +81,8 @@ private:
|
|||
void sendAvatarIdentityPacket();
|
||||
void sendAvatarBillboardPacket();
|
||||
|
||||
QString _scriptContents;
|
||||
QTimer* _scriptRequestTimeout { nullptr };
|
||||
bool _isListeningToAudioStream = false;
|
||||
Sound* _avatarSound = nullptr;
|
||||
int _numAvatarSoundSentBytes = 0;
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <memory>
|
||||
#include <signal.h>
|
||||
|
||||
#include <AddressManager.h>
|
||||
|
@ -227,8 +228,9 @@ void AssignmentClientMonitor::handleChildStatusPacket(QSharedPointer<NLPacket> p
|
|||
matchingNode = DependencyManager::get<LimitedNodeList>()->addOrUpdateNode
|
||||
(senderID, NodeType::Unassigned, senderSockAddr, senderSockAddr, false, false);
|
||||
|
||||
childData = new AssignmentClientChildData(Assignment::Type::AllTypes);
|
||||
matchingNode->setLinkedData(childData);
|
||||
auto childData = std::unique_ptr<AssignmentClientChildData>
|
||||
{ new AssignmentClientChildData(Assignment::Type::AllTypes) };
|
||||
matchingNode->setLinkedData(std::move(childData));
|
||||
} else {
|
||||
// tell unknown assignment-client child to exit.
|
||||
qDebug() << "Asking unknown child at" << senderSockAddr << "to exit.";
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <math.h>
|
||||
#include <memory>
|
||||
#include <signal.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
@ -644,188 +645,187 @@ void AudioMixer::sendStatsPacket() {
|
|||
}
|
||||
|
||||
void AudioMixer::run() {
|
||||
|
||||
ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
nodeList->linkedDataCreateCallback = [](Node* node) {
|
||||
node->setLinkedData(new AudioMixerClientData());
|
||||
};
|
||||
|
||||
// wait until we have the domain-server settings, otherwise we bail
|
||||
DomainHandler& domainHandler = nodeList->getDomainHandler();
|
||||
|
||||
qDebug() << "Waiting for domain settings from domain-server.";
|
||||
|
||||
// block until we get the settingsRequestComplete signal
|
||||
QEventLoop loop;
|
||||
connect(&domainHandler, &DomainHandler::settingsReceived, &loop, &QEventLoop::quit);
|
||||
connect(&domainHandler, &DomainHandler::settingsReceiveFail, &loop, &QEventLoop::quit);
|
||||
domainHandler.requestDomainSettings();
|
||||
loop.exec();
|
||||
|
||||
if (domainHandler.getSettingsObject().isEmpty()) {
|
||||
qDebug() << "Failed to retreive settings object from domain-server. Bailing on assignment.";
|
||||
setFinished(true);
|
||||
return;
|
||||
}
|
||||
qDebug() << "Waiting for connection to domain to request settings from domain-server.";
|
||||
|
||||
// wait until we have the domain-server settings, otherwise we bail
|
||||
DomainHandler& domainHandler = DependencyManager::get<NodeList>()->getDomainHandler();
|
||||
connect(&domainHandler, &DomainHandler::settingsReceived, this, &AudioMixer::domainSettingsRequestComplete);
|
||||
connect(&domainHandler, &DomainHandler::settingsReceiveFail, this, &AudioMixer::domainSettingsRequestFailed);
|
||||
|
||||
ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
|
||||
}
|
||||
|
||||
void AudioMixer::domainSettingsRequestComplete() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
nodeList->linkedDataCreateCallback = [](Node* node) {
|
||||
node->setLinkedData(std::unique_ptr<AudioMixerClientData> { new AudioMixerClientData });
|
||||
};
|
||||
|
||||
DomainHandler& domainHandler = nodeList->getDomainHandler();
|
||||
const QJsonObject& settingsObject = domainHandler.getSettingsObject();
|
||||
|
||||
|
||||
// check the settings object to see if we have anything we can parse out
|
||||
parseSettingsObject(settingsObject);
|
||||
|
||||
// queue up a connection to start broadcasting mixes now that we're ready to go
|
||||
QMetaObject::invokeMethod(this, "broadcastMixes", Qt::QueuedConnection);
|
||||
}
|
||||
|
||||
void AudioMixer::broadcastMixes() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
int nextFrame = 0;
|
||||
QElapsedTimer timer;
|
||||
timer.start();
|
||||
|
||||
|
||||
int usecToSleep = AudioConstants::NETWORK_FRAME_USECS;
|
||||
|
||||
|
||||
const int TRAILING_AVERAGE_FRAMES = 100;
|
||||
int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
|
||||
|
||||
|
||||
while (!_isFinished) {
|
||||
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
|
||||
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
|
||||
|
||||
|
||||
const float RATIO_BACK_OFF = 0.02f;
|
||||
|
||||
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
|
||||
|
||||
if (usecToSleep < 0) {
|
||||
usecToSleep = 0;
|
||||
}
|
||||
|
||||
|
||||
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
|
||||
+ (usecToSleep * CURRENT_FRAME_RATIO / (float) AudioConstants::NETWORK_FRAME_USECS);
|
||||
|
||||
|
||||
float lastCutoffRatio = _performanceThrottlingRatio;
|
||||
bool hasRatioChanged = false;
|
||||
|
||||
|
||||
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
|
||||
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
|
||||
// we're struggling - change our min required loudness to reduce some load
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
|
||||
|
||||
|
||||
qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
|
||||
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
|
||||
hasRatioChanged = true;
|
||||
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
|
||||
// we've recovered and can back off the required loudness
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
|
||||
|
||||
|
||||
if (_performanceThrottlingRatio < 0) {
|
||||
_performanceThrottlingRatio = 0;
|
||||
}
|
||||
|
||||
|
||||
qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
|
||||
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
|
||||
hasRatioChanged = true;
|
||||
}
|
||||
|
||||
|
||||
if (hasRatioChanged) {
|
||||
// set out min audability threshold from the new ratio
|
||||
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
|
||||
qDebug() << "Minimum audability required to be mixed is now" << _minAudibilityThreshold;
|
||||
|
||||
|
||||
framesSinceCutoffEvent = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (!hasRatioChanged) {
|
||||
++framesSinceCutoffEvent;
|
||||
}
|
||||
|
||||
|
||||
quint64 now = usecTimestampNow();
|
||||
if (now - _lastPerSecondCallbackTime > USECS_PER_SECOND) {
|
||||
perSecondActions();
|
||||
_lastPerSecondCallbackTime = now;
|
||||
}
|
||||
|
||||
|
||||
nodeList->eachNode([&](const SharedNodePointer& node) {
|
||||
|
||||
|
||||
if (node->getLinkedData()) {
|
||||
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
|
||||
|
||||
|
||||
// this function will attempt to pop a frame from each audio stream.
|
||||
// a pointer to the popped data is stored as a member in InboundAudioStream.
|
||||
// That's how the popped audio data will be read for mixing (but only if the pop was successful)
|
||||
nodeData->checkBuffersBeforeFrameSend();
|
||||
|
||||
|
||||
// if the stream should be muted, send mute packet
|
||||
if (nodeData->getAvatarAudioStream()
|
||||
&& shouldMute(nodeData->getAvatarAudioStream()->getQuietestFrameLoudness())) {
|
||||
auto mutePacket = NLPacket::create(PacketType::NoisyMute, 0);
|
||||
nodeList->sendPacket(std::move(mutePacket), *node);
|
||||
}
|
||||
|
||||
|
||||
if (node->getType() == NodeType::Agent && node->getActiveSocket()
|
||||
&& nodeData->getAvatarAudioStream()) {
|
||||
|
||||
|
||||
int streamsMixed = prepareMixForListeningNode(node.data());
|
||||
|
||||
|
||||
std::unique_ptr<NLPacket> mixPacket;
|
||||
|
||||
|
||||
if (streamsMixed > 0) {
|
||||
int mixPacketBytes = sizeof(quint16) + AudioConstants::NETWORK_FRAME_BYTES_STEREO;
|
||||
mixPacket = NLPacket::create(PacketType::MixedAudio, mixPacketBytes);
|
||||
|
||||
|
||||
// pack sequence number
|
||||
quint16 sequence = nodeData->getOutgoingSequenceNumber();
|
||||
mixPacket->writePrimitive(sequence);
|
||||
|
||||
|
||||
// pack mixed audio samples
|
||||
mixPacket->write(reinterpret_cast<char*>(_mixSamples),
|
||||
AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
} else {
|
||||
int silentPacketBytes = sizeof(quint16) + sizeof(quint16);
|
||||
mixPacket = NLPacket::create(PacketType::SilentAudioFrame, silentPacketBytes);
|
||||
|
||||
|
||||
// pack sequence number
|
||||
quint16 sequence = nodeData->getOutgoingSequenceNumber();
|
||||
mixPacket->writePrimitive(sequence);
|
||||
|
||||
|
||||
// pack number of silent audio samples
|
||||
quint16 numSilentSamples = AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
|
||||
mixPacket->writePrimitive(numSilentSamples);
|
||||
}
|
||||
|
||||
|
||||
// Send audio environment
|
||||
sendAudioEnvironmentPacket(node);
|
||||
|
||||
|
||||
// send mixed audio packet
|
||||
nodeList->sendPacket(std::move(mixPacket), *node);
|
||||
nodeData->incrementOutgoingMixedAudioSequenceNumber();
|
||||
|
||||
|
||||
// send an audio stream stats packet if it's time
|
||||
if (_sendAudioStreamStats) {
|
||||
nodeData->sendAudioStreamStatsPackets(node);
|
||||
_sendAudioStreamStats = false;
|
||||
}
|
||||
|
||||
|
||||
++_sumListeners;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
++_numStatFrames;
|
||||
|
||||
|
||||
// since we're a while loop we need to help Qt's event processing
|
||||
QCoreApplication::processEvents();
|
||||
|
||||
|
||||
if (_isFinished) {
|
||||
// at this point the audio-mixer is done
|
||||
// check if we have a deferred delete event to process (which we should once finished)
|
||||
QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
usecToSleep = (++nextFrame * AudioConstants::NETWORK_FRAME_USECS) - timer.nsecsElapsed() / 1000; // ns to us
|
||||
|
||||
|
||||
if (usecToSleep > 0) {
|
||||
usleep(usecToSleep);
|
||||
}
|
||||
|
|
|
@ -40,10 +40,13 @@ public slots:
|
|||
static const InboundAudioStream::Settings& getStreamSettings() { return _streamSettings; }
|
||||
|
||||
private slots:
|
||||
void broadcastMixes();
|
||||
void handleNodeAudioPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
|
||||
void handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
|
||||
|
||||
private:
|
||||
private:
|
||||
void domainSettingsRequestComplete();
|
||||
|
||||
/// adds one stream to the mix for a listening node
|
||||
int addStreamToMixForListeningNodeWithStream(AudioMixerClientData* listenerNodeData,
|
||||
const QUuid& streamUUID,
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
|
||||
#include <cfloat>
|
||||
#include <random>
|
||||
#include <memory>
|
||||
|
||||
#include <QtCore/QCoreApplication>
|
||||
#include <QtCore/QDateTime>
|
||||
|
@ -71,7 +72,6 @@ const float BILLBOARD_AND_IDENTITY_SEND_PROBABILITY = 1.0f / 187.0f;
|
|||
// 1) use the view frustum to cull those avatars that are out of view. Since avatar data doesn't need to be present
|
||||
// if the avatar is not in view or in the keyhole.
|
||||
void AvatarMixer::broadcastAvatarData() {
|
||||
|
||||
int idleTime = QDateTime::currentMSecsSinceEpoch() - _lastFrameTimestamp;
|
||||
|
||||
++_numStatFrames;
|
||||
|
@ -513,15 +513,15 @@ void AvatarMixer::sendStatsPacket() {
|
|||
}
|
||||
|
||||
void AvatarMixer::run() {
|
||||
qDebug() << "Waiting for connection to domain to request settings from domain-server.";
|
||||
|
||||
// wait until we have the domain-server settings, otherwise we bail
|
||||
DomainHandler& domainHandler = DependencyManager::get<NodeList>()->getDomainHandler();
|
||||
connect(&domainHandler, &DomainHandler::settingsReceived, this, &AvatarMixer::domainSettingsRequestComplete);
|
||||
connect(&domainHandler, &DomainHandler::settingsReceiveFail, this, &AvatarMixer::domainSettingsRequestFailed);
|
||||
|
||||
ThreadedAssignment::commonInit(AVATAR_MIXER_LOGGING_NAME, NodeType::AvatarMixer);
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
nodeList->linkedDataCreateCallback = [] (Node* node) {
|
||||
node->setLinkedData(new AvatarMixerClientData());
|
||||
};
|
||||
|
||||
// setup the timer that will be fired on the broadcast thread
|
||||
_broadcastTimer = new QTimer;
|
||||
_broadcastTimer->setInterval(AVATAR_DATA_SEND_INTERVAL_MSECS);
|
||||
|
@ -530,33 +530,24 @@ void AvatarMixer::run() {
|
|||
// connect appropriate signals and slots
|
||||
connect(_broadcastTimer, &QTimer::timeout, this, &AvatarMixer::broadcastAvatarData, Qt::DirectConnection);
|
||||
connect(&_broadcastThread, SIGNAL(started()), _broadcastTimer, SLOT(start()));
|
||||
}
|
||||
|
||||
// wait until we have the domain-server settings, otherwise we bail
|
||||
DomainHandler& domainHandler = nodeList->getDomainHandler();
|
||||
|
||||
qDebug() << "Waiting for domain settings from domain-server.";
|
||||
|
||||
// block until we get the settingsRequestComplete signal
|
||||
|
||||
QEventLoop loop;
|
||||
connect(&domainHandler, &DomainHandler::settingsReceived, &loop, &QEventLoop::quit);
|
||||
connect(&domainHandler, &DomainHandler::settingsReceiveFail, &loop, &QEventLoop::quit);
|
||||
domainHandler.requestDomainSettings();
|
||||
loop.exec();
|
||||
|
||||
if (domainHandler.getSettingsObject().isEmpty()) {
|
||||
qDebug() << "Failed to retreive settings object from domain-server. Bailing on assignment.";
|
||||
setFinished(true);
|
||||
return;
|
||||
}
|
||||
|
||||
void AvatarMixer::domainSettingsRequestComplete() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
nodeList->linkedDataCreateCallback = [] (Node* node) {
|
||||
node->setLinkedData(std::unique_ptr<AvatarMixerClientData> { new AvatarMixerClientData });
|
||||
};
|
||||
|
||||
// parse the settings to pull out the values we need
|
||||
parseDomainServerSettings(domainHandler.getSettingsObject());
|
||||
|
||||
parseDomainServerSettings(nodeList->getDomainHandler().getSettingsObject());
|
||||
|
||||
// start the broadcastThread
|
||||
_broadcastThread.start();
|
||||
}
|
||||
|
||||
|
||||
void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
|
||||
const QString AVATAR_MIXER_SETTINGS_KEY = "avatar_mixer";
|
||||
const QString NODE_SEND_BANDWIDTH_KEY = "max_node_send_bandwidth";
|
||||
|
|
|
@ -36,6 +36,7 @@ private slots:
|
|||
void handleAvatarIdentityPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
|
||||
void handleAvatarBillboardPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
|
||||
void handleKillAvatarPacket(QSharedPointer<NLPacket> packet);
|
||||
void domainSettingsRequestComplete();
|
||||
|
||||
private:
|
||||
void broadcastAvatarData();
|
||||
|
|
|
@ -46,8 +46,8 @@ void EntityServer::handleEntityPacket(QSharedPointer<NLPacket> packet, SharedNod
|
|||
}
|
||||
}
|
||||
|
||||
OctreeQueryNode* EntityServer::createOctreeQueryNode() {
|
||||
return new EntityNodeData();
|
||||
std::unique_ptr<OctreeQueryNode> EntityServer::createOctreeQueryNode() {
|
||||
return std::unique_ptr<OctreeQueryNode> { new EntityNodeData() };
|
||||
}
|
||||
|
||||
OctreePointer EntityServer::createTree() {
|
||||
|
@ -253,7 +253,7 @@ void EntityServer::pruneDeletedEntities() {
|
|||
}
|
||||
}
|
||||
|
||||
bool EntityServer::readAdditionalConfiguration(const QJsonObject& settingsSectionObject) {
|
||||
void EntityServer::readAdditionalConfiguration(const QJsonObject& settingsSectionObject) {
|
||||
bool wantEditLogging = false;
|
||||
readOptionBool(QString("wantEditLogging"), settingsSectionObject, wantEditLogging);
|
||||
qDebug("wantEditLogging=%s", debug::valueOf(wantEditLogging));
|
||||
|
@ -265,6 +265,4 @@ bool EntityServer::readAdditionalConfiguration(const QJsonObject& settingsSectio
|
|||
EntityTreePointer tree = std::static_pointer_cast<EntityTree>(_tree);
|
||||
tree->setWantEditLogging(wantEditLogging);
|
||||
tree->setWantTerseEditLogging(wantTerseEditLogging);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
|
|
@ -14,6 +14,8 @@
|
|||
|
||||
#include "../octree/OctreeServer.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "EntityItem.h"
|
||||
#include "EntityServerConsts.h"
|
||||
#include "EntityTree.h"
|
||||
|
@ -26,7 +28,7 @@ public:
|
|||
~EntityServer();
|
||||
|
||||
// Subclasses must implement these methods
|
||||
virtual OctreeQueryNode* createOctreeQueryNode() override ;
|
||||
virtual std::unique_ptr<OctreeQueryNode> createOctreeQueryNode() override ;
|
||||
virtual char getMyNodeType() const override { return NodeType::EntityServer; }
|
||||
virtual PacketType getMyQueryMessageType() const override { return PacketType::EntityQuery; }
|
||||
virtual const char* getMyServerName() const override { return MODEL_SERVER_NAME; }
|
||||
|
@ -41,7 +43,7 @@ public:
|
|||
virtual int sendSpecialPackets(const SharedNodePointer& node, OctreeQueryNode* queryNode, int& packetsSent) override;
|
||||
|
||||
virtual void entityCreated(const EntityItem& newEntity, const SharedNodePointer& senderNode) override;
|
||||
virtual bool readAdditionalConfiguration(const QJsonObject& settingsSectionObject) override;
|
||||
virtual void readAdditionalConfiguration(const QJsonObject& settingsSectionObject) override;
|
||||
|
||||
public slots:
|
||||
void pruneDeletedEntities();
|
||||
|
|
|
@ -12,30 +12,23 @@
|
|||
#include <QtCore/QCoreApplication>
|
||||
#include <QtCore/QJsonObject>
|
||||
#include <QBuffer>
|
||||
|
||||
#include <LogHandler.h>
|
||||
#include <MessagesClient.h>
|
||||
#include <NodeList.h>
|
||||
#include <udt/PacketHeaders.h>
|
||||
|
||||
#include "MessagesMixer.h"
|
||||
|
||||
const QString MESSAGES_MIXER_LOGGING_NAME = "messages-mixer";
|
||||
|
||||
MessagesMixer::MessagesMixer(NLPacket& packet) :
|
||||
ThreadedAssignment(packet)
|
||||
MessagesMixer::MessagesMixer(NLPacket& packet) : ThreadedAssignment(packet)
|
||||
{
|
||||
// make sure we hear about node kills so we can tell the other nodes
|
||||
connect(DependencyManager::get<NodeList>().data(), &NodeList::nodeKilled, this, &MessagesMixer::nodeKilled);
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
packetReceiver.registerMessageListener(PacketType::MessagesData, this, "handleMessages");
|
||||
packetReceiver.registerMessageListener(PacketType::MessagesSubscribe, this, "handleMessagesSubscribe");
|
||||
packetReceiver.registerMessageListener(PacketType::MessagesUnsubscribe, this, "handleMessagesUnsubscribe");
|
||||
}
|
||||
|
||||
MessagesMixer::~MessagesMixer() {
|
||||
}
|
||||
|
||||
void MessagesMixer::nodeKilled(SharedNodePointer killedNode) {
|
||||
for (auto& channel : _channelSubscribers) {
|
||||
channel.remove(killedNode->getUUID());
|
||||
|
@ -43,92 +36,52 @@ void MessagesMixer::nodeKilled(SharedNodePointer killedNode) {
|
|||
}
|
||||
|
||||
void MessagesMixer::handleMessages(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
|
||||
Q_ASSERT(packetList->getType() == PacketType::MessagesData);
|
||||
QString channel, message;
|
||||
QUuid senderID;
|
||||
MessagesClient::decodeMessagesPacket(packetList, channel, message, senderID);
|
||||
|
||||
QByteArray packetData = packetList->getMessage();
|
||||
QBuffer packet{ &packetData };
|
||||
packet.open(QIODevice::ReadOnly);
|
||||
|
||||
quint16 channelLength;
|
||||
packet.read(reinterpret_cast<char*>(&channelLength), sizeof(channelLength));
|
||||
auto channelData = packet.read(channelLength);
|
||||
QString channel = QString::fromUtf8(channelData);
|
||||
|
||||
quint16 messageLength;
|
||||
packet.read(reinterpret_cast<char*>(&messageLength), sizeof(messageLength));
|
||||
auto messageData = packet.read(messageLength);
|
||||
QString message = QString::fromUtf8(messageData);
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
nodeList->eachMatchingNode(
|
||||
[&](const SharedNodePointer& node)->bool {
|
||||
|
||||
return node->getType() == NodeType::Agent && node->getActiveSocket() &&
|
||||
_channelSubscribers[channel].contains(node->getUUID());
|
||||
},
|
||||
[&](const SharedNodePointer& node) {
|
||||
|
||||
auto packetList = NLPacketList::create(PacketType::MessagesData, QByteArray(), true, true);
|
||||
|
||||
auto channelUtf8 = channel.toUtf8();
|
||||
quint16 channelLength = channelUtf8.length();
|
||||
packetList->writePrimitive(channelLength);
|
||||
packetList->write(channelUtf8);
|
||||
|
||||
auto messageUtf8 = message.toUtf8();
|
||||
quint16 messageLength = messageUtf8.length();
|
||||
packetList->writePrimitive(messageLength);
|
||||
packetList->write(messageUtf8);
|
||||
|
||||
auto packetList = MessagesClient::encodeMessagesPacket(channel, message, senderID);
|
||||
nodeList->sendPacketList(std::move(packetList), *node);
|
||||
});
|
||||
}
|
||||
|
||||
void MessagesMixer::handleMessagesSubscribe(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
|
||||
Q_ASSERT(packetList->getType() == PacketType::MessagesSubscribe);
|
||||
QString channel = QString::fromUtf8(packetList->getMessage());
|
||||
qDebug() << "Node [" << senderNode->getUUID() << "] subscribed to channel:" << channel;
|
||||
_channelSubscribers[channel] << senderNode->getUUID();
|
||||
}
|
||||
|
||||
void MessagesMixer::handleMessagesUnsubscribe(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
|
||||
Q_ASSERT(packetList->getType() == PacketType::MessagesUnsubscribe);
|
||||
QString channel = QString::fromUtf8(packetList->getMessage());
|
||||
qDebug() << "Node [" << senderNode->getUUID() << "] unsubscribed from channel:" << channel;
|
||||
|
||||
if (_channelSubscribers.contains(channel)) {
|
||||
_channelSubscribers[channel].remove(senderNode->getUUID());
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME - make these stats relevant
|
||||
void MessagesMixer::sendStatsPacket() {
|
||||
QJsonObject statsObject;
|
||||
QJsonObject messagesObject;
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
QJsonObject statsObject, messagesMixerObject;
|
||||
|
||||
// add stats for each listerner
|
||||
nodeList->eachNode([&](const SharedNodePointer& node) {
|
||||
QJsonObject messagesStats;
|
||||
|
||||
// add the key to ask the domain-server for a username replacement, if it has it
|
||||
messagesStats[USERNAME_UUID_REPLACEMENT_STATS_KEY] = uuidStringWithoutCurlyBraces(node->getUUID());
|
||||
messagesStats["outbound_kbps"] = node->getOutboundBandwidth();
|
||||
messagesStats["inbound_kbps"] = node->getInboundBandwidth();
|
||||
|
||||
messagesObject[uuidStringWithoutCurlyBraces(node->getUUID())] = messagesStats;
|
||||
DependencyManager::get<NodeList>()->eachNode([&](const SharedNodePointer& node) {
|
||||
QJsonObject clientStats;
|
||||
clientStats[USERNAME_UUID_REPLACEMENT_STATS_KEY] = uuidStringWithoutCurlyBraces(node->getUUID());
|
||||
clientStats["outbound_kbps"] = node->getOutboundBandwidth();
|
||||
clientStats["inbound_kbps"] = node->getInboundBandwidth();
|
||||
messagesMixerObject[uuidStringWithoutCurlyBraces(node->getUUID())] = clientStats;
|
||||
});
|
||||
|
||||
statsObject["messages"] = messagesObject;
|
||||
statsObject["messages"] = messagesMixerObject;
|
||||
ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
|
||||
}
|
||||
|
||||
void MessagesMixer::run() {
|
||||
ThreadedAssignment::commonInit(MESSAGES_MIXER_LOGGING_NAME, NodeType::MessagesMixer);
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
// The messages-mixer currently does currently have any domain settings. If it did, they would be
|
||||
// synchronously grabbed here.
|
||||
}
|
||||
DependencyManager::get<NodeList>()->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
}
|
|
@ -22,7 +22,6 @@ class MessagesMixer : public ThreadedAssignment {
|
|||
Q_OBJECT
|
||||
public:
|
||||
MessagesMixer(NLPacket& packet);
|
||||
~MessagesMixer();
|
||||
|
||||
public slots:
|
||||
void run();
|
||||
|
|
|
@ -317,6 +317,7 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
#endif
|
||||
|
||||
bool showStats = false;
|
||||
QString persistFile = "/" + getPersistFilename();
|
||||
|
||||
if (connection->requestOperation() == QNetworkAccessManager::GetOperation) {
|
||||
if (url.path() == "/") {
|
||||
|
@ -326,6 +327,18 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
_tree->resetEditStats();
|
||||
resetSendingStats();
|
||||
showStats = true;
|
||||
} else if ((url.path() == persistFile) || (url.path() == persistFile + "/")) {
|
||||
if (_persistFileDownload) {
|
||||
QByteArray persistFileContents = getPersistFileContents();
|
||||
if (persistFileContents.length() > 0) {
|
||||
connection->respond(HTTPConnection::StatusCode200, persistFileContents, qPrintable(getPersistFileMimeType()));
|
||||
} else {
|
||||
connection->respond(HTTPConnection::StatusCode500, HTTPConnection::StatusCode500);
|
||||
}
|
||||
} else {
|
||||
connection->respond(HTTPConnection::StatusCode403, HTTPConnection::StatusCode403); // not allowed
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -367,6 +380,12 @@ bool OctreeServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
statsString += getFileLoadTime();
|
||||
statsString += "\r\n";
|
||||
|
||||
if (_persistFileDownload) {
|
||||
statsString += QString("Persist file: <a href='%1'>%1</a>\r\n").arg(persistFile);
|
||||
} else {
|
||||
statsString += QString("Persist file: %1\r\n").arg(persistFile);
|
||||
}
|
||||
|
||||
} else {
|
||||
statsString += "Octree file not yet loaded...\r\n";
|
||||
}
|
||||
|
@ -932,31 +951,14 @@ bool OctreeServer::readOptionString(const QString& optionName, const QJsonObject
|
|||
return optionAvailable;
|
||||
}
|
||||
|
||||
bool OctreeServer::readConfiguration() {
|
||||
void OctreeServer::readConfiguration() {
|
||||
// if the assignment had a payload, read and parse that
|
||||
if (getPayload().size() > 0) {
|
||||
parsePayload();
|
||||
}
|
||||
|
||||
const QJsonObject& settingsObject = DependencyManager::get<NodeList>()->getDomainHandler().getSettingsObject();
|
||||
|
||||
// wait until we have the domain-server settings, otherwise we bail
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
DomainHandler& domainHandler = nodeList->getDomainHandler();
|
||||
|
||||
qDebug() << "Waiting for domain settings from domain-server.";
|
||||
|
||||
// block until we get the settingsRequestComplete signal
|
||||
QEventLoop loop;
|
||||
connect(&domainHandler, &DomainHandler::settingsReceived, &loop, &QEventLoop::quit);
|
||||
connect(&domainHandler, &DomainHandler::settingsReceiveFail, &loop, &QEventLoop::quit);
|
||||
domainHandler.requestDomainSettings();
|
||||
loop.exec();
|
||||
|
||||
if (domainHandler.getSettingsObject().isEmpty()) {
|
||||
qDebug() << "Failed to retreive settings object from domain-server. Bailing on assignment.";
|
||||
return false;
|
||||
}
|
||||
|
||||
const QJsonObject& settingsObject = domainHandler.getSettingsObject();
|
||||
QString settingsKey = getMyDomainSettingsKey();
|
||||
QJsonObject settingsSectionObject = settingsObject[settingsKey].toObject();
|
||||
_settings = settingsSectionObject; // keep this for later
|
||||
|
@ -1026,7 +1028,8 @@ bool OctreeServer::readConfiguration() {
|
|||
_wantBackup = !noBackup;
|
||||
qDebug() << "wantBackup=" << _wantBackup;
|
||||
|
||||
//qDebug() << "settingsSectionObject:" << settingsSectionObject;
|
||||
readOptionBool(QString("persistFileDownload"), settingsSectionObject, _persistFileDownload);
|
||||
qDebug() << "persistFileDownload=" << _persistFileDownload;
|
||||
|
||||
} else {
|
||||
qDebug("persistFilename= DISABLED");
|
||||
|
@ -1064,79 +1067,79 @@ bool OctreeServer::readConfiguration() {
|
|||
packetsPerSecondTotalMax, _packetsTotalPerInterval);
|
||||
|
||||
|
||||
return readAdditionalConfiguration(settingsSectionObject);
|
||||
readAdditionalConfiguration(settingsSectionObject);
|
||||
}
|
||||
|
||||
void OctreeServer::run() {
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
packetReceiver.registerListener(getMyQueryMessageType(), this, "handleOctreeQueryPacket");
|
||||
packetReceiver.registerListener(PacketType::OctreeDataNack, this, "handleOctreeDataNackPacket");
|
||||
packetReceiver.registerListener(PacketType::JurisdictionRequest, this, "handleJurisdictionRequestPacket");
|
||||
|
||||
_safeServerName = getMyServerName();
|
||||
|
||||
// Before we do anything else, create our tree...
|
||||
OctreeElement::resetPopulationStatistics();
|
||||
_tree = createTree();
|
||||
_tree->setIsServer(true);
|
||||
|
||||
// make sure our NodeList knows what type we are
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->setOwnerType(getMyNodeType());
|
||||
|
||||
qDebug() << "Waiting for connection to domain to request settings from domain-server.";
|
||||
|
||||
// wait until we have the domain-server settings, otherwise we bail
|
||||
DomainHandler& domainHandler = DependencyManager::get<NodeList>()->getDomainHandler();
|
||||
connect(&domainHandler, &DomainHandler::settingsReceived, this, &OctreeServer::domainSettingsRequestComplete);
|
||||
connect(&domainHandler, &DomainHandler::settingsReceiveFail, this, &OctreeServer::domainSettingsRequestFailed);
|
||||
|
||||
// use common init to setup common timers and logging
|
||||
commonInit(getMyLoggingServerTargetName(), getMyNodeType());
|
||||
}
|
||||
|
||||
void OctreeServer::domainSettingsRequestComplete() {
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
// we need to ask the DS about agents so we can ping/reply with them
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
// read the configuration from either the payload or the domain server configuration
|
||||
if (!readConfiguration()) {
|
||||
qDebug() << "OctreeServer bailing on run since readConfiguration has failed.";
|
||||
setFinished(true);
|
||||
return; // bailing on run, because readConfiguration failed
|
||||
}
|
||||
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
packetReceiver.registerListener(getMyQueryMessageType(), this, "handleOctreeQueryPacket");
|
||||
packetReceiver.registerListener(PacketType::OctreeDataNack, this, "handleOctreeDataNackPacket");
|
||||
packetReceiver.registerListener(PacketType::JurisdictionRequest, this, "handleJurisdictionRequestPacket");
|
||||
|
||||
readConfiguration();
|
||||
|
||||
beforeRun(); // after payload has been processed
|
||||
|
||||
|
||||
connect(nodeList.data(), SIGNAL(nodeAdded(SharedNodePointer)), SLOT(nodeAdded(SharedNodePointer)));
|
||||
connect(nodeList.data(), SIGNAL(nodeKilled(SharedNodePointer)), SLOT(nodeKilled(SharedNodePointer)));
|
||||
|
||||
#ifndef WIN32
|
||||
setvbuf(stdout, NULL, _IOLBF, 0);
|
||||
#endif
|
||||
|
||||
|
||||
nodeList->linkedDataCreateCallback = [] (Node* node) {
|
||||
OctreeQueryNode* newQueryNodeData = _instance->createOctreeQueryNode();
|
||||
newQueryNodeData->init();
|
||||
node->setLinkedData(newQueryNodeData);
|
||||
auto queryNodeData = _instance->createOctreeQueryNode();
|
||||
queryNodeData->init();
|
||||
node->setLinkedData(std::move(queryNodeData));
|
||||
};
|
||||
|
||||
|
||||
srand((unsigned)time(0));
|
||||
|
||||
|
||||
// if we want Persistence, set up the local file and persist thread
|
||||
if (_wantPersist) {
|
||||
|
||||
|
||||
// now set up PersistThread
|
||||
_persistThread = new OctreePersistThread(_tree, _persistFilename, _persistInterval,
|
||||
_wantBackup, _settings, _debugTimestampNow, _persistAsFileType);
|
||||
_persistThread->initialize(true);
|
||||
}
|
||||
|
||||
HifiSockAddr senderSockAddr;
|
||||
|
||||
|
||||
// set up our jurisdiction broadcaster...
|
||||
if (_jurisdiction) {
|
||||
_jurisdiction->setNodeType(getMyNodeType());
|
||||
}
|
||||
_jurisdictionSender = new JurisdictionSender(_jurisdiction, getMyNodeType());
|
||||
_jurisdictionSender->initialize(true);
|
||||
|
||||
|
||||
// set up our OctreeServerPacketProcessor
|
||||
_octreeInboundPacketProcessor = new OctreeInboundPacketProcessor(this);
|
||||
_octreeInboundPacketProcessor->initialize(true);
|
||||
|
||||
|
||||
// Convert now to tm struct for local timezone
|
||||
tm* localtm = localtime(&_started);
|
||||
const int MAX_TIME_LENGTH = 128;
|
||||
|
@ -1148,6 +1151,7 @@ void OctreeServer::run() {
|
|||
if (gmtm) {
|
||||
strftime(utcBuffer, MAX_TIME_LENGTH, " [%m/%d/%Y %X UTC]", gmtm);
|
||||
}
|
||||
|
||||
qDebug() << "Now running... started at: " << localBuffer << utcBuffer;
|
||||
}
|
||||
|
||||
|
|
|
@ -12,6 +12,8 @@
|
|||
#ifndef hifi_OctreeServer_h
|
||||
#define hifi_OctreeServer_h
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <QStringList>
|
||||
#include <QDateTime>
|
||||
#include <QtCore/QCoreApplication>
|
||||
|
@ -59,9 +61,12 @@ public:
|
|||
bool isInitialLoadComplete() const { return (_persistThread) ? _persistThread->isInitialLoadComplete() : true; }
|
||||
bool isPersistEnabled() const { return (_persistThread) ? true : false; }
|
||||
quint64 getLoadElapsedTime() const { return (_persistThread) ? _persistThread->getLoadElapsedTime() : 0; }
|
||||
QString getPersistFilename() const { return (_persistThread) ? _persistThread->getPersistFilename() : ""; }
|
||||
QString getPersistFileMimeType() const { return (_persistThread) ? _persistThread->getPersistFileMimeType() : "text/plain"; }
|
||||
QByteArray getPersistFileContents() const { return (_persistThread) ? _persistThread->getPersistFileContents() : QByteArray(); }
|
||||
|
||||
// Subclasses must implement these methods
|
||||
virtual OctreeQueryNode* createOctreeQueryNode() = 0;
|
||||
virtual std::unique_ptr<OctreeQueryNode> createOctreeQueryNode() = 0;
|
||||
virtual char getMyNodeType() const = 0;
|
||||
virtual PacketType getMyQueryMessageType() const = 0;
|
||||
virtual const char* getMyServerName() const = 0;
|
||||
|
@ -126,6 +131,7 @@ public slots:
|
|||
void sendStatsPacket();
|
||||
|
||||
private slots:
|
||||
void domainSettingsRequestComplete();
|
||||
void handleOctreeQueryPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
|
||||
void handleOctreeDataNackPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
|
||||
void handleJurisdictionRequestPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
|
||||
|
@ -135,8 +141,8 @@ protected:
|
|||
bool readOptionBool(const QString& optionName, const QJsonObject& settingsSectionObject, bool& result);
|
||||
bool readOptionInt(const QString& optionName, const QJsonObject& settingsSectionObject, int& result);
|
||||
bool readOptionString(const QString& optionName, const QJsonObject& settingsSectionObject, QString& result);
|
||||
bool readConfiguration();
|
||||
virtual bool readAdditionalConfiguration(const QJsonObject& settingsSectionObject) { return true; };
|
||||
void readConfiguration();
|
||||
virtual void readAdditionalConfiguration(const QJsonObject& settingsSectionObject) { };
|
||||
void parsePayload();
|
||||
void initHTTPManager(int port);
|
||||
void resetSendingStats();
|
||||
|
@ -173,6 +179,7 @@ protected:
|
|||
|
||||
int _persistInterval;
|
||||
bool _wantBackup;
|
||||
bool _persistFileDownload;
|
||||
QString _backupExtensionFormat;
|
||||
int _backupInterval;
|
||||
int _maxBackupVersions;
|
||||
|
|
cmake/externals/bullet/CMakeLists.txt (vendored, 10 changes)
@ -18,8 +18,8 @@ if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
# URL https://bullet.googlecode.com/files/bullet-2.82-r2704.zip
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-r2704.zip
URL_MD5 f5e8914fc9064ad32e0d62d19d33d977
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-ccd-fix.zip
URL_MD5 d95b07eb120de7dd7786361c0b5a8d9f
CMAKE_ARGS ${PLATFORM_CMAKE_ARGS} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DBUILD_EXTRAS=0 -DINSTALL_LIBS=1 -DBUILD_DEMOS=0 -DUSE_GLUT=0 -DUSE_DX11=0
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
@ -30,8 +30,8 @@ else ()
ExternalProject_Add(
${EXTERNAL_NAME}
#URL http://bullet.googlecode.com/files/bullet-2.82-r2704.tgz
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-r2704.tgz
URL_MD5 70b3c8d202dee91a0854b4cbc88173e8
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-ccd-fix.tgz
URL_MD5 fb140a4983b4109aa1c825a162aa8d64
CMAKE_ARGS ${PLATFORM_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DBUILD_EXTRAS=0 -DINSTALL_LIBS=1 -DBUILD_DEMOS=0 -DUSE_GLUT=0
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
@ -80,4 +80,4 @@ endif ()

if (DEFINED ${EXTERNAL_NAME_UPPER}_DYNAMICS_LIBRARY_RELEASE)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include/bullet CACHE PATH "Path to bullet include directory")
endif ()
endif ()
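Since this commit repins both Bullet archives to a new URL and MD5, a quick manual spot-check of the download can be reassuring before CMake consumes it. A small shell sketch (the URL and checksum come verbatim from the diff above; the md5 tool name is an assumption that differs between macOS and Linux):

    # fetch the repinned archive and compare against the URL_MD5 value in the diff
    curl -LO http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-ccd-fix.zip
    md5 bullet-2.82-ccd-fix.zip    # on Linux: md5sum bullet-2.82-ccd-fix.zip
    # expected: d95b07eb120de7dd7786361c0b5a8d9f

ExternalProject_Add already validates URL_MD5 during its download step, so this is only a way to confirm the mirror is serving the expected bytes.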
52  cmake/externals/quazip/CMakeLists.txt  (vendored, new file)

@@ -0,0 +1,52 @@
set(EXTERNAL_NAME quazip)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
cmake_policy(SET CMP0046 OLD)

include(ExternalProject)

if (WIN32)
  # windows shell does not like backslashes expanded on the command line,
  # so convert all backslashes in the QT path to forward slashes
  string(REPLACE \\ / QT_CMAKE_PREFIX_PATH $ENV{QT_CMAKE_PREFIX_PATH})
elseif ($ENV{QT_CMAKE_PREFIX_PATH})
  set(QT_CMAKE_PREFIX_PATH $ENV{QT_CMAKE_PREFIX_PATH})
endif ()

ExternalProject_Add(
  ${EXTERNAL_NAME}
  URL http://s3-us-west-1.amazonaws.com/hifi-production/dependencies/quazip-0.6.2.zip
  URL_MD5 514851970f1a14d815bdc3ad6267af4d
  BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DCMAKE_PREFIX_PATH=${QT_CMAKE_PREFIX_PATH} -DCMAKE_INSTALL_NAME_DIR:PATH=<INSTALL_DIR>/lib -DZLIB_ROOT=${ZLIB_ROOT}
  LOG_DOWNLOAD 1
  LOG_CONFIGURE 1
  LOG_BUILD 1
)

add_dependencies(quazip zlib)

# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES
  FOLDER "hidden/externals"
  INSTALL_NAME_DIR ${INSTALL_DIR}/lib
  BUILD_WITH_INSTALL_RPATH True)

ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include CACHE PATH "List of QuaZip include directories")
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${${EXTERNAL_NAME_UPPER}_INCLUDE_DIR} CACHE PATH "List of QuaZip include directories")
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${INSTALL_DIR}/lib CACHE FILEPATH "Location of QuaZip DLL")

if (APPLE)
  set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
elseif (WIN32)
  set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/quazip.lib CACHE FILEPATH "Location of QuaZip release library")
else ()
  set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip.so CACHE FILEPATH "Location of QuaZip release library")
endif ()

include(SelectLibraryConfigurations)
select_library_configurations(${EXTERNAL_NAME_UPPER})

# Force selected libraries into the cache
set(${EXTERNAL_NAME_UPPER}_LIBRARY ${${EXTERNAL_NAME_UPPER}_LIBRARY} CACHE FILEPATH "Location of QuaZip libraries")
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Location of QuaZip libraries")
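Downstream targets are meant to consume this external through the find module and macro added further down in this commit. A minimal sketch of that wiring, where "my-tool" is a placeholder target name and not something from this commit:

    # hedged usage sketch; see cmake/macros/TargetQuazip.cmake below for the real helper
    add_dependency_external_projects(quazip)
    find_package(QuaZip REQUIRED)
    target_include_directories(my-tool PUBLIC ${QUAZIP_INCLUDE_DIRS})
    target_link_libraries(my-tool ${QUAZIP_LIBRARIES})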
15  cmake/externals/zlib/CMakeLists.txt  (vendored)

@@ -4,19 +4,20 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)

ExternalProject_Add(
  ${EXTERNAL_NAME}
  URL http://zlib.net/zlib128.zip
  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
  BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
  LOG_DOWNLOAD 1
  LOG_CONFIGURE 1
  LOG_BUILD 1
)

# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")

ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
set(${EXTERNAL_NAME_UPPER}_ROOT ${INSTALL_DIR} CACHE PATH "Path for Zlib install root")
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIR ${INSTALL_DIR}/include CACHE PATH "List of zlib include directories")
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${${EXTERNAL_NAME_UPPER}_INCLUDE_DIR} CACHE PATH "List of zlib include directories")
set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${INSTALL_DIR}/bin CACHE FILEPATH "Location of ZLib DLL")
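The install root cached here is what the quazip external above forwards to its own configure step through -DZLIB_ROOT. A minimal, hedged sketch of consuming the same cache variables from a target; the fixup helper named below is the one the QuaZip macro in this commit uses and is assumed to apply here as well:

    # hedged sketch only; ZLIB_* are the cache variables set in the hunk above
    add_dependency_external_projects(zlib)
    target_include_directories(${TARGET_NAME} PRIVATE ${ZLIB_INCLUDE_DIRS})
    if (WIN32)
      add_paths_to_fixup_libs(${ZLIB_DLL_PATH})
    endif ()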
@@ -9,7 +9,7 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#

macro(COPY_DLLS_BESIDE_WINDOWS_EXECUTABLE)
macro(PACKAGE_LIBRARIES_FOR_DEPLOYMENT)

  if (WIN32)
    configure_file(

@@ -18,11 +18,7 @@ macro(COPY_DLLS_BESIDE_WINDOWS_EXECUTABLE)
      @ONLY
    )

    if (APPLE)
      set(PLUGIN_PATH "interface.app/Contents/MacOS/plugins")
    else()
      set(PLUGIN_PATH "plugins")
    endif()
    set(PLUGIN_PATH "plugins")

    # add a post-build command to copy DLLs beside the executable
    add_custom_command(

@@ -46,5 +42,18 @@ macro(COPY_DLLS_BESIDE_WINDOWS_EXECUTABLE)
      POST_BUILD
      COMMAND CMD /C "SET PATH=%PATH%;${QT_DIR}/bin && ${WINDEPLOYQT_COMMAND} $<$<OR:$<CONFIG:Release>,$<CONFIG:MinSizeRel>,$<CONFIG:RelWithDebInfo>>:--release> $<TARGET_FILE:${TARGET_NAME}>"
    )
  elseif (DEFINED BUILD_BUNDLE AND BUILD_BUNDLE AND APPLE)
    find_program(MACDEPLOYQT_COMMAND macdeployqt PATHS ${QT_DIR}/bin NO_DEFAULT_PATH)

    if (NOT MACDEPLOYQT_COMMAND)
      message(FATAL_ERROR "Could not find macdeployqt at ${QT_DIR}/bin. macdeployqt is required.")
    endif ()

    # add a post-build command to call macdeployqt to copy Qt plugins
    add_custom_command(
      TARGET ${TARGET_NAME}
      POST_BUILD
      COMMAND ${MACDEPLOYQT_COMMAND} ${CMAKE_CURRENT_BINARY_DIR}/\${CONFIGURATION}/${TARGET_NAME}.app -verbose 0
    )
  endif ()
endmacro()
@@ -21,7 +21,7 @@ macro(LINK_HIFI_LIBRARIES)
    include_directories("${HIFI_LIBRARY_DIR}/${HIFI_LIBRARY}/src")

    add_dependencies(${TARGET_NAME} ${HIFI_LIBRARY})

    # link the actual library - it is static so don't bubble it up
    target_link_libraries(${TARGET_NAME} ${HIFI_LIBRARY})
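As the macro body shows, each listed library contributes its src/ include path, a build dependency, and a static link to ${TARGET_NAME}. A short, hedged usage sketch; the library names are illustrative, not taken from this hunk:

    link_hifi_libraries(shared networking entities)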
@@ -22,8 +22,11 @@ macro(SETUP_HIFI_PROJECT)
    endif ()
  endforeach()

  # add the executable, include additional optional sources
  add_executable(${TARGET_NAME} ${TARGET_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
  if (DEFINED BUILD_BUNDLE AND BUILD_BUNDLE AND APPLE)
    add_executable(${TARGET_NAME} MACOSX_BUNDLE ${TARGET_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
  else ()
    add_executable(${TARGET_NAME} ${TARGET_SRCS} ${AUTOMTC_SRC} ${AUTOSCRIBE_SHADER_LIB_SRC})
  endif()

  set(${TARGET_NAME}_DEPENDENCY_QT_MODULES ${ARGN})
  list(APPEND ${TARGET_NAME}_DEPENDENCY_QT_MODULES Core)

@@ -31,6 +34,13 @@ macro(SETUP_HIFI_PROJECT)
  # find these Qt modules and link them to our own target
  find_package(Qt5 COMPONENTS ${${TARGET_NAME}_DEPENDENCY_QT_MODULES} REQUIRED)

  # disable /OPT:REF and /OPT:ICF for the Debug builds
  # This will prevent the following linker warnings
  # LINK : warning LNK4075: ignoring '/INCREMENTAL' due to '/OPT:ICF' specification
  if (WIN32)
    set_property(TARGET ${TARGET_NAME} APPEND_STRING PROPERTY LINK_FLAGS_DEBUG "/OPT:NOREF /OPT:NOICF")
  endif()

  foreach(QT_MODULE ${${TARGET_NAME}_DEPENDENCY_QT_MODULES})
    target_link_libraries(${TARGET_NAME} Qt5::${QT_MODULE})
  endforeach()
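Putting the two macros above together, a typical tool CMakeLists would look roughly like the following sketch; the target name and the module/library lists are illustrative placeholders, and the surrounding macro includes are assumed to already be set up by the top-level build:

    set(TARGET_NAME my-tool)               # placeholder name, not from this commit
    setup_hifi_project(Network Script)     # Qt modules via ARGN; Core is appended automatically
    link_hifi_libraries(shared networking) # illustrative internal libraries
    package_libraries_for_deployment()     # windeployqt on Windows, macdeployqt when BUILD_BUNDLE is set on OS X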
16  cmake/macros/TargetQuazip.cmake  (new file)

@@ -0,0 +1,16 @@
#
# Copyright 2015 High Fidelity, Inc.
# Created by Leonardo Murillo on 2015/11/20
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(TARGET_QUAZIP)
  add_dependency_external_projects(quazip)
  find_package(QuaZip REQUIRED)
  target_include_directories(${TARGET_NAME} PUBLIC ${QUAZIP_INCLUDE_DIRS})
  target_link_libraries(${TARGET_NAME} ${QUAZIP_LIBRARIES})
  if (WIN32)
    add_paths_to_fixup_libs(${QUAZIP_DLL_PATH})
  endif ()
endmacro()
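A consuming target then only needs the macro call. A minimal, hedged usage sketch, assuming the macros directory is already on the CMake include path as with the other helpers in this tree:

    target_quazip()   # pulls in the external, runs find_package(QuaZip REQUIRED), and links ${TARGET_NAME}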
29  cmake/modules/FindQuaZip.cmake  (new file)

@@ -0,0 +1,29 @@
#
# FindQuaZip.h
# StackManagerQt/cmake/modules
#
# Created by Mohammed Nafees.
# Copyright (c) 2014 High Fidelity. All rights reserved.
#

# QUAZIP_FOUND - QuaZip library was found
# QUAZIP_INCLUDE_DIR - Path to QuaZip include dir
# QUAZIP_INCLUDE_DIRS - Path to QuaZip and zlib include dir (combined from QUAZIP_INCLUDE_DIR + ZLIB_INCLUDE_DIR)
# QUAZIP_LIBRARIES - List of QuaZip libraries
# QUAZIP_ZLIB_INCLUDE_DIR - The include dir of zlib headers

include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("quazip")

if (WIN32)
  find_path(QUAZIP_INCLUDE_DIRS quazip.h PATH_SUFFIXES include/quazip HINTS ${QUAZIP_SEARCH_DIRS})
elseif (APPLE)
  find_path(QUAZIP_INCLUDE_DIRS quazip.h PATH_SUFFIXES include/quazip HINTS ${QUAZIP_SEARCH_DIRS})
else ()
  find_path(QUAZIP_INCLUDE_DIRS quazip.h PATH_SUFFIXES quazip HINTS ${QUAZIP_SEARCH_DIRS})
endif ()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(QUAZIP DEFAULT_MSG QUAZIP_INCLUDE_DIRS)

mark_as_advanced(QUAZIP_INCLUDE_DIRS QUAZIP_SEARCH_DIRS)
@@ -37,4 +37,4 @@ if (UNIX)
endif (UNIX)

include_application_version()
copy_dlls_beside_windows_executable()
package_libraries_for_deployment()
@@ -476,6 +476,14 @@
        "default": "",
        "advanced": true
      },
      {
        "name": "persistFileDownload",
        "type": "checkbox",
        "label": "Persist File Download",
        "help": "Includes a download link to the persist file in the server status page.",
        "default": false,
        "advanced": true
      },
      {
        "name": "wantEditLogging",
        "type": "checkbox",
@@ -11,6 +11,8 @@

#include "DomainServer.h"

#include <memory>

#include <QDir>
#include <QJsonDocument>
#include <QJsonObject>

@@ -1097,29 +1099,37 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url

    if (connection->requestOperation() == QNetworkAccessManager::GetOperation
        && assignmentRegex.indexIn(url.path()) != -1) {
        QUuid matchingUUID = QUuid(assignmentRegex.cap(1));

        SharedAssignmentPointer matchingAssignment = _allAssignments.value(matchingUUID);
        if (!matchingAssignment) {
            // check if we have a pending assignment that matches this temp UUID, and it is a scripted assignment
            QUuid assignmentUUID = _gatekeeper.assignmentUUIDForPendingAssignment(matchingUUID);
            if (!assignmentUUID.isNull()) {
                matchingAssignment = _allAssignments.value(assignmentUUID);

                if (matchingAssignment && matchingAssignment->getType() == Assignment::AgentType) {
                    // we have a matching assignment and it is for the right type, have the HTTP manager handle it
                    // via correct URL for the script so the client can download

                    QUrl scriptURL = url;
                    scriptURL.setPath(URI_ASSIGNMENT + "/scripts/"
                                      + uuidStringWithoutCurlyBraces(assignmentUUID));

                    // have the HTTPManager serve the appropriate script file
                    return _httpManager.handleHTTPRequest(connection, scriptURL, true);
                }
            }
        QUuid nodeUUID = QUuid(assignmentRegex.cap(1));

        auto matchingNode = nodeList->nodeWithUUID(nodeUUID);

        // don't handle if we don't have a matching node
        if (!matchingNode) {
            return false;
        }

        auto nodeData = dynamic_cast<DomainServerNodeData*>(matchingNode->getLinkedData());

        // don't handle if we don't have node data for this node
        if (!nodeData) {
            return false;
        }

        SharedAssignmentPointer matchingAssignment = _allAssignments.value(nodeData->getAssignmentUUID());

        // check if we have an assignment that matches this temp UUID, and it is a scripted assignment
        if (matchingAssignment && matchingAssignment->getType() == Assignment::AgentType) {
            // we have a matching assignment and it is for the right type, have the HTTP manager handle it
            // via correct URL for the script so the client can download

            QUrl scriptURL = url;
            scriptURL.setPath(URI_ASSIGNMENT + "/scripts/"
                              + uuidStringWithoutCurlyBraces(matchingAssignment->getUUID()));

            // have the HTTPManager serve the appropriate script file
            return _httpManager.handleHTTPRequest(connection, scriptURL, true);
        }

        // request not handled
        return false;
    }
@@ -1640,7 +1650,7 @@ void DomainServer::refreshStaticAssignmentAndAddToQueue(SharedAssignmentPointer&

void DomainServer::nodeAdded(SharedNodePointer node) {
    // we don't use updateNodeWithData, so add the DomainServerNodeData to the node here
    node->setLinkedData(new DomainServerNodeData());
    node->setLinkedData(std::unique_ptr<DomainServerNodeData> { new DomainServerNodeData() });
}

void DomainServer::nodeKilled(SharedNodePointer node) {
@@ -1835,14 +1845,25 @@ void DomainServer::processNodeDisconnectRequestPacket(QSharedPointer<NLPacket> p

    qDebug() << "Received a disconnect request from node with UUID" << nodeUUID;

    if (limitedNodeList->killNodeWithUUID(nodeUUID)) {
    // we want to check what type this node was before going to kill it so that we can avoid sending the RemovedNode
    // packet to nodes that don't care about this type
    auto nodeToKill = limitedNodeList->nodeWithUUID(nodeUUID);

    if (nodeToKill) {
        auto nodeType = nodeToKill->getType();
        limitedNodeList->killNodeWithUUID(nodeUUID);

        static auto removedNodePacket = NLPacket::create(PacketType::DomainServerRemovedNode, NUM_BYTES_RFC4122_UUID);

        removedNodePacket->reset();
        removedNodePacket->write(nodeUUID.toRfc4122());

        // broadcast out the DomainServerRemovedNode message
        limitedNodeList->eachNode([&limitedNodeList](const SharedNodePointer& otherNode){
        limitedNodeList->eachMatchingNode([&nodeType](const SharedNodePointer& otherNode) -> bool {
            // only send the removed node packet to nodes that care about the type of node this was
            auto nodeLinkedData = dynamic_cast<DomainServerNodeData*>(otherNode->getLinkedData());
            return (nodeLinkedData != nullptr) && nodeLinkedData->getNodeInterestSet().contains(nodeType);
        }, [&limitedNodeList](const SharedNodePointer& otherNode){
            limitedNodeList->sendUnreliablePacket(*removedNodePacket, *otherNode);
        });
    }
@@ -271,7 +271,7 @@ function loadBirds(howMany) {
            dimensions: { x: BIRD_SIZE, y: BIRD_SIZE, z: BIRD_SIZE },
            gravity: { x: 0, y: BIRD_GRAVITY, z: 0 },
            velocity: { x: 0, y: -0.1, z: 0 },
            linearDamping: LINEAR_DAMPING,
            damping: LINEAR_DAMPING,
            collisionsWillMove: true,
            lifetime: STARTING_LIFETIME,
            color: colors[whichBird]
@@ -14,16 +14,17 @@
// An assignment client script that animates one avatar at random location within 'spread' meters of 'origin'.
// In Domain Server Settings, go to scripts and give the url of this script. Press '+', and then 'Save and restart'.

var origin = {x: 500, y: 502, z: 500};
var spread = 10; // meters
var origin = {x: 500, y: 500, z: 500};
var spread = 20; // meters
var animationData = {url: "https://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/walk_fwd.fbx", lastFrame: 35};
Avatar.skeletonModelURL = "https://hifi-public.s3.amazonaws.com/marketplace/contents/dd03b8e3-52fb-4ab3-9ac9-3b17e00cd85d/98baa90b3b66803c5d7bd4537fca6993.fst"; //lovejoy
Avatar.displayName = "'Bot";
var millisecondsToWaitBeforeStarting = 10 * 1000; // To give the various servers a chance to start.

Agent.isAvatar = true;
function coord() { return (Math.random() * spread) - (spread / 2); } // randomly distribute a coordinate zero += spread/2.
Script.setTimeout(function () {
    Avatar.position = Vec3.sum(origin, {x: Math.random() * spread, y: 0, z: Math.random() * spread});
    Avatar.position = Vec3.sum(origin, {x: coord(), y: 0, z: coord()});
    print("Starting at", JSON.stringify(Avatar.position));
    Avatar.startAnimation(animationData.url, animationData.fps || 30, 1, true, false, animationData.firstFrame || 0, animationData.lastFrame);
}, millisecondsToWaitBeforeStarting);
@@ -1,673 +0,0 @@
|
|||
//
|
||||
// bot_procedural.js
|
||||
// hifi
|
||||
//
|
||||
// Created by Ben Arnold on 7/29/2013
|
||||
//
|
||||
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
|
||||
//
|
||||
// This is an example script that demonstrates an NPC avatar.
|
||||
//
|
||||
//
|
||||
|
||||
//For procedural walk animation
|
||||
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
|
||||
Script.include(HIFI_PUBLIC_BUCKET + "scripts/acScripts/proceduralAnimationAPI.js");
|
||||
|
||||
var procAnimAPI = new ProcAnimAPI();
|
||||
|
||||
function getRandomFloat(min, max) {
|
||||
return Math.random() * (max - min) + min;
|
||||
}
|
||||
|
||||
function getRandomInt (min, max) {
|
||||
return Math.floor(Math.random() * (max - min + 1)) + min;
|
||||
}
|
||||
|
||||
function printVector(string, vector) {
|
||||
print(string + " " + vector.x + ", " + vector.y + ", " + vector.z);
|
||||
}
|
||||
|
||||
var CHANCE_OF_MOVING = 0.005;
|
||||
var CHANCE_OF_SOUND = 0.005;
|
||||
var CHANCE_OF_HEAD_TURNING = 0.01;
|
||||
var CHANCE_OF_BIG_MOVE = 1.0;
|
||||
|
||||
var isMoving = false;
|
||||
var isTurningHead = false;
|
||||
var isPlayingAudio = false;
|
||||
|
||||
var X_MIN = 0.50;
|
||||
var X_MAX = 15.60;
|
||||
var Z_MIN = 0.50;
|
||||
var Z_MAX = 15.10;
|
||||
var Y_FEET = 0.0;
|
||||
var AVATAR_PELVIS_HEIGHT = 0.84;
|
||||
var Y_PELVIS = Y_FEET + AVATAR_PELVIS_HEIGHT;
|
||||
var MAX_PELVIS_DELTA = 2.5;
|
||||
|
||||
var MOVE_RANGE_SMALL = 3.0;
|
||||
var MOVE_RANGE_BIG = 10.0;
|
||||
var TURN_RANGE = 70.0;
|
||||
var STOP_TOLERANCE = 0.05;
|
||||
var MOVE_RATE = 0.05;
|
||||
var TURN_RATE = 0.2;
|
||||
var HEAD_TURN_RATE = 0.05;
|
||||
var PITCH_RANGE = 15.0;
|
||||
var YAW_RANGE = 35.0;
|
||||
|
||||
var firstPosition = { x: getRandomFloat(X_MIN, X_MAX), y: Y_PELVIS, z: getRandomFloat(Z_MIN, Z_MAX) };
|
||||
var targetPosition = { x: 0, y: 0, z: 0 };
|
||||
var targetOrientation = { x: 0, y: 0, z: 0, w: 0 };
|
||||
var currentOrientation = { x: 0, y: 0, z: 0, w: 0 };
|
||||
var targetHeadPitch = 0.0;
|
||||
var targetHeadYaw = 0.0;
|
||||
|
||||
var basePelvisHeight = 0.0;
|
||||
var pelvisOscillatorPosition = 0.0;
|
||||
var pelvisOscillatorVelocity = 0.0;
|
||||
|
||||
function clamp(val, min, max){
|
||||
return Math.max(min, Math.min(max, val))
|
||||
}
|
||||
|
||||
//Array of all valid bot numbers
|
||||
var validBotNumbers = [];
|
||||
|
||||
// right now we only use bot 63, since many other bots have messed up skeletons and LOD issues
|
||||
var botNumber = 63;//getRandomInt(0, 99);
|
||||
|
||||
var newFaceFilePrefix = "ron";
|
||||
|
||||
var newBodyFilePrefix = "bot" + botNumber;
|
||||
|
||||
// set the face model fst using the bot number
|
||||
// there is no need to change the body model - we're using the default
|
||||
Avatar.faceModelURL = HIFI_PUBLIC_BUCKET + "meshes/" + newFaceFilePrefix + ".fst";
|
||||
Avatar.skeletonModelURL = HIFI_PUBLIC_BUCKET + "meshes/" + newBodyFilePrefix + "_a.fst";
|
||||
Avatar.billboardURL = HIFI_PUBLIC_BUCKET + "meshes/billboards/bot" + botNumber + ".png";
|
||||
|
||||
Agent.isAvatar = true;
|
||||
Agent.isListeningToAudioStream = true;
|
||||
|
||||
// change the avatar's position to the random one
|
||||
Avatar.position = firstPosition;
|
||||
basePelvisHeight = firstPosition.y;
|
||||
printVector("New dancer, position = ", Avatar.position);
|
||||
|
||||
function loadSounds() {
|
||||
var sound_filenames = ["AB1.raw", "Anchorman2.raw", "B1.raw", "B1.raw", "Bale1.raw", "Bandcamp.raw",
|
||||
"Big1.raw", "Big2.raw", "Brian1.raw", "Buster1.raw", "CES1.raw", "CES2.raw", "CES3.raw", "CES4.raw",
|
||||
"Carrie1.raw", "Carrie3.raw", "Charlotte1.raw", "EN1.raw", "EN2.raw", "EN3.raw", "Eugene1.raw", "Francesco1.raw",
|
||||
"Italian1.raw", "Japanese1.raw", "Leigh1.raw", "Lucille1.raw", "Lucille2.raw", "MeanGirls.raw", "Murray2.raw",
|
||||
"Nigel1.raw", "PennyLane.raw", "Pitt1.raw", "Ricardo.raw", "SN.raw", "Sake1.raw", "Samantha1.raw", "Samantha2.raw",
|
||||
"Spicoli1.raw", "Supernatural.raw", "Swearengen1.raw", "TheDude.raw", "Tony.raw", "Triumph1.raw", "Uma1.raw",
|
||||
"Walken1.raw", "Walken2.raw", "Z1.raw", "Z2.raw"
|
||||
];
|
||||
|
||||
var footstep_filenames = ["FootstepW2Left-12db.wav", "FootstepW2Right-12db.wav", "FootstepW3Left-12db.wav", "FootstepW3Right-12db.wav",
|
||||
"FootstepW5Left-12db.wav", "FootstepW5Right-12db.wav"];
|
||||
|
||||
var SOUND_BASE_URL = HIFI_PUBLIC_BUCKET + "sounds/Cocktail+Party+Snippets/Raws/";
|
||||
|
||||
var FOOTSTEP_BASE_URL = HIFI_PUBLIC_BUCKET + "sounds/Footsteps/";
|
||||
|
||||
for (var i = 0; i < sound_filenames.length; i++) {
|
||||
sounds.push(SoundCache.getSound(SOUND_BASE_URL + sound_filenames[i]));
|
||||
}
|
||||
|
||||
for (var i = 0; i < footstep_filenames.length; i++) {
|
||||
footstepSounds.push(SoundCache.getSound(FOOTSTEP_BASE_URL + footstep_filenames[i]));
|
||||
}
|
||||
}
|
||||
|
||||
var sounds = [];
|
||||
var footstepSounds = [];
|
||||
loadSounds();
|
||||
|
||||
|
||||
function playRandomSound() {
|
||||
if (!Agent.isPlayingAvatarSound) {
|
||||
var whichSound = Math.floor((Math.random() * sounds.length));
|
||||
Agent.playAvatarSound(sounds[whichSound]);
|
||||
}
|
||||
}
|
||||
|
||||
function playRandomFootstepSound() {
|
||||
var whichSound = Math.floor((Math.random() * footstepSounds.length));
|
||||
Audio.playSound(footstepSounds[whichSound], {
|
||||
position: Avatar.position,
|
||||
volume: 1.0
|
||||
});
|
||||
}
|
||||
|
||||
// ************************************ Facial Animation **********************************
|
||||
var allBlendShapes = [];
|
||||
var targetBlendCoefficient = [];
|
||||
var currentBlendCoefficient = [];
|
||||
|
||||
//Blendshape constructor
|
||||
function addBlendshapeToPose(pose, shapeIndex, val) {
|
||||
var index = pose.blendShapes.length;
|
||||
pose.blendShapes[index] = {shapeIndex: shapeIndex, val: val };
|
||||
}
|
||||
//The mood of the avatar, determines face. 0 = happy, 1 = angry, 2 = sad.
|
||||
|
||||
//Randomly pick avatar mood. 80% happy, 10% mad 10% sad
|
||||
var randMood = Math.floor(Math.random() * 11);
|
||||
var avatarMood;
|
||||
if (randMood == 0) {
|
||||
avatarMood = 1;
|
||||
} else if (randMood == 2) {
|
||||
avatarMood = 2;
|
||||
} else {
|
||||
avatarMood = 0;
|
||||
}
|
||||
|
||||
var currentExpression = -1;
|
||||
//Face pose constructor
|
||||
var happyPoses = [];
|
||||
|
||||
happyPoses[0] = {blendShapes: []};
|
||||
addBlendshapeToPose(happyPoses[0], 28, 0.7); //MouthSmile_L
|
||||
addBlendshapeToPose(happyPoses[0], 29, 0.7); //MouthSmile_R
|
||||
|
||||
happyPoses[1] = {blendShapes: []};
|
||||
addBlendshapeToPose(happyPoses[1], 28, 1.0); //MouthSmile_L
|
||||
addBlendshapeToPose(happyPoses[1], 29, 1.0); //MouthSmile_R
|
||||
addBlendshapeToPose(happyPoses[1], 21, 0.2); //JawOpen
|
||||
|
||||
happyPoses[2] = {blendShapes: []};
|
||||
addBlendshapeToPose(happyPoses[2], 28, 1.0); //MouthSmile_L
|
||||
addBlendshapeToPose(happyPoses[2], 29, 1.0); //MouthSmile_R
|
||||
addBlendshapeToPose(happyPoses[2], 21, 0.5); //JawOpen
|
||||
addBlendshapeToPose(happyPoses[2], 46, 1.0); //CheekSquint_L
|
||||
addBlendshapeToPose(happyPoses[2], 47, 1.0); //CheekSquint_R
|
||||
addBlendshapeToPose(happyPoses[2], 17, 1.0); //BrowsU_L
|
||||
addBlendshapeToPose(happyPoses[2], 18, 1.0); //BrowsU_R
|
||||
|
||||
var angryPoses = [];
|
||||
|
||||
angryPoses[0] = {blendShapes: []};
|
||||
addBlendshapeToPose(angryPoses[0], 26, 0.6); //MouthFrown_L
|
||||
addBlendshapeToPose(angryPoses[0], 27, 0.6); //MouthFrown_R
|
||||
addBlendshapeToPose(angryPoses[0], 14, 0.6); //BrowsD_L
|
||||
addBlendshapeToPose(angryPoses[0], 15, 0.6); //BrowsD_R
|
||||
|
||||
angryPoses[1] = {blendShapes: []};
|
||||
addBlendshapeToPose(angryPoses[1], 26, 0.9); //MouthFrown_L
|
||||
addBlendshapeToPose(angryPoses[1], 27, 0.9); //MouthFrown_R
|
||||
addBlendshapeToPose(angryPoses[1], 14, 0.9); //BrowsD_L
|
||||
addBlendshapeToPose(angryPoses[1], 15, 0.9); //BrowsD_R
|
||||
|
||||
angryPoses[2] = {blendShapes: []};
|
||||
addBlendshapeToPose(angryPoses[2], 26, 1.0); //MouthFrown_L
|
||||
addBlendshapeToPose(angryPoses[2], 27, 1.0); //MouthFrown_R
|
||||
addBlendshapeToPose(angryPoses[2], 14, 1.0); //BrowsD_L
|
||||
addBlendshapeToPose(angryPoses[2], 15, 1.0); //BrowsD_R
|
||||
addBlendshapeToPose(angryPoses[2], 21, 0.5); //JawOpen
|
||||
addBlendshapeToPose(angryPoses[2], 46, 1.0); //CheekSquint_L
|
||||
addBlendshapeToPose(angryPoses[2], 47, 1.0); //CheekSquint_R
|
||||
|
||||
var sadPoses = [];
|
||||
|
||||
sadPoses[0] = {blendShapes: []};
|
||||
addBlendshapeToPose(sadPoses[0], 26, 0.6); //MouthFrown_L
|
||||
addBlendshapeToPose(sadPoses[0], 27, 0.6); //MouthFrown_R
|
||||
addBlendshapeToPose(sadPoses[0], 16, 0.2); //BrowsU_C
|
||||
addBlendshapeToPose(sadPoses[0], 2, 0.6); //EyeSquint_L
|
||||
addBlendshapeToPose(sadPoses[0], 3, 0.6); //EyeSquint_R
|
||||
|
||||
sadPoses[1] = {blendShapes: []};
|
||||
addBlendshapeToPose(sadPoses[1], 26, 0.9); //MouthFrown_L
|
||||
addBlendshapeToPose(sadPoses[1], 27, 0.9); //MouthFrown_R
|
||||
addBlendshapeToPose(sadPoses[1], 16, 0.6); //BrowsU_C
|
||||
addBlendshapeToPose(sadPoses[1], 2, 0.9); //EyeSquint_L
|
||||
addBlendshapeToPose(sadPoses[1], 3, 0.9); //EyeSquint_R
|
||||
|
||||
sadPoses[2] = {blendShapes: []};
|
||||
addBlendshapeToPose(sadPoses[2], 26, 1.0); //MouthFrown_L
|
||||
addBlendshapeToPose(sadPoses[2], 27, 1.0); //MouthFrown_R
|
||||
addBlendshapeToPose(sadPoses[2], 16, 0.1); //BrowsU_C
|
||||
addBlendshapeToPose(sadPoses[2], 2, 1.0); //EyeSquint_L
|
||||
addBlendshapeToPose(sadPoses[2], 3, 1.0); //EyeSquint_R
|
||||
addBlendshapeToPose(sadPoses[2], 21, 0.3); //JawOpen
|
||||
|
||||
var facePoses = [];
|
||||
facePoses[0] = happyPoses;
|
||||
facePoses[1] = angryPoses;
|
||||
facePoses[2] = sadPoses;
|
||||
|
||||
|
||||
function addBlendShape(s) {
|
||||
allBlendShapes[allBlendShapes.length] = s;
|
||||
}
|
||||
|
||||
//It is imperative that the following blendshapes are all present and are in the correct order
|
||||
addBlendShape("EyeBlink_L"); //0
|
||||
addBlendShape("EyeBlink_R"); //1
|
||||
addBlendShape("EyeSquint_L"); //2
|
||||
addBlendShape("EyeSquint_R"); //3
|
||||
addBlendShape("EyeDown_L"); //4
|
||||
addBlendShape("EyeDown_R"); //5
|
||||
addBlendShape("EyeIn_L"); //6
|
||||
addBlendShape("EyeIn_R"); //7
|
||||
addBlendShape("EyeOpen_L"); //8
|
||||
addBlendShape("EyeOpen_R"); //9
|
||||
addBlendShape("EyeOut_L"); //10
|
||||
addBlendShape("EyeOut_R"); //11
|
||||
addBlendShape("EyeUp_L"); //12
|
||||
addBlendShape("EyeUp_R"); //13
|
||||
addBlendShape("BrowsD_L"); //14
|
||||
addBlendShape("BrowsD_R"); //15
|
||||
addBlendShape("BrowsU_C"); //16
|
||||
addBlendShape("BrowsU_L"); //17
|
||||
addBlendShape("BrowsU_R"); //18
|
||||
addBlendShape("JawFwd"); //19
|
||||
addBlendShape("JawLeft"); //20
|
||||
addBlendShape("JawOpen"); //21
|
||||
addBlendShape("JawChew"); //22
|
||||
addBlendShape("JawRight"); //23
|
||||
addBlendShape("MouthLeft"); //24
|
||||
addBlendShape("MouthRight"); //25
|
||||
addBlendShape("MouthFrown_L"); //26
|
||||
addBlendShape("MouthFrown_R"); //27
|
||||
addBlendShape("MouthSmile_L"); //28
|
||||
addBlendShape("MouthSmile_R"); //29
|
||||
addBlendShape("MouthDimple_L"); //30
|
||||
addBlendShape("MouthDimple_R"); //31
|
||||
addBlendShape("LipsStretch_L"); //32
|
||||
addBlendShape("LipsStretch_R"); //33
|
||||
addBlendShape("LipsUpperClose"); //34
|
||||
addBlendShape("LipsLowerClose"); //35
|
||||
addBlendShape("LipsUpperUp"); //36
|
||||
addBlendShape("LipsLowerDown"); //37
|
||||
addBlendShape("LipsUpperOpen"); //38
|
||||
addBlendShape("LipsLowerOpen"); //39
|
||||
addBlendShape("LipsFunnel"); //40
|
||||
addBlendShape("LipsPucker"); //41
|
||||
addBlendShape("ChinLowerRaise"); //42
|
||||
addBlendShape("ChinUpperRaise"); //43
|
||||
addBlendShape("Sneer"); //44
|
||||
addBlendShape("Puff"); //45
|
||||
addBlendShape("CheekSquint_L"); //46
|
||||
addBlendShape("CheekSquint_R"); //47
|
||||
|
||||
for (var i = 0; i < allBlendShapes.length; i++) {
|
||||
targetBlendCoefficient[i] = 0;
|
||||
currentBlendCoefficient[i] = 0;
|
||||
}
|
||||
|
||||
function setRandomExpression() {
|
||||
|
||||
//Clear all expression data for current expression
|
||||
if (currentExpression != -1) {
|
||||
var expression = facePoses[avatarMood][currentExpression];
|
||||
for (var i = 0; i < expression.blendShapes.length; i++) {
|
||||
targetBlendCoefficient[expression.blendShapes[i].shapeIndex] = 0.0;
|
||||
}
|
||||
}
|
||||
//Get a new current expression
|
||||
currentExpression = Math.floor(Math.random() * facePoses[avatarMood].length);
|
||||
var expression = facePoses[avatarMood][currentExpression];
|
||||
for (var i = 0; i < expression.blendShapes.length; i++) {
|
||||
targetBlendCoefficient[expression.blendShapes[i].shapeIndex] = expression.blendShapes[i].val;
|
||||
}
|
||||
}
|
||||
|
||||
var expressionChangeSpeed = 0.1;
|
||||
function updateBlendShapes(deltaTime) {
|
||||
|
||||
for (var i = 0; i < allBlendShapes.length; i++) {
|
||||
currentBlendCoefficient[i] += (targetBlendCoefficient[i] - currentBlendCoefficient[i]) * expressionChangeSpeed;
|
||||
Avatar.setBlendshape(allBlendShapes[i], currentBlendCoefficient[i]);
|
||||
}
|
||||
}
|
||||
|
||||
var BLINK_SPEED = 0.15;
|
||||
var CHANCE_TO_BLINK = 0.0025;
|
||||
var MAX_BLINK = 0.85;
|
||||
var blink = 0.0;
|
||||
var isBlinking = false;
|
||||
function updateBlinking(deltaTime) {
|
||||
if (isBlinking == false) {
|
||||
if (Math.random() < CHANCE_TO_BLINK) {
|
||||
isBlinking = true;
|
||||
} else {
|
||||
blink -= BLINK_SPEED;
|
||||
if (blink < 0.0) blink = 0.0;
|
||||
}
|
||||
} else {
|
||||
blink += BLINK_SPEED;
|
||||
if (blink > MAX_BLINK) {
|
||||
blink = MAX_BLINK;
|
||||
isBlinking = false;
|
||||
}
|
||||
}
|
||||
|
||||
currentBlendCoefficient[0] = blink;
|
||||
currentBlendCoefficient[1] = blink;
|
||||
targetBlendCoefficient[0] = blink;
|
||||
targetBlendCoefficient[1] = blink;
|
||||
}
|
||||
|
||||
// *************************************************************************************
|
||||
|
||||
//Procedural walk animation using two keyframes
|
||||
//We use a separate array for front and back joints
|
||||
//Pitch, yaw, and roll for the joints
|
||||
var rightAngles = [];
|
||||
var leftAngles = [];
|
||||
//for non mirrored joints such as the spine
|
||||
var middleAngles = [];
|
||||
|
||||
//Actual joint mappings
|
||||
var SHOULDER_JOINT_NUMBER = 15;
|
||||
var ELBOW_JOINT_NUMBER = 16;
|
||||
var JOINT_R_HIP = 1;
|
||||
var JOINT_R_KNEE = 2;
|
||||
var JOINT_L_HIP = 6;
|
||||
var JOINT_L_KNEE = 7;
|
||||
var JOINT_R_ARM = 15;
|
||||
var JOINT_R_FOREARM = 16;
|
||||
var JOINT_L_ARM = 39;
|
||||
var JOINT_L_FOREARM = 40;
|
||||
var JOINT_SPINE = 11;
|
||||
var JOINT_R_FOOT = 3;
|
||||
var JOINT_L_FOOT = 8;
|
||||
var JOINT_R_TOE = 4;
|
||||
var JOINT_L_TOE = 9;
|
||||
|
||||
// ******************************* Animation Is Defined Below *************************************
|
||||
|
||||
var NUM_FRAMES = 2;
|
||||
for (var i = 0; i < NUM_FRAMES; i++) {
|
||||
rightAngles[i] = [];
|
||||
leftAngles[i] = [];
|
||||
middleAngles[i] = [];
|
||||
}
|
||||
//Joint order for actual joint mappings, should be interleaved R,L,R,L,...S,S,S for R = right, L = left, S = single
|
||||
var JOINT_ORDER = [];
|
||||
//*** right / left joints ***
|
||||
var HIP = 0;
|
||||
JOINT_ORDER.push(JOINT_R_HIP);
|
||||
JOINT_ORDER.push(JOINT_L_HIP);
|
||||
var KNEE = 1;
|
||||
JOINT_ORDER.push(JOINT_R_KNEE);
|
||||
JOINT_ORDER.push(JOINT_L_KNEE);
|
||||
var ARM = 2;
|
||||
JOINT_ORDER.push(JOINT_R_ARM);
|
||||
JOINT_ORDER.push(JOINT_L_ARM);
|
||||
var FOREARM = 3;
|
||||
JOINT_ORDER.push(JOINT_R_FOREARM);
|
||||
JOINT_ORDER.push(JOINT_L_FOREARM);
|
||||
var FOOT = 4;
|
||||
JOINT_ORDER.push(JOINT_R_FOOT);
|
||||
JOINT_ORDER.push(JOINT_L_FOOT);
|
||||
var TOE = 5;
|
||||
JOINT_ORDER.push(JOINT_R_TOE);
|
||||
JOINT_ORDER.push(JOINT_L_TOE);
|
||||
//*** middle joints ***
|
||||
var SPINE = 0;
|
||||
JOINT_ORDER.push(JOINT_SPINE);
|
||||
|
||||
//We have to store the angles so we can invert yaw and roll when making the animation
|
||||
//symmetrical
|
||||
|
||||
//Front refers to leg, not arm.
|
||||
//Legs Extending
|
||||
rightAngles[0][HIP] = [30.0, 0.0, 8.0];
|
||||
rightAngles[0][KNEE] = [-15.0, 0.0, 0.0];
|
||||
rightAngles[0][ARM] = [85.0, -25.0, 0.0];
|
||||
rightAngles[0][FOREARM] = [0.0, 0.0, -15.0];
|
||||
rightAngles[0][FOOT] = [0.0, 0.0, 0.0];
|
||||
rightAngles[0][TOE] = [0.0, 0.0, 0.0];
|
||||
|
||||
leftAngles[0][HIP] = [-15, 0.0, 8.0];
|
||||
leftAngles[0][KNEE] = [-26, 0.0, 0.0];
|
||||
leftAngles[0][ARM] = [85.0, 20.0, 0.0];
|
||||
leftAngles[0][FOREARM] = [10.0, 0.0, -25.0];
|
||||
leftAngles[0][FOOT] = [-13.0, 0.0, 0.0];
|
||||
leftAngles[0][TOE] = [34.0, 0.0, 0.0];
|
||||
|
||||
middleAngles[0][SPINE] = [0.0, -15.0, 5.0];
|
||||
|
||||
//Legs Passing
|
||||
rightAngles[1][HIP] = [6.0, 0.0, 8.0];
|
||||
rightAngles[1][KNEE] = [-12.0, 0.0, 0.0];
|
||||
rightAngles[1][ARM] = [85.0, 0.0, 0.0];
|
||||
rightAngles[1][FOREARM] = [0.0, 0.0, -15.0];
|
||||
rightAngles[1][FOOT] = [6.0, -8.0, 0.0];
|
||||
rightAngles[1][TOE] = [0.0, 0.0, 0.0];
|
||||
|
||||
leftAngles[1][HIP] = [10.0, 0.0, 8.0];
|
||||
leftAngles[1][KNEE] = [-60.0, 0.0, 0.0];
|
||||
leftAngles[1][ARM] = [85.0, 0.0, 0.0];
|
||||
leftAngles[1][FOREARM] = [0.0, 0.0, -15.0];
|
||||
leftAngles[1][FOOT] = [0.0, 0.0, 0.0];
|
||||
leftAngles[1][TOE] = [0.0, 0.0, 0.0];
|
||||
|
||||
middleAngles[1][SPINE] = [0.0, 0.0, 0.0];
|
||||
|
||||
//Actual keyframes for the animation
|
||||
var walkKeyFrames = procAnimAPI.generateKeyframes(rightAngles, leftAngles, middleAngles, NUM_FRAMES);
|
||||
|
||||
// ******************************* Animation Is Defined Above *************************************
|
||||
|
||||
// ********************************** Standing Key Frame ******************************************
|
||||
//We don't have to do any mirroring or anything, since this is just a single pose.
|
||||
var rightQuats = [];
|
||||
var leftQuats = [];
|
||||
var middleQuats = [];
|
||||
|
||||
rightQuats[HIP] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 7.0);
|
||||
rightQuats[KNEE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
|
||||
rightQuats[ARM] = Quat.fromPitchYawRollDegrees(85.0, 0.0, 0.0);
|
||||
rightQuats[FOREARM] = Quat.fromPitchYawRollDegrees(0.0, 0.0, -10.0);
|
||||
rightQuats[FOOT] = Quat.fromPitchYawRollDegrees(0.0, -8.0, 0.0);
|
||||
rightQuats[TOE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
|
||||
|
||||
leftQuats[HIP] = Quat.fromPitchYawRollDegrees(0, 0.0, -7.0);
|
||||
leftQuats[KNEE] = Quat.fromPitchYawRollDegrees(0, 0.0, 0.0);
|
||||
leftQuats[ARM] = Quat.fromPitchYawRollDegrees(85.0, 0.0, 0.0);
|
||||
leftQuats[FOREARM] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 10.0);
|
||||
leftQuats[FOOT] = Quat.fromPitchYawRollDegrees(0.0, 8.0, 0.0);
|
||||
leftQuats[TOE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
|
||||
|
||||
middleQuats[SPINE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
|
||||
|
||||
var standingKeyFrame = new procAnimAPI.KeyFrame(rightQuats, leftQuats, middleQuats);
|
||||
|
||||
// ************************************************************************************************
|
||||
|
||||
|
||||
var currentFrame = 0;
|
||||
|
||||
var walkTime = 0.0;
|
||||
|
||||
var walkWheelRadius = 0.5;
|
||||
var walkWheelRate = 2.0 * 3.141592 * walkWheelRadius / 8.0;
|
||||
|
||||
var avatarAcceleration = 0.75;
|
||||
var avatarVelocity = 0.0;
|
||||
var avatarMaxVelocity = 1.4;
|
||||
|
||||
function handleAnimation(deltaTime) {
|
||||
|
||||
updateBlinking(deltaTime);
|
||||
updateBlendShapes(deltaTime);
|
||||
|
||||
if (Math.random() < 0.01) {
|
||||
setRandomExpression();
|
||||
}
|
||||
|
||||
if (avatarVelocity == 0.0) {
|
||||
walkTime = 0.0;
|
||||
currentFrame = 0;
|
||||
} else {
|
||||
walkTime += avatarVelocity * deltaTime;
|
||||
if (walkTime > walkWheelRate) {
|
||||
walkTime = 0.0;
|
||||
currentFrame++;
|
||||
if (currentFrame % 2 == 1) {
|
||||
playRandomFootstepSound();
|
||||
}
|
||||
if (currentFrame > 3) {
|
||||
currentFrame = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var frame = walkKeyFrames[currentFrame];
|
||||
|
||||
var walkInterp = walkTime / walkWheelRate;
|
||||
var animInterp = avatarVelocity / (avatarMaxVelocity / 1.3);
|
||||
if (animInterp > 1.0) animInterp = 1.0;
|
||||
|
||||
for (var i = 0; i < JOINT_ORDER.length; i++) {
|
||||
var walkJoint = procAnimAPI.deCasteljau(frame.rotations[i], frame.nextFrame.rotations[i], frame.controlPoints[i][0], frame.controlPoints[i][1], walkInterp);
|
||||
var standJoint = standingKeyFrame.rotations[i];
|
||||
var finalJoint = Quat.mix(standJoint, walkJoint, animInterp);
|
||||
Avatar.setJointData(JOINT_ORDER[i], finalJoint);
|
||||
}
|
||||
}
|
||||
|
||||
function jumpWithLoudness(deltaTime) {
|
||||
// potentially change pelvis height depending on trailing average loudness
|
||||
|
||||
pelvisOscillatorVelocity += deltaTime * Agent.lastReceivedAudioLoudness * 700.0 ;
|
||||
|
||||
pelvisOscillatorVelocity -= pelvisOscillatorPosition * 0.75;
|
||||
pelvisOscillatorVelocity *= 0.97;
|
||||
pelvisOscillatorPosition += deltaTime * pelvisOscillatorVelocity;
|
||||
Avatar.headPitch = pelvisOscillatorPosition * 60.0;
|
||||
|
||||
var pelvisPosition = Avatar.position;
|
||||
pelvisPosition.y = (Y_PELVIS - 0.35) + pelvisOscillatorPosition;
|
||||
|
||||
if (pelvisPosition.y < Y_PELVIS) {
|
||||
pelvisPosition.y = Y_PELVIS;
|
||||
} else if (pelvisPosition.y > Y_PELVIS + 1.0) {
|
||||
pelvisPosition.y = Y_PELVIS + 1.0;
|
||||
}
|
||||
|
||||
Avatar.position = pelvisPosition;
|
||||
}
|
||||
|
||||
var forcedMove = false;
|
||||
|
||||
var wasMovingLastFrame = false;
|
||||
|
||||
function handleHeadTurn() {
|
||||
if (!isTurningHead && (Math.random() < CHANCE_OF_HEAD_TURNING)) {
|
||||
targetHeadPitch = getRandomFloat(-PITCH_RANGE, PITCH_RANGE);
|
||||
targetHeadYaw = getRandomFloat(-YAW_RANGE, YAW_RANGE);
|
||||
isTurningHead = true;
|
||||
} else {
|
||||
Avatar.headPitch = Avatar.headPitch + (targetHeadPitch - Avatar.headPitch) * HEAD_TURN_RATE;
|
||||
Avatar.headYaw = Avatar.headYaw + (targetHeadYaw - Avatar.headYaw) * HEAD_TURN_RATE;
|
||||
if (Math.abs(Avatar.headPitch - targetHeadPitch) < STOP_TOLERANCE &&
|
||||
Math.abs(Avatar.headYaw - targetHeadYaw) < STOP_TOLERANCE) {
|
||||
isTurningHead = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function stopWalking() {
|
||||
avatarVelocity = 0.0;
|
||||
isMoving = false;
|
||||
}
|
||||
|
||||
var MAX_ATTEMPTS = 40;
|
||||
function handleWalking(deltaTime) {
|
||||
|
||||
if (forcedMove || (!isMoving && Math.random() < CHANCE_OF_MOVING)) {
|
||||
// Set new target location
|
||||
|
||||
var moveRange;
|
||||
if (Math.random() < CHANCE_OF_BIG_MOVE) {
|
||||
moveRange = MOVE_RANGE_BIG;
|
||||
} else {
|
||||
moveRange = MOVE_RANGE_SMALL;
|
||||
}
|
||||
|
||||
//Keep trying new orientations if the desired target location is out of bounds
|
||||
var attempts = 0;
|
||||
do {
|
||||
targetOrientation = Quat.multiply(Avatar.orientation, Quat.angleAxis(getRandomFloat(-TURN_RANGE, TURN_RANGE), { x:0, y:1, z:0 }));
|
||||
var front = Quat.getFront(targetOrientation);
|
||||
|
||||
targetPosition = Vec3.sum(Avatar.position, Vec3.multiply(front, getRandomFloat(0.0, moveRange)));
|
||||
}
|
||||
while ((targetPosition.x < X_MIN || targetPosition.x > X_MAX || targetPosition.z < Z_MIN || targetPosition.z > Z_MAX)
|
||||
&& attempts < MAX_ATTEMPTS);
|
||||
|
||||
targetPosition.x = clamp(targetPosition.x, X_MIN, X_MAX);
|
||||
targetPosition.z = clamp(targetPosition.z, Z_MIN, Z_MAX);
|
||||
targetPosition.y = Y_PELVIS;
|
||||
|
||||
wasMovingLastFrame = true;
|
||||
isMoving = true;
|
||||
forcedMove = false;
|
||||
} else if (isMoving) {
|
||||
|
||||
var targetVector = Vec3.subtract(targetPosition, Avatar.position);
|
||||
var distance = Vec3.length(targetVector);
|
||||
if (distance <= avatarVelocity * deltaTime) {
|
||||
Avatar.position = targetPosition;
|
||||
stopWalking();
|
||||
} else {
|
||||
var direction = Vec3.normalize(targetVector);
|
||||
//Figure out if we should be slowing down
|
||||
var t = avatarVelocity / avatarAcceleration;
|
||||
var d = (avatarVelocity / 2.0) * t;
|
||||
if (distance < d) {
|
||||
avatarVelocity -= avatarAcceleration * deltaTime;
|
||||
if (avatarVelocity <= 0) {
|
||||
stopWalking();
|
||||
}
|
||||
} else {
|
||||
avatarVelocity += avatarAcceleration * deltaTime;
|
||||
if (avatarVelocity > avatarMaxVelocity) avatarVelocity = avatarMaxVelocity;
|
||||
}
|
||||
Avatar.position = Vec3.sum(Avatar.position, Vec3.multiply(direction, avatarVelocity * deltaTime));
|
||||
Avatar.orientation = Quat.mix(Avatar.orientation, targetOrientation, TURN_RATE);
|
||||
|
||||
wasMovingLastFrame = true;
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function handleTalking() {
|
||||
if (Math.random() < CHANCE_OF_SOUND) {
|
||||
playRandomSound();
|
||||
}
|
||||
}
|
||||
|
||||
function changePelvisHeight(newHeight) {
|
||||
var newPosition = Avatar.position;
|
||||
newPosition.y = newHeight;
|
||||
Avatar.position = newPosition;
|
||||
}
|
||||
|
||||
function updateBehavior(deltaTime) {
|
||||
|
||||
if (AvatarList.containsAvatarWithDisplayName("mrdj")) {
|
||||
if (wasMovingLastFrame) {
|
||||
isMoving = false;
|
||||
}
|
||||
|
||||
// we have a DJ, shouldn't we be dancing?
|
||||
jumpWithLoudness(deltaTime);
|
||||
} else {
|
||||
|
||||
// no DJ, let's just chill on the dancefloor - randomly walking and talking
|
||||
handleHeadTurn();
|
||||
handleAnimation(deltaTime);
|
||||
handleWalking(deltaTime);
|
||||
handleTalking();
|
||||
}
|
||||
}
|
||||
|
||||
Script.update.connect(updateBehavior);
|
164  examples/acScripts/playbackAgents.js  (new file)

@@ -0,0 +1,164 @@
|
|||
//
|
||||
// playbackAgents.js
|
||||
// acScripts
|
||||
//
|
||||
// Created by Edgar Pironti on 11/17/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// Set the following variables to the values needed
|
||||
var commandChannel = "com.highfidelity.PlaybackChannel1";
|
||||
var clip_url = null;
|
||||
var playFromCurrentLocation = true;
|
||||
var useDisplayName = true;
|
||||
var useAttachments = true;
|
||||
var useAvatarModel = true;
|
||||
|
||||
// ID of the agent. Two agents can't have the same ID.
|
||||
var announceIDChannel = "com.highfidelity.playbackAgent.announceID";
|
||||
var UNKNOWN_AGENT_ID = -2;
|
||||
var id = UNKNOWN_AGENT_ID; // unknown until aknowledged
|
||||
|
||||
// Set position/orientation/scale here if playFromCurrentLocation is true
|
||||
Avatar.position = { x:0, y: 0, z: 0 };
|
||||
Avatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
|
||||
Avatar.scale = 1.0;
|
||||
|
||||
var totalTime = 0;
|
||||
var subscribed = false;
|
||||
var WAIT_FOR_AUDIO_MIXER = 1;
|
||||
|
||||
// Script. DO NOT MODIFY BEYOND THIS LINE.
|
||||
var DO_NOTHING = 0;
|
||||
var PLAY = 1;
|
||||
var PLAY_LOOP = 2;
|
||||
var STOP = 3;
|
||||
var SHOW = 4;
|
||||
var HIDE = 5;
|
||||
var LOAD = 6;
|
||||
|
||||
Recording.setPlayFromCurrentLocation(playFromCurrentLocation);
|
||||
Recording.setPlayerUseDisplayName(useDisplayName);
|
||||
Recording.setPlayerUseAttachments(useAttachments);
|
||||
Recording.setPlayerUseHeadModel(false);
|
||||
Recording.setPlayerUseSkeletonModel(useAvatarModel);
|
||||
|
||||
function getAction(channel, message, senderID) {
|
||||
if(subscribed) {
|
||||
var command = JSON.parse(message);
|
||||
print("I'm the agent " + id + " and I received this: ID: " + command.id_key + " Action: " + command.action_key + " URL: " + command.clip_url_key);
|
||||
|
||||
if (command.id_key == id || command.id_key == -1) {
|
||||
if (command.action_key === 6) {
|
||||
clip_url = command.clip_url_key;
|
||||
}
|
||||
|
||||
action = command.action_key;
|
||||
print("That command was for me!");
|
||||
print("My clip is: " + clip_url);
|
||||
} else {
|
||||
action = DO_NOTHING;
|
||||
}
|
||||
|
||||
switch(action) {
|
||||
case PLAY:
|
||||
print("Play");
|
||||
if (!Agent.isAvatar) {
|
||||
Agent.isAvatar = true;
|
||||
}
|
||||
if (!Recording.isPlaying()) {
|
||||
Recording.startPlaying();
|
||||
}
|
||||
Recording.setPlayerLoop(false);
|
||||
break;
|
||||
case PLAY_LOOP:
|
||||
print("Play loop");
|
||||
if (!Agent.isAvatar) {
|
||||
Agent.isAvatar = true;
|
||||
}
|
||||
if (!Recording.isPlaying()) {
|
||||
Recording.startPlaying();
|
||||
}
|
||||
Recording.setPlayerLoop(true);
|
||||
break;
|
||||
case STOP:
|
||||
print("Stop");
|
||||
if (Recording.isPlaying()) {
|
||||
Recording.stopPlaying();
|
||||
}
|
||||
break;
|
||||
case SHOW:
|
||||
print("Show");
|
||||
if (!Agent.isAvatar) {
|
||||
Agent.isAvatar = true;
|
||||
}
|
||||
break;
|
||||
case HIDE:
|
||||
print("Hide");
|
||||
if (Recording.isPlaying()) {
|
||||
Recording.stopPlaying();
|
||||
}
|
||||
Agent.isAvatar = false;
|
||||
break;
|
||||
case LOAD:
|
||||
print("Load");
|
||||
if(clip_url !== null) {
|
||||
Recording.loadRecording(clip_url);
|
||||
}
|
||||
break;
|
||||
case DO_NOTHING:
|
||||
break;
|
||||
default:
|
||||
print("Unknown action: " + action);
|
||||
break;
|
||||
|
||||
}
|
||||
|
||||
if (Recording.isPlaying()) {
|
||||
Recording.play();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function update(deltaTime) {
|
||||
|
||||
totalTime += deltaTime;
|
||||
|
||||
if (totalTime > WAIT_FOR_AUDIO_MIXER) {
|
||||
if (!subscribed) {
|
||||
Messages.subscribe(commandChannel); // command channel
|
||||
Messages.subscribe(announceIDChannel); // id announce channel
|
||||
subscribed = true;
|
||||
print("I'm the agent and I am ready to receive!");
|
||||
}
|
||||
if (subscribed && id == UNKNOWN_AGENT_ID) {
|
||||
print("sending ready, id:" + id);
|
||||
Messages.sendMessage(announceIDChannel, "ready");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Messages.messageReceived.connect(function (channel, message, senderID) {
|
||||
if (channel == announceIDChannel && message != "ready") {
|
||||
// If I don't yet know if my ID has been recieved, then check to see if the master has acknowledged me
|
||||
if (id == UNKNOWN_AGENT_ID) {
|
||||
var parts = message.split(".");
|
||||
var agentID = parts[0];
|
||||
var agentIndex = parts[1];
|
||||
if (agentID == Agent.sessionUUID) {
|
||||
id = agentIndex;
|
||||
Messages.unsubscribe(announceIDChannel); // id announce channel
|
||||
}
|
||||
}
|
||||
}
|
||||
if (channel == commandChannel) {
|
||||
getAction(channel, message, senderID);
|
||||
}
|
||||
});
|
||||
|
||||
Script.update.connect(update);
|
284  examples/acScripts/playbackMaster.js  (new file)

@@ -0,0 +1,284 @@
|
|||
//
|
||||
// playbackMaster.js
|
||||
// acScripts
|
||||
//
|
||||
// Created by Edgar Pironti on 11/17/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
|
||||
|
||||
|
||||
var ac_number = 1; // This is the default number of ACs. Their ID need to be unique and between 0 (included) and ac_number (excluded)
|
||||
var names = new Array(); // It is possible to specify the name of the ACs in this array. ACs names ordered by IDs (Default name is "ACx", x = ID + 1))
|
||||
var channel = "com.highfidelity.PlaybackChannel1";
|
||||
var subscribed = false;
|
||||
var clip_url = null;
|
||||
var input_text = null;
|
||||
|
||||
var knownAgents = new Array; // We will add our known agents here when we discover them
|
||||
|
||||
// available playbackAgents will announce their sessionID here.
|
||||
var announceIDChannel = "com.highfidelity.playbackAgent.announceID";
|
||||
|
||||
// Script. DO NOT MODIFY BEYOND THIS LINE.
|
||||
Script.include("../libraries/toolBars.js");
|
||||
|
||||
var DO_NOTHING = 0;
|
||||
var PLAY = 1;
|
||||
var PLAY_LOOP = 2;
|
||||
var STOP = 3;
|
||||
var SHOW = 4;
|
||||
var HIDE = 5;
|
||||
var LOAD = 6;
|
||||
|
||||
var windowDimensions = Controller.getViewportDimensions();
|
||||
var TOOL_ICON_URL = HIFI_PUBLIC_BUCKET + "images/tools/";
|
||||
var ALPHA_ON = 1.0;
|
||||
var ALPHA_OFF = 0.7;
|
||||
var COLOR_TOOL_BAR = { red: 0, green: 0, blue: 0 };
|
||||
var COLOR_MASTER = { red: 0, green: 0, blue: 0 };
|
||||
var TEXT_HEIGHT = 12;
|
||||
var TEXT_MARGIN = 3;
|
||||
|
||||
var toolBars = new Array();
|
||||
var nameOverlays = new Array();
|
||||
var onOffIcon = new Array();
|
||||
var playIcon = new Array();
|
||||
var playLoopIcon = new Array();
|
||||
var stopIcon = new Array();
|
||||
var loadIcon = new Array();
|
||||
|
||||
setupPlayback();
|
||||
|
||||
function setupPlayback() {
|
||||
ac_number = Window.prompt("Insert number of agents: ","1");
|
||||
if (ac_number === "" || ac_number === null) {
|
||||
ac_number = 1;
|
||||
}
|
||||
Messages.subscribe(channel);
|
||||
subscribed = true;
|
||||
setupToolBars();
|
||||
}
|
||||
|
||||
function setupToolBars() {
|
||||
if (toolBars.length > 0) {
|
||||
print("Multiple calls to Recorder.js:setupToolBars()");
|
||||
return;
|
||||
}
|
||||
Tool.IMAGE_HEIGHT /= 2;
|
||||
Tool.IMAGE_WIDTH /= 2;
|
||||
|
||||
for (i = 0; i <= ac_number; i++) {
|
||||
toolBars.push(new ToolBar(0, 0, ToolBar.HORIZONTAL));
|
||||
toolBars[i].setBack((i == ac_number) ? COLOR_MASTER : COLOR_TOOL_BAR, ALPHA_OFF);
|
||||
|
||||
onOffIcon.push(toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "ac-on-off.svg",
|
||||
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
|
||||
x: 0, y: 0,
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_ON,
|
||||
visible: true
|
||||
}, true, true));
|
||||
|
||||
playIcon[i] = toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "play.svg",
|
||||
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}, false);
|
||||
|
||||
var playLoopWidthFactor = 1.65;
|
||||
playLoopIcon[i] = toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "play-and-loop.svg",
|
||||
subImage: { x: 0, y: 0, width: playLoopWidthFactor * Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
|
||||
width: playLoopWidthFactor * Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}, false);
|
||||
|
||||
stopIcon[i] = toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "recording-stop.svg",
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}, false);
|
||||
|
||||
loadIcon[i] = toolBars[i].addTool({
|
||||
imageURL: TOOL_ICON_URL + "recording-upload.svg",
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}, false);
|
||||
|
||||
nameOverlays.push(Overlays.addOverlay("text", {
|
||||
backgroundColor: { red: 0, green: 0, blue: 0 },
|
||||
font: { size: TEXT_HEIGHT },
|
||||
text: (i == ac_number) ? "Master" : i + ". " +
|
||||
((i < names.length) ? names[i] :
|
||||
"AC" + i),
|
||||
x: 0, y: 0,
|
||||
width: toolBars[i].width + ToolBar.SPACING,
|
||||
height: TEXT_HEIGHT + TEXT_MARGIN,
|
||||
leftMargin: TEXT_MARGIN,
|
||||
topMargin: TEXT_MARGIN,
|
||||
alpha: ALPHA_OFF,
|
||||
backgroundAlpha: ALPHA_OFF,
|
||||
visible: true
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
function sendCommand(id, action) {
|
||||
if (action === SHOW) {
|
||||
toolBars[id].selectTool(onOffIcon[id], false);
|
||||
toolBars[id].setAlpha(ALPHA_ON, playIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_ON, playLoopIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_ON, stopIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_ON, loadIcon[id]);
|
||||
} else if (action === HIDE) {
|
||||
toolBars[id].selectTool(onOffIcon[id], true);
|
||||
toolBars[id].setAlpha(ALPHA_OFF, playIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_OFF, playLoopIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_OFF, stopIcon[id]);
|
||||
toolBars[id].setAlpha(ALPHA_OFF, loadIcon[id]);
|
||||
} else if (toolBars[id].toolSelected(onOffIcon[id])) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (id == (toolBars.length - 1)) {
|
||||
id = -1; // Master command becomes broadcast.
|
||||
}
|
||||
|
||||
var message = {
|
||||
id_key: id,
|
||||
action_key: action,
|
||||
clip_url_key: clip_url
|
||||
};
|
||||
|
||||
if(subscribed){
|
||||
Messages.sendMessage(channel, JSON.stringify(message));
|
||||
print("Message sent!");
|
||||
clip_url = null;
|
||||
}
|
||||
}
|
||||
|
||||
function mousePressEvent(event) {
|
||||
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
|
||||
|
||||
// Check master control
|
||||
var i = toolBars.length - 1;
|
||||
if (onOffIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
if (toolBars[i].toolSelected(onOffIcon[i])) {
|
||||
sendCommand(i, SHOW);
|
||||
} else {
|
||||
sendCommand(i, HIDE);
|
||||
}
|
||||
} else if (playIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, PLAY);
|
||||
} else if (playLoopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, PLAY_LOOP);
|
||||
} else if (stopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, STOP);
|
||||
} else if (loadIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
input_text = Window.prompt("Insert the url of the clip: ","");
|
||||
if (!(input_text === "" || input_text === null)) {
|
||||
clip_url = input_text;
|
||||
sendCommand(i, LOAD);
|
||||
}
|
||||
} else {
|
||||
// Check individual controls
|
||||
for (i = 0; i < ac_number; i++) {
|
||||
if (onOffIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
if (toolBars[i].toolSelected(onOffIcon[i], false)) {
|
||||
sendCommand(i, SHOW);
|
||||
} else {
|
||||
sendCommand(i, HIDE);
|
||||
}
|
||||
} else if (playIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, PLAY);
|
||||
} else if (playLoopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, PLAY_LOOP);
|
||||
} else if (stopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
sendCommand(i, STOP);
|
||||
} else if (loadIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
|
||||
input_text = Window.prompt("Insert the url of the clip: ","");
|
||||
if (!(input_text === "" || input_text === null)) {
|
||||
clip_url = input_text;
|
||||
sendCommand(i, LOAD);
|
||||
}
|
||||
} else {
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function moveUI() {
|
||||
var textSize = TEXT_HEIGHT + 2 * TEXT_MARGIN;
|
||||
var relative = { x: 70, y: 75 + (ac_number) * (Tool.IMAGE_HEIGHT + ToolBar.SPACING + textSize) };
|
||||
|
||||
for (i = 0; i <= ac_number; i++) {
|
||||
toolBars[i].move(relative.x,
|
||||
windowDimensions.y - relative.y +
|
||||
i * (Tool.IMAGE_HEIGHT + ToolBar.SPACING + textSize));
|
||||
|
||||
Overlays.editOverlay(nameOverlays[i], {
|
||||
x: toolBars[i].x - ToolBar.SPACING,
|
||||
y: toolBars[i].y - textSize
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function update() {
|
||||
var newDimensions = Controller.getViewportDimensions();
|
||||
if (windowDimensions.x != newDimensions.x ||
|
||||
windowDimensions.y != newDimensions.y) {
|
||||
windowDimensions = newDimensions;
|
||||
moveUI();
|
||||
}
|
||||
}
|
||||
|
||||
function scriptEnding() {
|
||||
for (i = 0; i <= ac_number; i++) {
|
||||
toolBars[i].cleanup();
|
||||
Overlays.deleteOverlay(nameOverlays[i]);
|
||||
}
|
||||
|
||||
if (subscribed) {
|
||||
Messages.unsubscribe(channel);
|
||||
}
|
||||
Messages.unsubscribe(announceIDChannel);
|
||||
}
|
||||
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
||||
Script.update.connect(update);
|
||||
Script.scriptEnding.connect(scriptEnding);
|
||||
|
||||
|
||||
|
||||
Messages.subscribe(announceIDChannel);
|
||||
Messages.messageReceived.connect(function (channel, message, senderID) {
|
||||
if (channel == announceIDChannel && message == "ready") {
|
||||
// check to see if we know about this agent
|
||||
if (knownAgents.indexOf(senderID) < 0) {
|
||||
var indexOfNewAgent = knownAgents.length;
|
||||
knownAgents[indexOfNewAgent] = senderID;
|
||||
var acknowledgeMessage = senderID + "." + indexOfNewAgent;
|
||||
Messages.sendMessage(announceIDChannel, acknowledgeMessage);
|
||||
}
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
moveUI();
|
|
@ -37,9 +37,22 @@ var BUMPER_ON_VALUE = 0.5;
|
|||
var DISTANCE_HOLDING_RADIUS_FACTOR = 5; // multiplied by distance between hand and object
|
||||
var DISTANCE_HOLDING_ACTION_TIMEFRAME = 0.1; // how quickly objects move to their new position
|
||||
var DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR = 2.0; // object rotates this much more than hand did
|
||||
var NO_INTERSECT_COLOR = { red: 10, green: 10, blue: 255}; // line color when pick misses
|
||||
var INTERSECT_COLOR = { red: 250, green: 10, blue: 10}; // line color when pick hits
|
||||
var LINE_ENTITY_DIMENSIONS = { x: 1000, y: 1000,z: 1000};
|
||||
|
||||
var NO_INTERSECT_COLOR = {
|
||||
red: 10,
|
||||
green: 10,
|
||||
blue: 255
|
||||
}; // line color when pick misses
|
||||
var INTERSECT_COLOR = {
|
||||
red: 250,
|
||||
green: 10,
|
||||
blue: 10
|
||||
}; // line color when pick hits
|
||||
var LINE_ENTITY_DIMENSIONS = {
|
||||
x: 1000,
|
||||
y: 1000,
|
||||
z: 1000
|
||||
};
|
||||
var LINE_LENGTH = 500;
|
||||
var PICK_MAX_DISTANCE = 500; // max length of pick-ray
|
||||
|
||||
|
@ -74,6 +87,7 @@ var ZERO_VEC = {
|
|||
y: 0,
|
||||
z: 0
|
||||
};
|
||||
|
||||
var NULL_ACTION_ID = "{00000000-0000-0000-000000000000}";
|
||||
var MSEC_PER_SEC = 1000.0;
|
||||
|
||||
|
@ -83,14 +97,15 @@ var ACTION_TTL = 15; // seconds
|
|||
var ACTION_TTL_REFRESH = 5;
|
||||
var PICKS_PER_SECOND_PER_HAND = 5;
|
||||
var MSECS_PER_SEC = 1000.0;
|
||||
var GRABBABLE_PROPERTIES = ["position",
|
||||
"rotation",
|
||||
"gravity",
|
||||
"ignoreForCollisions",
|
||||
"collisionsWillMove",
|
||||
"locked",
|
||||
"name"];
|
||||
|
||||
var GRABBABLE_PROPERTIES = [
|
||||
"position",
|
||||
"rotation",
|
||||
"gravity",
|
||||
"ignoreForCollisions",
|
||||
"collisionsWillMove",
|
||||
"locked",
|
||||
"name"
|
||||
];
|
||||
|
||||
var GRABBABLE_DATA_KEY = "grabbableKey"; // shared with grab.js
|
||||
var GRAB_USER_DATA_KEY = "grabKey"; // shared with grab.js
|
||||
|
@ -100,8 +115,6 @@ var DEFAULT_GRABBABLE_DATA = {
|
|||
invertSolidWhileHeld: false
|
||||
};
|
||||
|
||||
var disabledHand ='none';
|
||||
|
||||
|
||||
// states for the state machine
|
||||
var STATE_OFF = 0;
|
||||
|
@ -125,40 +138,40 @@ var STATE_EQUIP_SPRING = 16;
|
|||
|
||||
function stateToName(state) {
|
||||
switch (state) {
|
||||
case STATE_OFF:
|
||||
return "off";
|
||||
case STATE_SEARCHING:
|
||||
return "searching";
|
||||
case STATE_DISTANCE_HOLDING:
|
||||
return "distance_holding";
|
||||
case STATE_CONTINUE_DISTANCE_HOLDING:
|
||||
return "continue_distance_holding";
|
||||
case STATE_NEAR_GRABBING:
|
||||
return "near_grabbing";
|
||||
case STATE_CONTINUE_NEAR_GRABBING:
|
||||
return "continue_near_grabbing";
|
||||
case STATE_NEAR_TRIGGER:
|
||||
return "near_trigger";
|
||||
case STATE_CONTINUE_NEAR_TRIGGER:
|
||||
return "continue_near_trigger";
|
||||
case STATE_FAR_TRIGGER:
|
||||
return "far_trigger";
|
||||
case STATE_CONTINUE_FAR_TRIGGER:
|
||||
return "continue_far_trigger";
|
||||
case STATE_RELEASE:
|
||||
return "release";
|
||||
case STATE_EQUIP_SEARCHING:
|
||||
return "equip_searching";
|
||||
case STATE_EQUIP:
|
||||
return "equip";
|
||||
case STATE_CONTINUE_EQUIP_BD:
|
||||
return "continue_equip_bd";
|
||||
case STATE_CONTINUE_EQUIP:
|
||||
return "continue_equip";
|
||||
case STATE_WAITING_FOR_BUMPER_RELEASE:
|
||||
return "waiting_for_bumper_release";
|
||||
case STATE_EQUIP_SPRING:
|
||||
return "state_equip_spring";
|
||||
case STATE_OFF:
|
||||
return "off";
|
||||
case STATE_SEARCHING:
|
||||
return "searching";
|
||||
case STATE_DISTANCE_HOLDING:
|
||||
return "distance_holding";
|
||||
case STATE_CONTINUE_DISTANCE_HOLDING:
|
||||
return "continue_distance_holding";
|
||||
case STATE_NEAR_GRABBING:
|
||||
return "near_grabbing";
|
||||
case STATE_CONTINUE_NEAR_GRABBING:
|
||||
return "continue_near_grabbing";
|
||||
case STATE_NEAR_TRIGGER:
|
||||
return "near_trigger";
|
||||
case STATE_CONTINUE_NEAR_TRIGGER:
|
||||
return "continue_near_trigger";
|
||||
case STATE_FAR_TRIGGER:
|
||||
return "far_trigger";
|
||||
case STATE_CONTINUE_FAR_TRIGGER:
|
||||
return "continue_far_trigger";
|
||||
case STATE_RELEASE:
|
||||
return "release";
|
||||
case STATE_EQUIP_SEARCHING:
|
||||
return "equip_searching";
|
||||
case STATE_EQUIP:
|
||||
return "equip";
|
||||
case STATE_CONTINUE_EQUIP_BD:
|
||||
return "continue_equip_bd";
|
||||
case STATE_CONTINUE_EQUIP:
|
||||
return "continue_equip";
|
||||
case STATE_WAITING_FOR_BUMPER_RELEASE:
|
||||
return "waiting_for_bumper_release";
|
||||
case STATE_EQUIP_SPRING:
|
||||
return "state_equip_spring";
|
||||
}
|
||||
|
||||
return "unknown";
|
||||
|
@ -187,7 +200,6 @@ function entityIsGrabbedByOther(entityID) {
|
|||
return false;
|
||||
}
|
||||
|
||||
|
||||
function MyController(hand) {
|
||||
this.hand = hand;
|
||||
if (this.hand === RIGHT_HAND) {
|
||||
|
@ -211,8 +223,17 @@ function MyController(hand) {
|
|||
this.rawTriggerValue = 0;
|
||||
this.rawBumperValue = 0;
|
||||
|
||||
this.offsetPosition = { x: 0.0, y: 0.0, z: 0.0 };
|
||||
this.offsetRotation = { x: 0.0, y: 0.0, z: 0.0, w: 1.0 };
|
||||
this.offsetPosition = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0
|
||||
};
|
||||
this.offsetRotation = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0,
|
||||
w: 1.0
|
||||
};
|
||||
|
||||
var _this = this;
|
||||
|
||||
|
@ -277,7 +298,7 @@ function MyController(hand) {
|
|||
this.state = newState;
|
||||
}
|
||||
|
||||
this.debugLine = function(closePoint, farPoint, color){
|
||||
this.debugLine = function(closePoint, farPoint, color) {
|
||||
Entities.addEntity({
|
||||
type: "Line",
|
||||
name: "Grab Debug Entity",
|
||||
|
@ -286,7 +307,14 @@ function MyController(hand) {
|
|||
position: closePoint,
|
||||
linePoints: [ZERO_VEC, farPoint],
|
||||
color: color,
|
||||
lifetime: 0.1
|
||||
lifetime: 0.1,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: true,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
grabbable: false
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -301,7 +329,14 @@ function MyController(hand) {
|
|||
position: closePoint,
|
||||
linePoints: [ZERO_VEC, farPoint],
|
||||
color: color,
|
||||
lifetime: LIFETIME
|
||||
lifetime: LIFETIME,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: true,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
grabbable: false
|
||||
}
|
||||
})
|
||||
});
|
||||
} else {
|
||||
var age = Entities.getEntityProperties(this.pointer, "age").age;
|
||||
|
@ -321,16 +356,16 @@ function MyController(hand) {
|
|||
this.pointer = null;
|
||||
};
|
||||
|
||||
this.triggerPress = function (value) {
|
||||
this.triggerPress = function(value) {
|
||||
_this.rawTriggerValue = value;
|
||||
};
|
||||
|
||||
this.bumperPress = function (value) {
|
||||
this.bumperPress = function(value) {
|
||||
_this.rawBumperValue = value;
|
||||
};
|
||||
|
||||
|
||||
this.updateSmoothedTrigger = function () {
|
||||
this.updateSmoothedTrigger = function() {
|
||||
var triggerValue = this.rawTriggerValue;
|
||||
// smooth out trigger value
|
||||
this.triggerValue = (this.triggerValue * TRIGGER_SMOOTH_RATIO) +
|
||||
|
@ -375,11 +410,6 @@ function MyController(hand) {
|
|||
this.search = function() {
|
||||
this.grabbedEntity = null;
|
||||
|
||||
// if this hand is the one that's disabled, we don't want to search for anything at all
|
||||
if (this.hand === disabledHand) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.state == STATE_SEARCHING ? this.triggerSmoothedReleased() : this.bumperReleased()) {
|
||||
this.setState(STATE_RELEASE);
|
||||
return;
|
||||
|
@ -401,7 +431,7 @@ function MyController(hand) {
|
|||
this.lastPickTime = now;
|
||||
}
|
||||
|
||||
for (var index=0; index < pickRays.length; ++index) {
|
||||
for (var index = 0; index < pickRays.length; ++index) {
|
||||
var pickRay = pickRays[index];
|
||||
var directionNormalized = Vec3.normalize(pickRay.direction);
|
||||
var directionBacked = Vec3.multiply(directionNormalized, PICK_BACKOFF_DISTANCE);
|
||||
|
@ -424,17 +454,7 @@ function MyController(hand) {
|
|||
// the ray is intersecting something we can move.
|
||||
var intersectionDistance = Vec3.distance(pickRay.origin, intersection.intersection);
|
||||
|
||||
//this code will disabled the beam for the opposite hand of the one that grabbed it if the entity says so
|
||||
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, intersection.entityID, DEFAULT_GRABBABLE_DATA);
|
||||
if (grabbableData["turnOffOppositeBeam"]) {
|
||||
if (this.hand === RIGHT_HAND) {
|
||||
disabledHand = LEFT_HAND;
|
||||
} else {
|
||||
disabledHand = RIGHT_HAND;
|
||||
}
|
||||
} else {
|
||||
disabledHand = 'none';
|
||||
}
|
||||
|
||||
if (intersection.properties.name == "Grab Debug Entity") {
|
||||
continue;
|
||||
|
@ -466,10 +486,9 @@ function MyController(hand) {
|
|||
}
|
||||
return;
|
||||
}
|
||||
} else if (! entityIsGrabbedByOther(intersection.entityID)) {
|
||||
} else if (!entityIsGrabbedByOther(intersection.entityID)) {
|
||||
// don't allow two people to distance grab the same object
|
||||
if (intersection.properties.collisionsWillMove
|
||||
&& !intersection.properties.locked) {
|
||||
if (intersection.properties.collisionsWillMove && !intersection.properties.locked) {
|
||||
// the hand is far from the intersected object. go into distance-holding mode
|
||||
this.grabbedEntity = intersection.entityID;
|
||||
if (typeof grabbableData.spatialKey !== 'undefined' && this.state == STATE_EQUIP_SEARCHING) {
|
||||
|
@ -494,11 +513,26 @@ function MyController(hand) {
|
|||
Entities.addEntity({
|
||||
type: "Sphere",
|
||||
name: "Grab Debug Entity",
|
||||
dimensions: {x: GRAB_RADIUS, y: GRAB_RADIUS, z: GRAB_RADIUS},
|
||||
dimensions: {
|
||||
x: GRAB_RADIUS,
|
||||
y: GRAB_RADIUS,
|
||||
z: GRAB_RADIUS
|
||||
},
|
||||
visible: true,
|
||||
position: handPosition,
|
||||
color: { red: 0, green: 255, blue: 0},
|
||||
lifetime: 0.1
|
||||
color: {
|
||||
red: 0,
|
||||
green: 255,
|
||||
blue: 0
|
||||
},
|
||||
lifetime: 0.1,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: true,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
grabbable: false
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -512,8 +546,7 @@ function MyController(hand) {
|
|||
if (typeof grabbableDataForCandidate.grabbable !== 'undefined' && !grabbableDataForCandidate.grabbable) {
|
||||
continue;
|
||||
}
|
||||
var propsForCandidate =
|
||||
Entities.getEntityProperties(nearbyEntities[i], GRABBABLE_PROPERTIES);
|
||||
var propsForCandidate = Entities.getEntityProperties(nearbyEntities[i], GRABBABLE_PROPERTIES);
|
||||
|
||||
if (propsForCandidate.type == 'Unknown') {
|
||||
continue;
|
||||
|
@ -604,6 +637,7 @@ function MyController(hand) {
|
|||
} else {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
|
||||
}
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startDistantGrab");
|
||||
}
|
||||
|
||||
|
@ -639,7 +673,7 @@ function MyController(hand) {
|
|||
|
||||
// the action was set up on a previous call. update the targets.
|
||||
var radius = Math.max(Vec3.distance(this.currentObjectPosition, handControllerPosition) *
|
||||
DISTANCE_HOLDING_RADIUS_FACTOR, DISTANCE_HOLDING_RADIUS_FACTOR);
|
||||
DISTANCE_HOLDING_RADIUS_FACTOR, DISTANCE_HOLDING_RADIUS_FACTOR);
|
||||
// how far did avatar move this timestep?
|
||||
var currentPosition = MyAvatar.position;
|
||||
var avatarDeltaPosition = Vec3.subtract(currentPosition, this.currentAvatarPosition);
|
||||
|
@ -688,9 +722,9 @@ function MyController(hand) {
|
|||
|
||||
// this doubles hand rotation
|
||||
var handChange = Quat.multiply(Quat.slerp(this.handPreviousRotation,
|
||||
handRotation,
|
||||
DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR),
|
||||
Quat.inverse(this.handPreviousRotation));
|
||||
handRotation,
|
||||
DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR),
|
||||
Quat.inverse(this.handPreviousRotation));
|
||||
this.handPreviousRotation = handRotation;
|
||||
this.currentObjectRotation = Quat.multiply(handChange, this.currentObjectRotation);
|
||||
|
||||
|
@ -708,15 +742,8 @@ function MyController(hand) {
|
|||
|
||||
this.nearGrabbing = function() {
|
||||
var now = Date.now();
|
||||
|
||||
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, this.grabbedEntity, DEFAULT_GRABBABLE_DATA);
|
||||
|
||||
var turnOffOtherHand = grabbableData["turnOffOtherHand"];
|
||||
if (turnOffOtherHand) {
|
||||
//don't activate the second hand grab because the script is handling the second hand logic
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.state == STATE_NEAR_GRABBING && this.triggerSmoothedReleased()) {
|
||||
this.setState(STATE_RELEASE);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "releaseGrab");
|
||||
|
@ -773,6 +800,8 @@ function MyController(hand) {
|
|||
this.setState(STATE_CONTINUE_NEAR_GRABBING);
|
||||
} else {
|
||||
// equipping
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startEquip", [JSON.stringify(this.hand)]);
|
||||
this.startHandGrasp();
|
||||
this.setState(STATE_CONTINUE_EQUIP_BD);
|
||||
}
|
||||
|
||||
|
@ -781,6 +810,9 @@ function MyController(hand) {
|
|||
} else {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
|
||||
}
|
||||
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
|
||||
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startNearGrab");
|
||||
|
||||
}
|
||||
|
@ -807,6 +839,7 @@ function MyController(hand) {
|
|||
}
|
||||
if (this.state == STATE_CONTINUE_NEAR_GRABBING && this.bumperSqueezed()) {
|
||||
this.setState(STATE_CONTINUE_EQUIP_BD);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startEquip", [JSON.stringify(this.hand)]);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -827,6 +860,10 @@ function MyController(hand) {
|
|||
this.currentObjectTime = now;
|
||||
Entities.callEntityMethod(this.grabbedEntity, "continueNearGrab");
|
||||
|
||||
if (this.state === STATE_CONTINUE_EQUIP_BD) {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "continueEquip");
|
||||
}
|
||||
|
||||
if (this.actionTimeout - now < ACTION_TTL_REFRESH * MSEC_PER_SEC) {
|
||||
// if less than 5 seconds are left, refresh the action's TTL
|
||||
Entities.updateAction(this.grabbedEntity, this.actionID, {
|
||||
|
@ -846,6 +883,8 @@ function MyController(hand) {
|
|||
if (this.bumperReleased()) {
|
||||
this.setState(STATE_RELEASE);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "releaseGrab");
|
||||
Entities.callEntityMethod(this.grabbedEntity, "unequip");
|
||||
this.endHandGrasp();
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -856,8 +895,17 @@ function MyController(hand) {
|
|||
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, this.grabbedEntity, DEFAULT_GRABBABLE_DATA);
|
||||
|
||||
// use a spring to pull the object to where it will be when equipped
|
||||
var relativeRotation = { x: 0.0, y: 0.0, z: 0.0, w: 1.0 };
|
||||
var relativePosition = { x: 0.0, y: 0.0, z: 0.0 };
|
||||
var relativeRotation = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0,
|
||||
w: 1.0
|
||||
};
|
||||
var relativePosition = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0
|
||||
};
|
||||
if (grabbableData.spatialKey.relativePosition) {
|
||||
relativePosition = grabbableData.spatialKey.relativePosition;
|
||||
}
|
||||
|
@ -913,6 +961,9 @@ function MyController(hand) {
|
|||
} else {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
|
||||
}
|
||||
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
|
||||
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startNearTrigger");
|
||||
this.setState(STATE_CONTINUE_NEAR_TRIGGER);
|
||||
};
|
||||
|
@ -929,6 +980,7 @@ function MyController(hand) {
|
|||
} else {
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setLeftHand");
|
||||
}
|
||||
Entities.callEntityMethod(this.grabbedEntity, "setHand", [this.hand]);
|
||||
Entities.callEntityMethod(this.grabbedEntity, "startFarTrigger");
|
||||
this.setState(STATE_CONTINUE_FAR_TRIGGER);
|
||||
};
|
||||
|
@ -1040,10 +1092,6 @@ function MyController(hand) {
|
|||
|
||||
this.release = function() {
|
||||
|
||||
if(this.hand !== disabledHand){
|
||||
//release the disabled hand when we let go with the main one
|
||||
disabledHand = 'none';
|
||||
}
|
||||
this.lineOff();
|
||||
|
||||
if (this.grabbedEntity !== null) {
|
||||
|
@ -1061,6 +1109,7 @@ function MyController(hand) {
|
|||
|
||||
this.cleanup = function() {
|
||||
this.release();
|
||||
this.endHandGrasp();
|
||||
};
|
||||
|
||||
this.activateEntity = function(entityID, grabbedProperties) {
|
||||
|
@ -1075,9 +1124,15 @@ function MyController(hand) {
|
|||
data["gravity"] = grabbedProperties.gravity;
|
||||
data["ignoreForCollisions"] = grabbedProperties.ignoreForCollisions;
|
||||
data["collisionsWillMove"] = grabbedProperties.collisionsWillMove;
|
||||
var whileHeldProperties = {gravity: {x:0, y:0, z:0}};
|
||||
var whileHeldProperties = {
|
||||
gravity: {
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
}
|
||||
};
|
||||
if (invertSolidWhileHeld) {
|
||||
whileHeldProperties["ignoreForCollisions"] = ! grabbedProperties.ignoreForCollisions;
|
||||
whileHeldProperties["ignoreForCollisions"] = !grabbedProperties.ignoreForCollisions;
|
||||
}
|
||||
Entities.editEntity(entityID, whileHeldProperties);
|
||||
}
|
||||
|
@ -1103,6 +1158,44 @@ function MyController(hand) {
|
|||
}
|
||||
setEntityCustomData(GRAB_USER_DATA_KEY, entityID, data);
|
||||
};
|
||||
|
||||
|
||||
//this is our handler, where we do the actual work of changing animation settings
|
||||
this.graspHand = function(animationProperties) {
|
||||
var result = {};
|
||||
//full alpha on overlay for this hand
|
||||
//set grab to true
|
||||
//set idle to false
|
||||
//full alpha on the blend btw open and grab
|
||||
if (_this.hand === RIGHT_HAND) {
|
||||
result['rightHandOverlayAlpha'] = 1.0;
|
||||
result['isRightHandGrab'] = true;
|
||||
result['isRightHandIdle'] = false;
|
||||
result['rightHandGrabBlend'] = 1.0;
|
||||
} else if (_this.hand === LEFT_HAND) {
|
||||
result['leftHandOverlayAlpha'] = 1.0;
|
||||
result['isLeftHandGrab'] = true;
|
||||
result['isLeftHandIdle'] = false;
|
||||
result['leftHandGrabBlend'] = 1.0;
|
||||
}
|
||||
//return an object with our updated settings
|
||||
return result;
|
||||
}
|
||||
|
||||
this.graspHandler = null
|
||||
this.startHandGrasp = function() {
|
||||
if (this.hand === RIGHT_HAND) {
|
||||
this.graspHandler = MyAvatar.addAnimationStateHandler(this.graspHand, ['isRightHandGrab']);
|
||||
} else if (this.hand === LEFT_HAND) {
|
||||
this.graspHandler = MyAvatar.addAnimationStateHandler(this.graspHand, ['isLeftHandGrab']);
|
||||
}
|
||||
}
|
||||
|
||||
this.endHandGrasp = function() {
|
||||
// Tell the animation system we don't need any more callbacks.
|
||||
MyAvatar.removeAnimationStateHandler(this.graspHandler);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var rightController = new MyController(RIGHT_HAND);
|
||||
|
@ -1119,12 +1212,35 @@ mapping.from([Controller.Standard.LB]).peek().to(leftController.bumperPress);
|
|||
|
||||
Controller.enableMapping(MAPPING_NAME);
|
||||
|
||||
var handToDisable = 'none';
|
||||
|
||||
function update() {
|
||||
rightController.update();
|
||||
leftController.update();
|
||||
if (handToDisable !== LEFT_HAND) {
|
||||
leftController.update();
|
||||
}
|
||||
if (handToDisable !== RIGHT_HAND) {
|
||||
rightController.update();
|
||||
}
|
||||
}
|
||||
|
||||
Messages.subscribe('Hifi-Hand-Disabler');
|
||||
|
||||
handleHandDisablerMessages = function(channel, message, sender) {
|
||||
|
||||
if (sender === MyAvatar.sessionUUID) {
|
||||
handToDisable = message;
|
||||
if (message === 'left') {
|
||||
handToDisable = LEFT_HAND;
|
||||
}
|
||||
if (message === 'right') {
|
||||
handToDisable = RIGHT_HAND;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Messages.messageReceived.connect(handleHandDisablerMessages);
|
||||
|
||||
function cleanup() {
|
||||
rightController.cleanup();
|
||||
leftController.cleanup();
|
||||
|
@ -1132,4 +1248,4 @@ function cleanup() {
|
|||
}
|
||||
|
||||
Script.scriptEnding.connect(cleanup);
|
||||
Script.update.connect(update);
|
||||
Script.update.connect(update);
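// (Sketch, not part of this commit.) The 'Hifi-Hand-Disabler' handler above reacts to plain
// string messages sent by the same avatar, so another local script could toggle grab handling
// like this; 'left' and 'right' are the messages the handler checks for, and any other string
// (for example 'none') re-enables both hands:

Messages.sendMessage('Hifi-Hand-Disabler', 'left'); // stop updating the left controller

Script.setTimeout(function() {
    Messages.sendMessage('Hifi-Hand-Disabler', 'none'); // back to both hands
}, 5000);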
|
|
@ -25,8 +25,6 @@ var LAST_FRAME = 15.0; // What is the number of the last frame we want to us
|
|||
var SMOOTH_FACTOR = 0.75;
|
||||
var MAX_FRAMES = 30.0;
|
||||
|
||||
var LEFT_HAND_CLICK = Controller.findAction("LEFT_HAND_CLICK");
|
||||
var RIGHT_HAND_CLICK = Controller.findAction("RIGHT_HAND_CLICK");
|
||||
|
||||
var CONTROLLER_DEAD_SPOT = 0.25;
|
||||
|
||||
|
@ -45,8 +43,8 @@ function normalizeControllerValue(val) {
|
|||
}
|
||||
|
||||
Script.update.connect(function(deltaTime) {
|
||||
var leftTrigger = normalizeControllerValue(Controller.getActionValue(LEFT_HAND_CLICK));
|
||||
var rightTrigger = normalizeControllerValue(Controller.getActionValue(RIGHT_HAND_CLICK));
|
||||
var leftTrigger = normalizeControllerValue(Controller.getValue(Controller.Standard.LT));
|
||||
var rightTrigger = normalizeControllerValue(Controller.getValue(Controller.Standard.RT));
|
||||
|
||||
// Average last few trigger values together for a bit of smoothing
|
||||
var smoothLeftTrigger = leftTrigger * (1.0 - SMOOTH_FACTOR) + lastLeftTrigger * SMOOTH_FACTOR;
|
||||
|
|
|
@ -19,4 +19,3 @@ Script.load("controllers/handControllerGrab.js");
|
|||
Script.load("grab.js");
|
||||
Script.load("directory.js");
|
||||
Script.load("dialTone.js");
|
||||
Script.load("libraries/omniTool.js");
|
||||
|
|
56
examples/entityScripts/messagesReceiverEntityExample.js
Normal file
|
@ -0,0 +1,56 @@
|
|||
//
|
||||
// messagesReceiverEntityExample.js
|
||||
// examples/entityScripts
|
||||
//
|
||||
// Created by Brad Hefta-Gaub on 11/18/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// This is an example of an entity script which, when assigned to an entity, subscribes to the "example" message channel and prints every message it receives
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
(function () {
|
||||
|
||||
var _this;
|
||||
|
||||
var messageReceived = function (channel, message, senderID) {
|
||||
print("message received on channel:" + channel + ", message:" + message + ", senderID:" + senderID);
|
||||
};
|
||||
|
||||
// this is the "constructor" for the entity as a JS object. We don't do much here, but we do want to remember
|
||||
// our this object, so we can access it in cases where we're called without a this (like in the case of various global signals)
|
||||
MessagesReceiver = function () {
|
||||
_this = this;
|
||||
};
|
||||
|
||||
MessagesReceiver.prototype = {
|
||||
|
||||
// preload() will be called when the entity has become visible (or known) to the interface
|
||||
// it gives us a chance to set our local JavaScript object up. In this case it means:
|
||||
// * remembering our entityID, so we can access it in cases where we're called without an entityID
|
||||
// * subscribing to the message channel
|
||||
// * connecting to the messageReceived signal
|
||||
preload: function (entityID) {
|
||||
this.entityID = entityID;
|
||||
|
||||
print("---- subscribing ----");
|
||||
Messages.subscribe("example");
|
||||
Messages.messageReceived.connect(messageReceived);
|
||||
},
|
||||
|
||||
// unload() will be called when the entity is no longer known to the interface
|
||||
// it gives us a chance to clean up our local JavaScript object. In this case it means:
|
||||
// * unsubscribing from messages
|
||||
// * disconnecting from the messageReceived signal
|
||||
unload: function (entityID) {
|
||||
print("---- unsubscribing ----");
|
||||
Messages.unsubscribe("example");
|
||||
Messages.messageReceived.disconnect(messageReceived);
|
||||
},
|
||||
};
|
||||
|
||||
// entity scripts always need to return a newly constructed object of our type
|
||||
return new MessagesReceiver();
|
||||
})
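// (Sketch, not part of this commit.) A companion sender for the receiver above could post to
// the same "example" channel; a minimal, hypothetical version:

var exampleChannel = "example";
var messageCount = 0;

var sendTimer = Script.setInterval(function () {
    messageCount++;
    Messages.sendMessage(exampleChannel, "test message #" + messageCount);
}, 3000);

Script.scriptEnding.connect(function () {
    Script.clearInterval(sendTimer);
});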
|
|
@ -16,24 +16,39 @@
|
|||
|
||||
var _this;
|
||||
var isAvatarRecording = false;
|
||||
var channel = "groupRecordingChannel";
|
||||
var startMessage = "RECONDING STARTED";
|
||||
var stopMessage = "RECONDING ENDED";
|
||||
var MASTER_TO_CLIENTS_CHANNEL = "startStopChannel";
|
||||
var CLIENTS_TO_MASTER_CHANNEL = "resultsChannel";
|
||||
var START_MESSAGE = "recordingStarted";
|
||||
var STOP_MESSAGE = "recordingEnded";
|
||||
var PARTICIPATING_MESSAGE = "participatingToRecording";
|
||||
var RECORDING_ICON_URL = "http://cdn.highfidelity.com/alan/production/icons/ICO_rec-active.svg";
|
||||
var NOT_RECORDING_ICON_URL = "http://cdn.highfidelity.com/alan/production/icons/ICO_rec-inactive.svg";
|
||||
var ICON_WIDTH = 60;
|
||||
var ICON_HEIGHT = 60;
|
||||
var overlay = null;
|
||||
|
||||
|
||||
function recordingEntity() {
|
||||
_this = this;
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
function receivingMessage(channel, message, senderID) {
|
||||
print("message received on channel:" + channel + ", message:" + message + ", senderID:" + senderID);
|
||||
if(message === startMessage) {
|
||||
_this.startRecording();
|
||||
} else if(message === stopMessage) {
|
||||
_this.stopRecording();
|
||||
if (channel === MASTER_TO_CLIENTS_CHANNEL) {
|
||||
print("CLIENT received message:" + message);
|
||||
if (message === START_MESSAGE) {
|
||||
_this.startRecording();
|
||||
} else if (message === STOP_MESSAGE) {
|
||||
_this.stopRecording();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function getClipUrl(url) {
|
||||
Messages.sendMessage(CLIENTS_TO_MASTER_CHANNEL, url); //send back the url to the master
|
||||
print("clip uploaded and url sent to master");
|
||||
};
|
||||
|
||||
recordingEntity.prototype = {
|
||||
|
||||
preload: function (entityID) {
|
||||
|
@ -50,41 +65,54 @@
|
|||
|
||||
enterEntity: function (entityID) {
|
||||
print("entering in the recording area");
|
||||
Messages.subscribe(channel);
|
||||
|
||||
Messages.subscribe(MASTER_TO_CLIENTS_CHANNEL);
|
||||
overlay = Overlays.addOverlay("image", {
|
||||
imageURL: NOT_RECORDING_ICON_URL,
|
||||
width: ICON_HEIGHT,
|
||||
height: ICON_WIDTH,
|
||||
x: 275,
|
||||
y: 0,
|
||||
visible: true
|
||||
});
|
||||
},
|
||||
|
||||
leaveEntity: function (entityID) {
|
||||
print("leaving the recording area");
|
||||
_this.stopRecording();
|
||||
Messages.unsubscribe(channel);
|
||||
Messages.unsubscribe(MASTER_TO_CLIENTS_CHANNEL);
|
||||
Overlays.deleteOverlay(overlay);
|
||||
overlay = null;
|
||||
},
|
||||
|
||||
startRecording: function (entityID) {
|
||||
startRecording: function () {
|
||||
if (!isAvatarRecording) {
|
||||
print("RECORDING STARTED");
|
||||
Messages.sendMessage(CLIENTS_TO_MASTER_CHANNEL, PARTICIPATING_MESSAGE); //tell the master that I'm participating
|
||||
Recording.startRecording();
|
||||
isAvatarRecording = true;
|
||||
Overlays.editOverlay(overlay, {imageURL: RECORDING_ICON_URL});
|
||||
}
|
||||
},
|
||||
|
||||
stopRecording: function (entityID) {
|
||||
stopRecording: function () {
|
||||
if (isAvatarRecording) {
|
||||
print("RECORDING ENDED");
|
||||
Recording.stopRecording();
|
||||
isAvatarRecording = false;
|
||||
recordingFile = Window.save("Save recording to file", "./groupRecording", "Recordings (*.hfr)");
|
||||
if (!(recordingFile === "null" || recordingFile === null || recordingFile === "")) {
|
||||
Recording.saveRecording(recordingFile);
|
||||
}
|
||||
Recording.saveRecordingToAsset(getClipUrl); //save the clip to the asset server and register a callback to get its url
|
||||
Overlays.editOverlay(overlay, {imageURL: NOT_RECORDING_ICON_URL});
|
||||
}
|
||||
},
|
||||
|
||||
unload: function (entityID) {
|
||||
print("RECORDING ENTITY UNLOAD");
|
||||
_this.stopRecording();
|
||||
Messages.unsubscribe(channel);
|
||||
Messages.unsubscribe(MASTER_TO_CLIENTS_CHANNEL);
|
||||
Messages.messageReceived.disconnect(receivingMessage);
|
||||
if (overlay !== null) {
|
||||
Overlays.deleteOverlay(overlay);
|
||||
overlay = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -22,12 +22,27 @@ var TOOL_ICON_URL = HIFI_PUBLIC_BUCKET + "images/tools/";
|
|||
var ALPHA_ON = 1.0;
|
||||
var ALPHA_OFF = 0.7;
|
||||
var COLOR_TOOL_BAR = { red: 0, green: 0, blue: 0 };
|
||||
var MASTER_TO_CLIENTS_CHANNEL = "startStopChannel";
|
||||
var CLIENTS_TO_MASTER_CHANNEL = "resultsChannel";
|
||||
var START_MESSAGE = "recordingStarted";
|
||||
var STOP_MESSAGE = "recordingEnded";
|
||||
var PARTICIPATING_MESSAGE = "participatingToRecording";
|
||||
var TIMEOUT = 20;
|
||||
|
||||
|
||||
var toolBar = null;
|
||||
var recordIcon;
|
||||
var isRecording = false;
|
||||
var channel = "groupRecordingChannel";
|
||||
Messages.subscribe(channel);
|
||||
var performanceJSON = { "avatarClips" : [] };
|
||||
var responsesExpected = 0;
|
||||
var readyToPrintInfo = false;
|
||||
var performanceFileURL = null;
|
||||
var waitingForPerformanceFile = true;
|
||||
var totalWaitingTime = 0;
|
||||
var extension = "txt";
|
||||
|
||||
|
||||
Messages.subscribe(CLIENTS_TO_MASTER_CHANNEL);
|
||||
setupToolBar();
|
||||
|
||||
function setupToolBar() {
|
||||
|
@ -50,28 +65,91 @@ function setupToolBar() {
|
|||
visible: true,
|
||||
}, true, isRecording);
|
||||
}
|
||||
toolBar.selectTool(recordIcon, !isRecording);
|
||||
|
||||
function mousePressEvent(event) {
|
||||
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
|
||||
if (recordIcon === toolBar.clicked(clickedOverlay, false)) {
|
||||
if (!isRecording) {
|
||||
print("I'm the master. I want to start recording");
|
||||
var message = "RECONDING STARTED";
|
||||
Messages.sendMessage(channel, message);
|
||||
Messages.sendMessage(MASTER_TO_CLIENTS_CHANNEL, START_MESSAGE);
|
||||
isRecording = true;
|
||||
waitingForPerformanceFile = true;
|
||||
} else {
|
||||
print("I want to stop recording");
|
||||
var message = "RECONDING ENDED";
|
||||
Messages.sendMessage(channel, message);
|
||||
Script.update.connect(update);
|
||||
Messages.sendMessage(MASTER_TO_CLIENTS_CHANNEL, STOP_MESSAGE);
|
||||
isRecording = false;
|
||||
}
|
||||
toolBar.selectTool(recordIcon, !isRecording);
|
||||
}
|
||||
}
|
||||
|
||||
function masterReceivingMessage(channel, message, senderID) {
|
||||
if (channel === CLIENTS_TO_MASTER_CHANNEL) {
|
||||
print("MASTER received message:" + message );
|
||||
if (message === PARTICIPATING_MESSAGE) {
|
||||
//increment the counter of all the participants
|
||||
responsesExpected++;
|
||||
} else if (waitingForPerformanceFile) {
|
||||
//we received an ATP url from one participant
|
||||
performanceJSON.avatarClips[performanceJSON.avatarClips.length] = message;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function update(deltaTime) {
|
||||
if (waitingForPerformanceFile) {
|
||||
totalWaitingTime += deltaTime;
|
||||
if (totalWaitingTime > TIMEOUT || performanceJSON.avatarClips.length === responsesExpected) {
|
||||
if (performanceJSON.avatarClips.length !== 0) {
|
||||
print("UPLOADING PERFORMANCE FILE");
|
||||
//upload the performance file to the asset server
|
||||
Assets.uploadData(JSON.stringify(performanceJSON), extension, uploadFinished);
|
||||
} else {
|
||||
print("PERFORMANCE FILE EMPTY");
|
||||
}
|
||||
//clean up after uploading the performance file to the asset server
|
||||
waitingForPerformanceFile = false;
|
||||
totalWaitingTime = 0;
|
||||
Script.update.disconnect(update);
|
||||
}
|
||||
} else if (readyToPrintInfo == true){
|
||||
Window.prompt("Performance file and clips: ", getUtilityString());
|
||||
responsesExpected = 0;
|
||||
performanceJSON = { "avatarClips" : [] };
|
||||
Script.update.disconnect(update);
|
||||
}
|
||||
}
|
||||
|
||||
function getUtilityString() {
|
||||
var resultString = "JSON:\n" + performanceFileURL + "\n" + responsesExpected + " avatar clips:\n";
|
||||
var avatarClips = performanceJSON.avatarClips;
|
||||
avatarClips.forEach(function(param) {
|
||||
resultString += param + "\n";
|
||||
});
|
||||
return resultString;
|
||||
}
|
||||
|
||||
function uploadFinished(url){
|
||||
//print the url here so the master can copy it
|
||||
print("some info:");
|
||||
performanceFileURL = url;
|
||||
print("PERFORMANCE FILE URL: " + performanceFileURL);
|
||||
print("number of clips obtained:" + responsesExpected);
|
||||
var avatarClips = performanceJSON.avatarClips;
|
||||
avatarClips.forEach(function(param) {
|
||||
print("clip url obtained: " + param);
|
||||
});
|
||||
readyToPrintInfo = true;
|
||||
Script.update.connect(update);
|
||||
}
|
||||
|
||||
function cleanup() {
|
||||
toolBar.cleanup();
|
||||
Messages.unsubscribe(channel);
|
||||
Messages.unsubscribe(CLIENTS_TO_MASTER_CHANNEL);
|
||||
}
|
||||
|
||||
Script.scriptEnding.connect(cleanup);
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
||||
Messages.messageReceived.connect(masterReceivingMessage);
|
|
@ -1,90 +0,0 @@
|
|||
//
|
||||
// synchronizerEntityScript.js
|
||||
// examples/entityScripts
|
||||
//
|
||||
// Created by Alessandro Signa on 11/12/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
|
||||
// This script shows how to create a synchronized event between avatars through an entity.
|
||||
// It works using the entity's userData: the master changes its value and every client checks it every frame
|
||||
// This entity prints a message when the event starts and when it ends.
|
||||
// The client running synchronizerMaster.js is the event master and it decides when the event starts/ends by pressing a button.
|
||||
// All the avatars in the area when the master presses the button will receive a message.
|
||||
//
|
||||
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
||||
|
||||
|
||||
|
||||
(function() {
|
||||
var insideArea = false;
|
||||
var isJoiningTheEvent = false;
|
||||
var _this;
|
||||
|
||||
|
||||
|
||||
function ParamsEntity() {
|
||||
_this = this;
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
ParamsEntity.prototype = {
|
||||
update: function(){
|
||||
var userData = JSON.parse(Entities.getEntityProperties(_this.entityID, ["userData"]).userData);
|
||||
var valueToCheck = userData.myKey.valueToCheck;
|
||||
if(valueToCheck && !isJoiningTheEvent){
|
||||
_this.sendMessage();
|
||||
}else if((!valueToCheck && isJoiningTheEvent) || (isJoiningTheEvent && !insideArea)){
|
||||
_this.stopMessage();
|
||||
}
|
||||
},
|
||||
preload: function(entityID) {
|
||||
print('entity loaded')
|
||||
this.entityID = entityID;
|
||||
Script.update.connect(_this.update);
|
||||
},
|
||||
enterEntity: function(entityID) {
|
||||
print("enterEntity("+entityID+")");
|
||||
var userData = JSON.parse(Entities.getEntityProperties(_this.entityID, ["userData"]).userData);
|
||||
var valueToCheck = userData.myKey.valueToCheck;
|
||||
if(!valueToCheck){
|
||||
//i'm in the area in time (before the event starts)
|
||||
insideArea = true;
|
||||
}
|
||||
change(entityID);
|
||||
},
|
||||
leaveEntity: function(entityID) {
|
||||
print("leaveEntity("+entityID+")");
|
||||
Entities.editEntity(entityID, { color: { red: 255, green: 190, blue: 20} });
|
||||
insideArea = false;
|
||||
},
|
||||
|
||||
sendMessage: function(myID){
|
||||
if(insideArea && !isJoiningTheEvent){
|
||||
print("The event started");
|
||||
isJoiningTheEvent = true;
|
||||
}
|
||||
},
|
||||
|
||||
stopMessage: function(myID){
|
||||
if(isJoiningTheEvent){
|
||||
print("The event ended");
|
||||
isJoiningTheEvent = false;
|
||||
}
|
||||
},
|
||||
clean: function(entityID) {
|
||||
Script.update.disconnect(_this.update);
|
||||
}
|
||||
}
|
||||
|
||||
function change(entityID) {
|
||||
Entities.editEntity(entityID, { color: { red: 255, green: 100, blue: 220} });
|
||||
}
|
||||
|
||||
|
||||
return new ParamsEntity();
|
||||
});
|
|
@ -1,117 +0,0 @@
|
|||
//
|
||||
// synchronizerMaster.js
|
||||
// examples/entityScripts
|
||||
//
|
||||
// Created by Alessandro Signa on 11/12/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Run this script to spawn a box (synchronizer) and drive the start/end of the event for anyone who is inside the box
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
||||
var PARAMS_SCRIPT_URL = Script.resolvePath('synchronizerEntityScript.js');
|
||||
|
||||
|
||||
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
|
||||
Script.include("../libraries/toolBars.js");
|
||||
Script.include("../libraries/utils.js");
|
||||
|
||||
|
||||
|
||||
var rotation = Quat.safeEulerAngles(Camera.getOrientation());
|
||||
rotation = Quat.fromPitchYawRollDegrees(0, rotation.y, 0);
|
||||
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(1, Quat.getFront(rotation)));
|
||||
|
||||
var TOOL_ICON_URL = HIFI_PUBLIC_BUCKET + "images/tools/";
|
||||
var ALPHA_ON = 1.0;
|
||||
var ALPHA_OFF = 0.7;
|
||||
var COLOR_TOOL_BAR = { red: 0, green: 0, blue: 0 };
|
||||
|
||||
var toolBar = null;
|
||||
var recordIcon;
|
||||
|
||||
|
||||
|
||||
var isHappening = false;
|
||||
|
||||
var testEntity = Entities.addEntity({
|
||||
name: 'paramsTestEntity',
|
||||
dimensions: {
|
||||
x: 2,
|
||||
y: 1,
|
||||
z: 2
|
||||
},
|
||||
type: 'Box',
|
||||
position: center,
|
||||
color: {
|
||||
red: 255,
|
||||
green: 255,
|
||||
blue: 255
|
||||
},
|
||||
visible: true,
|
||||
ignoreForCollisions: true,
|
||||
script: PARAMS_SCRIPT_URL,
|
||||
|
||||
userData: JSON.stringify({
|
||||
myKey: {
|
||||
valueToCheck: false
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
|
||||
setupToolBar();
|
||||
|
||||
function setupToolBar() {
|
||||
if (toolBar != null) {
|
||||
print("Multiple calls to setupToolBar()");
|
||||
return;
|
||||
}
|
||||
Tool.IMAGE_HEIGHT /= 2;
|
||||
Tool.IMAGE_WIDTH /= 2;
|
||||
|
||||
toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL); //put the button in the up-left corner
|
||||
|
||||
toolBar.setBack(COLOR_TOOL_BAR, ALPHA_OFF);
|
||||
|
||||
recordIcon = toolBar.addTool({
|
||||
imageURL: TOOL_ICON_URL + "recording-record.svg",
|
||||
subImage: { x: 0, y: 0, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
|
||||
x: 0, y: 0,
|
||||
width: Tool.IMAGE_WIDTH,
|
||||
height: Tool.IMAGE_HEIGHT,
|
||||
alpha: MyAvatar.isPlaying() ? ALPHA_OFF : ALPHA_ON,
|
||||
visible: true
|
||||
}, true, isHappening);
|
||||
|
||||
}
|
||||
|
||||
function mousePressEvent(event) {
|
||||
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
|
||||
if (recordIcon === toolBar.clicked(clickedOverlay, false)) {
|
||||
if (!isHappening) {
|
||||
print("I'm the event master. I want the event starts");
|
||||
isHappening = true;
|
||||
setEntityCustomData("myKey", testEntity, {valueToCheck: true});
|
||||
|
||||
} else {
|
||||
print("I want the event stops");
|
||||
isHappening = false;
|
||||
setEntityCustomData("myKey", testEntity, {valueToCheck: false});
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function cleanup() {
|
||||
toolBar.cleanup();
|
||||
Entities.callEntityMethod(testEntity, 'clean'); //have to call this before deleting to avoid the JSON warnings
|
||||
Entities.deleteEntity(testEntity);
|
||||
}
|
||||
|
||||
|
||||
|
||||
Script.scriptEnding.connect(cleanup);
|
||||
Controller.mousePressEvent.connect(mousePressEvent);
|
11
examples/example/assetsExample.js
Normal file
|
@ -0,0 +1,11 @@
|
|||
var data = "this is some data";
|
||||
var extension = "txt";
|
||||
var uploadedFile;
|
||||
|
||||
Assets.uploadData(data, extension, function (url) {
|
||||
print("data uploaded to:" + url);
|
||||
uploadedFile = url;
|
||||
Assets.downloadData(url, function (data) {
|
||||
print("data downloaded from:" + url + " the data is:" + data);
|
||||
});
|
||||
});
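// (Sketch, not part of this commit.) The same two calls round-trip structured data as well;
// the payload below is made up for illustration:

var settings = { volume: 0.5, looping: false }; // illustrative payload only

Assets.uploadData(JSON.stringify(settings), "txt", function (url) {
    print("settings uploaded to:" + url);
    Assets.downloadData(url, function (data) {
        print("round-tripped volume:" + JSON.parse(data).volume);
    });
});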
|
68
examples/example/avatarcontrol/graspHands.js
Normal file
|
@ -0,0 +1,68 @@
|
|||
// graspHands.js
|
||||
//
|
||||
// Created by James B. Pollack @imgntn -- 11/19/2015
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Shows how to use the animation API to grasp an Avatar's hands.
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
||||
//choose a hand. set it programmatically if you'd like
|
||||
var handToGrasp = 'LEFT_HAND';
|
||||
|
||||
//this is our handler, where we do the actual work of changing animation settings
|
||||
function graspHand(animationProperties) {
|
||||
var result = {};
|
||||
//full alpha on overlay for this hand
|
||||
//set grab to true
|
||||
//set idle to false
|
||||
//full alpha on the blend btw open and grab
|
||||
if (handToGrasp === 'RIGHT_HAND') {
|
||||
result['rightHandOverlayAlpha'] = 1.0;
|
||||
result['isRightHandGrab'] = true;
|
||||
result['isRightHandIdle'] = false;
|
||||
result['rightHandGrabBlend'] = 1.0;
|
||||
} else if (handToGrasp === 'LEFT_HAND') {
|
||||
result['leftHandOverlayAlpha'] = 1.0;
|
||||
result['isLeftHandGrab'] = true;
|
||||
result['isLeftHandIdle'] = false;
|
||||
result['leftHandGrabBlend'] = 1.0;
|
||||
}
|
||||
//return an object with our updated settings
|
||||
return result;
|
||||
}
|
||||
|
||||
//keep a reference to this so we can clear it
|
||||
var handler;
|
||||
|
||||
//register our handler with the animation system
|
||||
function startHandGrasp() {
|
||||
if (handToGrasp === 'RIGHT_HAND') {
|
||||
handler = MyAvatar.addAnimationStateHandler(graspHand, ['isRightHandGrab']);
|
||||
} else if (handToGrasp === 'LEFT_HAND') {
|
||||
handler = MyAvatar.addAnimationStateHandler(graspHand, ['isLeftHandGrab']);
|
||||
}
|
||||
}
|
||||
|
||||
function endHandGrasp() {
|
||||
// Tell the animation system we don't need any more callbacks.
|
||||
MyAvatar.removeAnimationStateHandler(handler);
|
||||
}
|
||||
|
||||
//make sure to clean this up when the script ends so we don't get stuck.
|
||||
Script.scriptEnding.connect(function() {
|
||||
Script.clearInterval(graspInterval);
|
||||
endHandGrasp();
|
||||
})
|
||||
|
||||
//set an interval and toggle grasping
|
||||
var isGrasping = false;
|
||||
var graspInterval = Script.setInterval(function() {
|
||||
if (isGrasping === false) {
|
||||
startHandGrasp();
|
||||
isGrasping = true;
|
||||
} else {
|
||||
endHandGrasp();
|
||||
isGrasping = false
|
||||
}
|
||||
}, 1000)
|
|
@ -361,8 +361,8 @@ function update() {
|
|||
}
|
||||
|
||||
function updateControllerState() {
|
||||
rightTriggerValue = Controller.getActionValue(rightHandClick);
|
||||
leftTriggerValue = Controller.getActionValue(leftHandClick);
|
||||
rightTriggerValue = Controller.getValue(Controller.Standard.RT);
|
||||
leftTriggerValue = Controller.getValue(Controller.Standard.LT);
|
||||
|
||||
if (rightTriggerValue > TRIGGER_THRESHOLD && !swordHeld) {
|
||||
grabSword("right")
|
||||
|
|
91
examples/kneel.js
Normal file
|
@ -0,0 +1,91 @@
|
|||
//
|
||||
// kneel.js
|
||||
// examples
|
||||
//
|
||||
// Created by Anthony Thibault on 11/9/2015
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
// Example of how to play an animation on an avatar.
|
||||
//
|
||||
|
||||
var buttonImageUrl = "https://s3.amazonaws.com/hifi-public/images/tools/kneel.svg";
|
||||
var windowDimensions = Controller.getViewportDimensions();
|
||||
|
||||
var buttonWidth = 37;
|
||||
var buttonHeight = 46;
|
||||
var buttonPadding = 10;
|
||||
|
||||
var buttonPositionX = windowDimensions.x - buttonPadding - buttonWidth;
|
||||
var buttonPositionY = (windowDimensions.y - buttonHeight) / 2 - (buttonHeight + buttonPadding);
|
||||
|
||||
var kneelDownImageOverlay = {
|
||||
x: buttonPositionX,
|
||||
y: buttonPositionY,
|
||||
width: buttonWidth,
|
||||
height: buttonHeight,
|
||||
subImage: { x: 0, y: buttonHeight, width: buttonWidth, height: buttonHeight },
|
||||
imageURL: buttonImageUrl,
|
||||
visible: true,
|
||||
alpha: 1.0
|
||||
};
|
||||
|
||||
var standUpImageOverlay = {
|
||||
x: buttonPositionX,
|
||||
y: buttonPositionY,
|
||||
width: buttonWidth,
|
||||
height: buttonHeight,
|
||||
subImage: { x: buttonWidth, y: buttonHeight, width: buttonWidth, height: buttonHeight },
|
||||
imageURL: buttonImageUrl,
|
||||
visible: false,
|
||||
alpha: 1.0
|
||||
};
|
||||
|
||||
var kneelDownButton = Overlays.addOverlay("image", kneelDownImageOverlay);
|
||||
var standUpButton = Overlays.addOverlay("image", standUpImageOverlay);
|
||||
var kneeling = false;
|
||||
|
||||
var KNEEL_ANIM_URL = "https://hifi-public.s3.amazonaws.com/ozan/anim/kneel/kneel.fbx";
|
||||
|
||||
function kneelDown() {
|
||||
kneeling = true;
|
||||
|
||||
var playbackRate = 30; // 30 fps is normal speed.
|
||||
var loopFlag = false;
|
||||
var startFrame = 0;
|
||||
var endFrame = 82;
|
||||
|
||||
// This will completely override all motion from the default animation system
|
||||
// including inverse kinematics for hand and head controllers.
|
||||
MyAvatar.overrideAnimation(KNEEL_ANIM_URL, playbackRate, loopFlag, startFrame, endFrame);
|
||||
|
||||
Overlays.editOverlay(kneelDownButton, { visible: false });
|
||||
Overlays.editOverlay(standUpButton, { visible: true });
|
||||
}
|
||||
|
||||
function standUp() {
|
||||
kneeling = false;
|
||||
|
||||
// this will restore all motion from the default animation system.
|
||||
// inverse kinematics will work again normally.
|
||||
MyAvatar.restoreAnimation();
|
||||
|
||||
Overlays.editOverlay(standUpButton, { visible: false });
|
||||
Overlays.editOverlay(kneelDownButton, { visible: true });
|
||||
}
|
||||
|
||||
Controller.mousePressEvent.connect(function (event) {
|
||||
var clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
|
||||
if (clickedOverlay == kneelDownButton) {
|
||||
kneelDown();
|
||||
} else if (clickedOverlay == standUpButton) {
|
||||
standUp();
|
||||
}
|
||||
});
|
||||
|
||||
Script.scriptEnding.connect(function() {
|
||||
Overlays.deleteOverlay(kneelDownButton);
|
||||
Overlays.deleteOverlay(standUpButton);
|
||||
});
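// (Sketch, not part of this commit.) overrideAnimation() accepts the same arguments for a
// looping clip; with loopFlag set, the kneel animation cycles until restoreAnimation() is
// called (frame range copied from kneelDown() above):

var LOOP_PLAYBACK_RATE = 30; // fps
MyAvatar.overrideAnimation(KNEEL_ANIM_URL, LOOP_PLAYBACK_RATE, true, 0, 82);
// later: MyAvatar.restoreAnimation();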
|
|
@ -76,7 +76,7 @@ var playerSphere = Entities.addEntity({
|
|||
z: 0
|
||||
},
|
||||
collisionsWillMove: true,
|
||||
linearDamping: 0.2
|
||||
damping: 0.2
|
||||
});
|
||||
|
||||
Script.setInterval(function(){
|
||||
|
|
|
@ -19,10 +19,11 @@ var MAX_LINE_LENGTH = 40; // This must be 2 or greater;
|
|||
var DEFAULT_STROKE_WIDTH = 0.1;
|
||||
var DEFAULT_LIFETIME = 20;
|
||||
var DEFAULT_COLOR = { red: 255, green: 255, blue: 255 };
|
||||
var PolyLine = function(position, color, lifetime) {
|
||||
var PolyLine = function(position, color, lifetime, texture) {
|
||||
this.position = position;
|
||||
this.color = color;
|
||||
this.lifetime = lifetime === undefined ? DEFAULT_LIFETIME : lifetime;
|
||||
this.texture = texture ? texture : "";
|
||||
this.points = [
|
||||
];
|
||||
this.strokeWidths = [
|
||||
|
@ -37,7 +38,8 @@ var PolyLine = function(position, color, lifetime) {
|
|||
strokeWidths: this.strokeWidths,
|
||||
dimensions: LINE_DIMENSIONS,
|
||||
color: color,
|
||||
lifetime: lifetime
|
||||
lifetime: lifetime,
|
||||
textures: this.texture
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -98,26 +100,29 @@ PolyLine.prototype.destroy = function() {
|
|||
|
||||
|
||||
// InfiniteLine
|
||||
InfiniteLine = function(position, color, lifetime) {
|
||||
InfiniteLine = function(position, color, lifetime, textureBegin, textureMiddle) {
|
||||
this.position = position;
|
||||
this.color = color;
|
||||
this.lifetime = lifetime === undefined ? DEFAULT_LIFETIME : lifetime;
|
||||
this.lines = [];
|
||||
this.size = 0;
|
||||
|
||||
this.textureBegin = textureBegin ? textureBegin : "";
|
||||
this.textureMiddle = textureMiddle ? textureMiddle : "";
|
||||
};
|
||||
|
||||
InfiniteLine.prototype.enqueuePoint = function(position, strokeWidth) {
|
||||
var currentLine;
|
||||
|
||||
if (this.lines.length == 0) {
|
||||
currentLine = new PolyLine(position, this.color, this.lifetime);
|
||||
currentLine = new PolyLine(position, this.color, this.lifetime, this.textureBegin);
|
||||
this.lines.push(currentLine);
|
||||
} else {
|
||||
currentLine = this.lines[this.lines.length - 1];
|
||||
}
|
||||
|
||||
if (currentLine.isFull()) {
|
||||
var newLine = new PolyLine(currentLine.getLastPoint(), this.color, this.lifetime);
|
||||
var newLine = new PolyLine(currentLine.getLastPoint(), this.color, this.lifetime, this.textureMiddle);
|
||||
newLine.enqueuePoint(currentLine.getLastPoint(), strokeWidth);
|
||||
this.lines.push(newLine);
|
||||
currentLine = newLine;
|
||||
|
|
|
@ -159,7 +159,8 @@ function MyController(hand, triggerAction) {
|
|||
}
|
||||
|
||||
this.updateControllerState = function() {
|
||||
this.triggerValue = Controller.getActionValue(this.triggerAction);
|
||||
this.triggerValue = Controller.getValue(this.triggerAction);
|
||||
|
||||
if (this.triggerValue > TRIGGER_ON_VALUE && this.prevTriggerValue <= TRIGGER_ON_VALUE) {
|
||||
this.squeeze();
|
||||
} else if (this.triggerValue < TRIGGER_ON_VALUE && this.prevTriggerValue >= TRIGGER_ON_VALUE) {
|
||||
|
@ -256,8 +257,8 @@ function MyController(hand, triggerAction) {
|
|||
}
|
||||
}
|
||||
|
||||
var rightController = new MyController(RIGHT_HAND, Controller.findAction("RIGHT_HAND_CLICK"));
|
||||
var leftController = new MyController(LEFT_HAND, Controller.findAction("LEFT_HAND_CLICK"));
|
||||
var rightController = new MyController(RIGHT_HAND, Controller.Standard.RT);
|
||||
var leftController = new MyController(LEFT_HAND, Controller.Standard.LT);
|
||||
|
||||
Controller.actionEvent.connect(function(action, state) {
|
||||
if (state === 0) {
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
var _this;
|
||||
var RIGHT_HAND = 1;
|
||||
var LEFT_HAND = 0;
|
||||
var MIN_POINT_DISTANCE = 0.01 ;
|
||||
var MIN_POINT_DISTANCE = 0.01;
|
||||
var MAX_POINT_DISTANCE = 0.5;
|
||||
var MAX_POINTS_PER_LINE = 40;
|
||||
var MAX_DISTANCE = 5;
|
||||
|
@ -29,6 +29,11 @@
|
|||
var MIN_STROKE_WIDTH = 0.0005;
|
||||
var MAX_STROKE_WIDTH = 0.03;
|
||||
|
||||
var TRIGGER_CONTROLS = [
|
||||
Controller.Standard.LT,
|
||||
Controller.Standard.RT,
|
||||
];
|
||||
|
||||
Whiteboard = function() {
|
||||
_this = this;
|
||||
};
|
||||
|
@ -51,11 +56,9 @@
|
|||
if (this.hand === RIGHT_HAND) {
|
||||
this.getHandPosition = MyAvatar.getRightPalmPosition;
|
||||
this.getHandRotation = MyAvatar.getRightPalmRotation;
|
||||
this.triggerAction = Controller.findAction("RIGHT_HAND_CLICK");
|
||||
} else if (this.hand === LEFT_HAND) {
|
||||
this.getHandPosition = MyAvatar.getLeftPalmPosition;
|
||||
this.getHandRotation = MyAvatar.getLeftPalmRotation;
|
||||
this.triggerAction = Controller.findAction("LEFT_HAND_CLICK");
|
||||
}
|
||||
Overlays.editOverlay(this.laserPointer, {
|
||||
visible: true
|
||||
|
@ -76,7 +79,7 @@
|
|||
if (this.intersection.intersects) {
|
||||
var distance = Vec3.distance(handPosition, this.intersection.intersection);
|
||||
if (distance < MAX_DISTANCE) {
|
||||
this.triggerValue = Controller.getActionValue(this.triggerAction);
|
||||
this.triggerValue = Controller.getValue(TRIGGER_CONTROLS[this.hand]);
|
||||
this.currentStrokeWidth = map(this.triggerValue, 0, 1, MIN_STROKE_WIDTH, MAX_STROKE_WIDTH);
|
||||
var displayPoint = this.intersection.intersection;
|
||||
displayPoint = Vec3.sum(displayPoint, Vec3.multiply(this.normal, 0.01));
|
||||
|
@ -184,7 +187,7 @@
|
|||
},
|
||||
|
||||
stopFarTrigger: function() {
|
||||
if(this.hand !== this.whichHand) {
|
||||
if (this.hand !== this.whichHand) {
|
||||
return;
|
||||
}
|
||||
this.stopPainting();
|
||||
|
@ -209,7 +212,7 @@
|
|||
entities.forEach(function(entity) {
|
||||
var props = Entities.getEntityProperties(entity, ["name, userData"]);
|
||||
var name = props.name;
|
||||
if(!props.userData) {
|
||||
if (!props.userData) {
|
||||
return;
|
||||
}
|
||||
var whiteboardID = JSON.parse(props.userData).whiteboard;
|
||||
|
|
|
@ -247,4 +247,4 @@ function cleanup() {
|
|||
|
||||
|
||||
// Uncomment this line to delete the whiteboard and all associated entities on script close
|
||||
// Script.scriptEnding.connect(cleanup);
|
||||
//Script.scriptEnding.connect(cleanup);
|
||||
|
|
|
@ -75,7 +75,7 @@ function updateJoints(factor){
|
|||
for (var i = 0; i < startPoseAndTransition.length; i++){
|
||||
var scaledTransition = Vec3.multiply(startPoseAndTransition[i].transition, factor);
|
||||
var rotation = Vec3.sum(startPoseAndTransition[i].start, scaledTransition);
|
||||
MyAvatar.setJointData(startPoseAndTransition[i].joint, Quat.fromVec3Degrees( rotation ));
|
||||
MyAvatar.setJointRotation(startPoseAndTransition[i].joint, Quat.fromVec3Degrees( rotation ));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -282,7 +282,8 @@ function update(deltaTime){
|
|||
MyAvatar.position.z != avatarOldPosition.z ||
|
||||
locationChanged) {
|
||||
avatarOldPosition = MyAvatar.position;
|
||||
|
||||
|
||||
/*
|
||||
var SEARCH_RADIUS = 50;
|
||||
var foundModels = Entities.findEntities(MyAvatar.position, SEARCH_RADIUS);
|
||||
// Let's remove indicator that got out of radius
|
||||
|
@ -306,6 +307,7 @@ function update(deltaTime){
|
|||
if (hiddingSeats && passedTime >= animationLenght) {
|
||||
showIndicators(true);
|
||||
}
|
||||
*/
|
||||
}
|
||||
}
|
||||
var oldHost = location.hostname;
|
||||
|
|
42
examples/tests/injectorTest.js
Normal file
|
@ -0,0 +1,42 @@
|
|||
//
|
||||
// injectorTests.js
|
||||
// examples
|
||||
//
|
||||
// Created by Stephen Birarda on 11/16/15.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var soundURL = "http://hifi-public.s3.amazonaws.com/birarda/medium-crowd.wav";
|
||||
var audioOptions = {
|
||||
position: { x: 0.0, y: 0.0, z: 0.0 },
|
||||
volume: 0.5
|
||||
};
|
||||
|
||||
var sound = SoundCache.getSound(soundURL);
|
||||
var injector = null;
|
||||
var restarting = false;
|
||||
|
||||
Script.update.connect(function(){
|
||||
if (sound.downloaded) {
|
||||
if (!injector) {
|
||||
injector = Audio.playSound(sound, audioOptions);
|
||||
} else if (!injector.isPlaying && !restarting) {
|
||||
restarting = true;
|
||||
|
||||
Script.setTimeout(function(){
|
||||
print("Calling restart for a stopped injector from script.");
|
||||
injector.restart();
|
||||
}, 1000);
|
||||
} else if (injector.isPlaying) {
|
||||
restarting = false;
|
||||
|
||||
if (Math.random() < 0.0001) {
|
||||
print("Calling restart for a running injector from script.");
|
||||
injector.restart();
|
||||
}
|
||||
}
|
||||
}
|
||||
})
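// (Sketch, not part of this commit.) For a quick manual check it can be simpler to pin the
// injector to the avatar; a minimal variant reusing the sound above (the 2-second delay is
// arbitrary):

Script.setTimeout(function () {
    if (sound.downloaded) {
        Audio.playSound(sound, { position: MyAvatar.position, volume: 0.5 });
    }
}, 2000);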
|
40
examples/tests/lodTest.js
Normal file
|
@ -0,0 +1,40 @@
|
|||
//
|
||||
// lodTest.js
|
||||
// examples/tests
|
||||
//
|
||||
// Created by Ryan Huffman on 11/19/15.
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var MIN_DIM = 0.001;
|
||||
var MAX_DIM = 2.0;
|
||||
var NUM_SPHERES = 20;
|
||||
|
||||
// Rough estimate of the width the spheres will span, not taking into account MIN_DIM
|
||||
var WIDTH = MAX_DIM * NUM_SPHERES;
|
||||
|
||||
var entities = [];
|
||||
var right = Quat.getRight(Camera.orientation);
|
||||
// Starting position will be 30 meters in front of the camera
|
||||
var position = Vec3.sum(Camera.position, Vec3.multiply(30, Quat.getFront(Camera.orientation)));
|
||||
position = Vec3.sum(position, Vec3.multiply(-WIDTH/2, right));
|
||||
|
||||
for (var i = 0; i < NUM_SPHERES; ++i) {
|
||||
var dim = (MAX_DIM - MIN_DIM) * ((i + 1) / NUM_SPHERES);
|
||||
entities.push(Entities.addEntity({
|
||||
type: "Sphere",
|
||||
dimensions: { x: dim, y: dim, z: dim },
|
||||
position: position,
|
||||
}));
|
||||
|
||||
position = Vec3.sum(position, Vec3.multiply(dim * 2, right));
|
||||
}
|
||||
|
||||
Script.scriptEnding.connect(function() {
|
||||
for (var i = 0; i < entities.length; ++i) {
|
||||
Entities.deleteEntity(entities[i]);
|
||||
}
|
||||
})
|
34
examples/theBird.js
Normal file
|
@ -0,0 +1,34 @@
|
|||
//
|
||||
// theBird.js
|
||||
// examples
|
||||
//
|
||||
// Created by Anthony Thibault on 11/9/2015
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
// Example of how to play an animation on an avatar.
|
||||
//
|
||||
|
||||
var THE_BIRD_RIGHT_URL = "https://hifi-public.s3.amazonaws.com/ozan/anim/the_bird/the_bird_right.fbx";
|
||||
|
||||
var roles = MyAvatar.getAnimationRoles();
|
||||
var i, l = roles.length
|
||||
print("getAnimationRoles()");
|
||||
for (i = 0; i < l; i++) {
|
||||
print(roles[i]);
|
||||
}
|
||||
|
||||
MyAvatar.prefetchAnimation(THE_BIRD_RIGHT_URL);
|
||||
|
||||
// replace point animations with the bird!
|
||||
MyAvatar.overrideRoleAnimation("rightHandPointIntro", THE_BIRD_RIGHT_URL, 30, false, 0, 12);
|
||||
MyAvatar.overrideRoleAnimation("rightHandPointHold", THE_BIRD_RIGHT_URL, 30, false, 12, 12);
|
||||
MyAvatar.overrideRoleAnimation("rightHandPointOutro", THE_BIRD_RIGHT_URL, 30, false, 19, 30);
|
||||
|
||||
Script.scriptEnding.connect(function() {
|
||||
MyAvatar.restoreRoleAnimation("rightHandPointIntro");
|
||||
MyAvatar.restoreRoleAnimation("rightHandPointHold");
|
||||
MyAvatar.restoreRoleAnimation("rightHandPointOutro");
|
||||
});
|
|
@ -41,7 +41,7 @@ var rack = Entities.addEntity({
|
|||
y: -9.8,
|
||||
z: 0
|
||||
},
|
||||
linearDamping: 1,
|
||||
damping: 1,
|
||||
dimensions: {
|
||||
x: 0.4,
|
||||
y: 1.37,
|
||||
|
@ -83,7 +83,7 @@ function createBalls() {
|
|||
z: DIAMETER
|
||||
},
|
||||
restitution: 1.0,
|
||||
linearDamping: 0.00001,
|
||||
damping: 0.00001,
|
||||
gravity: {
|
||||
x: 0,
|
||||
y: -9.8,
|
||||
|
|
|
@ -46,7 +46,7 @@ function makeBasketball() {
|
|||
collisionSoundURL: collisionSoundURL,
|
||||
modelURL: basketballURL,
|
||||
restitution: 1.0,
|
||||
linearDamping: 0.00001,
|
||||
damping: 0.00001,
|
||||
shapeType: "sphere"
|
||||
});
|
||||
originalPosition = position;
|
||||
|
|
|
@ -88,7 +88,7 @@ var topBlock = Entities.addEntity({
|
|||
dimensions: blockDimensions,
|
||||
position: topBlock_position,
|
||||
rotation: topBlock_rotation,
|
||||
linearDamping: LINEAR_DAMPING,
|
||||
damping: LINEAR_DAMPING,
|
||||
gravity: BLOCK_GRAVITY,
|
||||
collisionsWillMove: true,
|
||||
velocity: {
|
||||
|
@ -106,7 +106,7 @@ var sideBlock1 = Entities.addEntity({
|
|||
dimensions: blockDimensions,
|
||||
position: sideBlock1_position,
|
||||
rotation: sideBlock1_rotation,
|
||||
linearDamping: LINEAR_DAMPING,
|
||||
damping: LINEAR_DAMPING,
|
||||
gravity: BLOCK_GRAVITY,
|
||||
collisionsWillMove: true
|
||||
});
|
||||
|
@ -120,7 +120,7 @@ var sideBlock2 = Entities.addEntity({
|
|||
position: sideBlock2_position,
|
||||
rotation: sideBlock2_rotation,
|
||||
collsionsWillMove: true,
|
||||
linearDamping: LINEAR_DAMPING,
|
||||
damping: LINEAR_DAMPING,
|
||||
gravity: BLOCK_GRAVITY,
|
||||
collisionsWillMove: true
|
||||
});
|
||||
|
|
628
examples/toybox/bow/bow.js
Normal file
628
examples/toybox/bow/bow.js
Normal file
|
@ -0,0 +1,628 @@
|
|||
//
|
||||
// bow.js
|
||||
//
|
||||
// This script attaches to a bow that you can pick up with a hand controller.
|
||||
// Created by James B. Pollack @imgntn on 10/19/2015
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
(function() {
|
||||
|
||||
Script.include("../../libraries/utils.js");
|
||||
|
||||
var NOTCH_ARROW_SOUND_URL = 'http://hifi-content.s3.amazonaws.com/james/bow_and_arrow/sounds/notch.wav';
|
||||
var SHOOT_ARROW_SOUND_URL = 'http://hifi-content.s3.amazonaws.com/james/bow_and_arrow/sounds/String_release2.L.wav';
|
||||
var STRING_PULL_SOUND_URL = 'http://hifi-content.s3.amazonaws.com/james/bow_and_arrow/sounds/Bow_draw.1.L.wav';
|
||||
var ARROW_HIT_SOUND_URL = 'http://hifi-content.s3.amazonaws.com/james/bow_and_arrow/sounds/Arrow_impact1.L.wav'
|
||||
|
||||
var ARROW_DIMENSIONS = {
|
||||
x: 0.02,
|
||||
y: 0.02,
|
||||
z: 0.72
|
||||
};
|
||||
|
||||
var ARROW_OFFSET = -0.44;
|
||||
var ARROW_TIP_OFFSET = 0.32;
|
||||
var ARROW_GRAVITY = {
|
||||
x: 0,
|
||||
y: -4.8,
|
||||
z: 0
|
||||
};
|
||||
|
||||
var ARROW_MODEL_URL = "http://hifi-content.s3.amazonaws.com/james/bow_and_arrow/models/newarrow_textured.fbx";
|
||||
var ARROW_COLLISION_HULL_URL = "http://hifi-content.s3.amazonaws.com/james/bow_and_arrow/models/newarrow_collision_hull.obj";
|
||||
|
||||
var ARROW_DIMENSIONS = {
|
||||
x: 0.02,
|
||||
y: 0.02,
|
||||
z: 0.64
|
||||
};
|
||||
|
||||
|
||||
var TOP_NOTCH_OFFSET = 0.6;
|
||||
var BOTTOM_NOTCH_OFFSET = 0.6;
|
||||
|
||||
var LINE_DIMENSIONS = {
|
||||
x: 5,
|
||||
y: 5,
|
||||
z: 5
|
||||
};
|
||||
|
||||
var DRAW_STRING_THRESHOLD = 0.80;
|
||||
|
||||
var LEFT_TIP = 1;
|
||||
var RIGHT_TIP = 3;
|
||||
|
||||
var NOTCH_OFFSET_FORWARD = 0.08;
|
||||
var NOTCH_OFFSET_UP = 0.035;
|
||||
|
||||
var SHOT_SCALE = {
|
||||
min1: 0,
|
||||
max1: 0.6,
|
||||
min2: 1,
|
||||
max2: 15
|
||||
}
|
||||
|
||||
var BOW_SPATIAL_KEY = {
|
||||
relativePosition: {
|
||||
x: 0,
|
||||
y: 0.06,
|
||||
z: 0.11
|
||||
},
|
||||
relativeRotation: Quat.fromPitchYawRollDegrees(0, -90, 90)
|
||||
}
|
||||
|
||||
|
||||
var USE_DEBOUNCE = false;
|
||||
|
||||
var TRIGGER_CONTROLS = [
|
||||
Controller.Standard.LT,
|
||||
Controller.Standard.RT,
|
||||
];
|
||||
|
||||
function interval() {
|
||||
var lastTime = new Date().getTime();
|
||||
|
||||
return function getInterval() {
|
||||
var newTime = new Date().getTime();
|
||||
var delta = newTime - lastTime;
|
||||
lastTime = newTime;
|
||||
return delta;
|
||||
};
|
||||
}
|
||||
|
||||
var checkInterval = interval();
|
||||
|
||||
var _this;
|
||||
|
||||
function Bow() {
|
||||
_this = this;
|
||||
return;
|
||||
}
|
||||
|
||||
Bow.prototype = {
|
||||
isGrabbed: false,
|
||||
stringDrawn: false,
|
||||
aiming: false,
|
||||
arrowTipPosition: null,
|
||||
preNotchString: null,
|
||||
hasArrowNotched: false,
|
||||
arrow: null,
|
||||
stringData: {
|
||||
currentColor: {
|
||||
red: 255,
|
||||
green: 255,
|
||||
blue: 255
|
||||
}
|
||||
},
|
||||
sinceLastUpdate: 0,
|
||||
preload: function(entityID) {
|
||||
this.entityID = entityID;
|
||||
this.stringPullSound = SoundCache.getSound(STRING_PULL_SOUND_URL);
|
||||
this.shootArrowSound = SoundCache.getSound(SHOOT_ARROW_SOUND_URL);
|
||||
this.arrowHitSound = SoundCache.getSound(ARROW_HIT_SOUND_URL);
|
||||
this.arrowNotchSound = SoundCache.getSound(NOTCH_ARROW_SOUND_URL);
|
||||
|
||||
},
|
||||
|
||||
unload: function() {
|
||||
this.deleteStrings();
|
||||
Entities.deleteEntity(this.preNotchString);
|
||||
Entities.deleteEntity(this.arrow);
|
||||
},
|
||||
|
||||
setLeftHand: function() {
|
||||
if (this.isGrabbed === true) {
|
||||
return false;
|
||||
}
|
||||
this.hand = 'left';
|
||||
},
|
||||
|
||||
setRightHand: function() {
|
||||
if (this.isGrabbed === true) {
|
||||
return false;
|
||||
}
|
||||
this.hand = 'right';
|
||||
},
|
||||
|
||||
startNearGrab: function() {
|
||||
|
||||
print('START BOW GRAB')
|
||||
if (this.isGrabbed === true) {
|
||||
return false;
|
||||
}
|
||||
|
||||
this.isGrabbed = true;
|
||||
|
||||
this.initialHand = this.hand;
|
||||
|
||||
//disable the opposite hand in handControllerGrab.js by message
|
||||
var handToDisable = this.initialHand === 'right' ? 'left' : 'right';
|
||||
Messages.sendMessage('Hifi-Hand-Disabler', handToDisable);
|
||||
|
||||
setEntityCustomData('grabbableKey', this.entityID, {
|
||||
grabbable: false,
|
||||
invertSolidWhileHeld: true,
|
||||
spatialKey: BOW_SPATIAL_KEY
|
||||
});
|
||||
|
||||
},
|
||||
continueNearGrab: function() {
|
||||
this.deltaTime = checkInterval();
|
||||
|
||||
//debounce during debugging -- maybe we're updating too fast?
|
||||
if (USE_DEBOUNCE === true) {
|
||||
this.sinceLastUpdate = this.sinceLastUpdate + this.deltaTime;
|
||||
|
||||
if (this.sinceLastUpdate > 60) {
|
||||
this.sinceLastUpdate = 0;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
this.bowProperties = Entities.getEntityProperties(this.entityID);
|
||||
|
||||
//create a string across the bow when we pick it up
|
||||
if (this.preNotchString === null) {
|
||||
this.createPreNotchString();
|
||||
}
|
||||
|
||||
if (this.preNotchString !== null && this.aiming === false) {
|
||||
// print('DRAW PRE NOTCH STRING')
|
||||
this.drawPreNotchStrings();
|
||||
}
|
||||
|
||||
// create the notch detector that arrows will look for
|
||||
|
||||
if (this.aiming === true) {
|
||||
Entities.editEntity(this.preNotchString, {
|
||||
visible: false
|
||||
})
|
||||
} else {
|
||||
Entities.editEntity(this.preNotchString, {
|
||||
visible: true
|
||||
})
|
||||
}
|
||||
|
||||
this.checkStringHand();
|
||||
|
||||
},
|
||||
|
||||
releaseGrab: function() {
|
||||
// print('RELEASE GRAB EVENT')
|
||||
if (this.isGrabbed === true && this.hand === this.initialHand) {
|
||||
|
||||
Messages.sendMessage('Hifi-Hand-Disabler', "none")
|
||||
|
||||
this.isGrabbed = false;
|
||||
this.stringDrawn = false;
|
||||
this.deleteStrings();
|
||||
setEntityCustomData('grabbableKey', this.entityID, {
|
||||
grabbable: true,
|
||||
invertSolidWhileHeld: true,
|
||||
spatialKey: BOW_SPATIAL_KEY
|
||||
});
|
||||
Entities.deleteEntity(this.preNotchString);
|
||||
Entities.deleteEntity(this.arrow);
|
||||
this.aiming = false;
|
||||
this.hasArrowNotched = false;
|
||||
this.preNotchString = null;
|
||||
|
||||
}
|
||||
},
|
||||
|
||||
createArrow: function() {
|
||||
print('create arrow')
|
||||
this.playArrowNotchSound();
|
||||
|
||||
var arrow = Entities.addEntity({
|
||||
name: 'Hifi-Arrow',
|
||||
type: 'Model',
|
||||
modelURL: ARROW_MODEL_URL,
|
||||
shapeType: 'compound',
|
||||
compoundShapeURL: ARROW_COLLISION_HULL_URL,
|
||||
dimensions: ARROW_DIMENSIONS,
|
||||
position: this.bowProperties.position,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: true,
|
||||
collisionSoundURL: ARROW_HIT_SOUND_URL,
|
||||
damping: 0.01,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
grabbable: false
|
||||
}
|
||||
})
|
||||
|
||||
});
|
||||
|
||||
var makeArrowStick = function(entityA, entityB, collision) {
|
||||
Entities.editEntity(entityA, {
|
||||
angularVelocity: {
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
},
|
||||
velocity: {
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
},
|
||||
gravity: {
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
},
|
||||
position: collision.contactPoint,
|
||||
collisionsWillMove: false
|
||||
})
|
||||
// print('ARROW COLLIDED WITH::' + entityB);
|
||||
Script.removeEventHandler(arrow, "collisionWithEntity", makeArrowStick)
|
||||
}
|
||||
|
||||
Script.addEventHandler(arrow, "collisionWithEntity", makeArrowStick);
|
||||
|
||||
return arrow
|
||||
},
|
||||
|
||||
createStrings: function() {
|
||||
this.createTopString();
|
||||
this.createBottomString();
|
||||
},
|
||||
|
||||
createTopString: function() {
|
||||
var stringProperties = {
|
||||
name: 'Hifi-Bow-Top-String',
|
||||
type: 'Line',
|
||||
position: Vec3.sum(this.bowProperties.position, TOP_NOTCH_OFFSET),
|
||||
dimensions: LINE_DIMENSIONS,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: true,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
grabbable: false
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
this.topString = Entities.addEntity(stringProperties);
|
||||
},
|
||||
|
||||
createBottomString: function() {
|
||||
var stringProperties = {
|
||||
name: 'Hifi-Bow-Bottom-String',
|
||||
type: 'Line',
|
||||
position: Vec3.sum(this.bowProperties.position, BOTTOM_NOTCH_OFFSET),
|
||||
dimensions: LINE_DIMENSIONS,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: true,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
grabbable: false
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
this.bottomString = Entities.addEntity(stringProperties);
|
||||
},
|
||||
|
||||
deleteStrings: function() {
|
||||
Entities.deleteEntity(this.topString);
|
||||
Entities.deleteEntity(this.bottomString);
|
||||
},
|
||||
|
||||
updateStringPositions: function() {
|
||||
// print('update string positions!!!')
|
||||
var upVector = Quat.getUp(this.bowProperties.rotation);
|
||||
var upOffset = Vec3.multiply(upVector, TOP_NOTCH_OFFSET);
|
||||
var downVector = Vec3.multiply(-1, Quat.getUp(this.bowProperties.rotation));
|
||||
var downOffset = Vec3.multiply(downVector, BOTTOM_NOTCH_OFFSET);
|
||||
var backOffset = Vec3.multiply(-0.1, Quat.getFront(this.bowProperties.rotation));
|
||||
|
||||
var topStringPosition = Vec3.sum(this.bowProperties.position, upOffset);
|
||||
this.topStringPosition = Vec3.sum(topStringPosition, backOffset);
|
||||
var bottomStringPosition = Vec3.sum(this.bowProperties.position, downOffset);
|
||||
this.bottomStringPosition = Vec3.sum(bottomStringPosition, backOffset);
|
||||
|
||||
Entities.editEntity(this.preNotchString, {
|
||||
position: this.topStringPosition
|
||||
});
|
||||
|
||||
Entities.editEntity(this.topString, {
|
||||
position: this.topStringPosition
|
||||
});
|
||||
|
||||
Entities.editEntity(this.bottomString, {
|
||||
position: this.bottomStringPosition
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
drawStrings: function() {
|
||||
|
||||
this.updateStringPositions();
|
||||
var lineVectors = this.getLocalLineVectors();
|
||||
|
||||
Entities.editEntity(this.topString, {
|
||||
linePoints: [{
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
}, lineVectors[0]],
|
||||
lineWidth: 5,
|
||||
color: this.stringData.currentColor
|
||||
});
|
||||
|
||||
Entities.editEntity(this.bottomString, {
|
||||
linePoints: [{
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
}, lineVectors[1]],
|
||||
lineWidth: 5,
|
||||
color: this.stringData.currentColor
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
getLocalLineVectors: function() {
|
||||
var topVector = Vec3.subtract(this.arrowRearPosition, this.topStringPosition);
|
||||
var bottomVector = Vec3.subtract(this.arrowRearPosition, this.bottomStringPosition);
|
||||
return [topVector, bottomVector];
|
||||
},
|
||||
|
||||
createPreNotchString: function() {
|
||||
this.bowProperties = Entities.getEntityProperties(_this.entityID, ["position", "rotation", "userData"]);
|
||||
|
||||
var stringProperties = {
|
||||
type: 'Line',
|
||||
position: Vec3.sum(this.bowProperties.position, TOP_NOTCH_OFFSET),
|
||||
dimensions: LINE_DIMENSIONS,
|
||||
visible: true,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: true,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
grabbable: false
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
this.preNotchString = Entities.addEntity(stringProperties);
|
||||
},
|
||||
|
||||
drawPreNotchStrings: function() {
|
||||
this.bowProperties = Entities.getEntityProperties(_this.entityID, ["position", "rotation", "userData"]);
|
||||
|
||||
this.updateStringPositions();
|
||||
|
||||
var downVector = Vec3.multiply(-1, Quat.getUp(this.bowProperties.rotation));
|
||||
var downOffset = Vec3.multiply(downVector, BOTTOM_NOTCH_OFFSET * 2);
|
||||
|
||||
Entities.editEntity(this.preNotchString, {
|
||||
name: 'Hifi-Pre-Notch-String',
|
||||
linePoints: [{
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
}, Vec3.sum({
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
}, downOffset)],
|
||||
lineWidth: 5,
|
||||
color: this.stringData.currentColor,
|
||||
});
|
||||
},
|
||||
|
||||
checkStringHand: function() {
|
||||
//invert the hands because our string will be held with the opposite hand of the first one we pick up the bow with
|
||||
var triggerLookup;
|
||||
if (this.initialHand === 'left') {
|
||||
triggerLookup = 1;
|
||||
this.getStringHandPosition = MyAvatar.getRightPalmPosition;
|
||||
} else if (this.initialHand === 'right') {
|
||||
this.getStringHandPosition = MyAvatar.getLeftPalmPosition;
|
||||
triggerLookup = 0;
|
||||
}
|
||||
|
||||
this.triggerValue = Controller.getValue(TRIGGER_CONTROLS[triggerLookup]);
|
||||
|
||||
|
||||
if (this.triggerValue < DRAW_STRING_THRESHOLD && this.stringDrawn === true) {
|
||||
// firing the arrow
|
||||
// print('HIT RELEASE LOOP IN CHECK');
|
||||
|
||||
this.drawStrings();
|
||||
this.hasArrowNotched = false;
|
||||
this.aiming = false;
|
||||
this.stringDrawn = false;
|
||||
this.updateArrowPositionInNotch(true);
|
||||
|
||||
|
||||
} else if (this.triggerValue > DRAW_STRING_THRESHOLD && this.stringDrawn === true) {
|
||||
// print('HIT CONTINUE LOOP IN CHECK')
|
||||
//continuing to aim the arrow
|
||||
|
||||
this.aiming = true;
|
||||
this.drawStrings();
|
||||
this.updateArrowPositionInNotch();
|
||||
|
||||
} else if (this.triggerValue > DRAW_STRING_THRESHOLD && this.stringDrawn === false) {
|
||||
// print('HIT START LOOP IN CHECK');
|
||||
this.arrow = this.createArrow();
|
||||
this.playStringPullSound();
|
||||
|
||||
//the first time aiming the arrow
|
||||
this.stringDrawn = true;
|
||||
this.createStrings();
|
||||
this.drawStrings();
|
||||
this.updateArrowPositionInNotch();
|
||||
|
||||
}
|
||||
},
|
||||
|
||||
setArrowRearPosition: function(arrowPosition, arrowRotation) {
|
||||
var frontVector = Quat.getFront(arrowRotation);
|
||||
var frontOffset = Vec3.multiply(frontVector, -ARROW_TIP_OFFSET);
|
||||
var arrorRearPosition = Vec3.sum(arrowPosition, frontOffset);
|
||||
this.arrowRearPosition = arrorRearPosition;
|
||||
return arrorRearPosition;
|
||||
|
||||
},
|
||||
|
||||
updateArrowPositionInNotch: function(shouldReleaseArrow) {
|
||||
var bowProperties = Entities.getEntityProperties(this.entityID);
|
||||
//set the notch that the arrow should go through
|
||||
var frontVector = Quat.getFront(bowProperties.rotation);
|
||||
var notchVectorForward = Vec3.multiply(frontVector, NOTCH_OFFSET_FORWARD);
|
||||
var upVector = Quat.getUp(bowProperties.rotation);
|
||||
var notchVectorUp = Vec3.multiply(upVector, NOTCH_OFFSET_UP);
|
||||
var notchPosition;
|
||||
notchPosition = Vec3.sum(bowProperties.position, notchVectorForward);
|
||||
notchPosition = Vec3.sum(notchPosition, notchVectorUp);
|
||||
|
||||
//set the arrow rotation to be between the notch and other hand
|
||||
var stringHandPosition = this.getStringHandPosition();
|
||||
var handToNotch = Vec3.subtract(notchPosition, stringHandPosition);
|
||||
var arrowRotation = Quat.rotationBetween(Vec3.FRONT, handToNotch);
|
||||
|
||||
|
||||
|
||||
var pullBackDistance = Vec3.length(handToNotch);
|
||||
// this.changeStringPullSoundVolume(pullBackDistance);
|
||||
|
||||
if (pullBackDistance > 0.6) {
|
||||
pullBackDistance = 0.6;
|
||||
}
|
||||
|
||||
// //pull the arrow back a bit
|
||||
var pullBackOffset = Vec3.multiply(handToNotch, -pullBackDistance);
|
||||
var arrowPosition = Vec3.sum(notchPosition, pullBackOffset);
|
||||
|
||||
// // move it forward a bit
|
||||
var pushForwardOffset = Vec3.multiply(handToNotch, -ARROW_OFFSET);
|
||||
var finalArrowPosition = Vec3.sum(arrowPosition, pushForwardOffset);
|
||||
|
||||
//we draw strings to the rear of the arrow
|
||||
this.setArrowRearPosition(finalArrowPosition, arrowRotation);
|
||||
|
||||
//if we're not shooting, we're updating the arrow's orientation
|
||||
if (shouldReleaseArrow !== true) {
|
||||
Entities.editEntity(this.arrow, {
|
||||
position: finalArrowPosition,
|
||||
rotation: arrowRotation
|
||||
})
|
||||
}
|
||||
|
||||
//shoot the arrow
|
||||
if (shouldReleaseArrow === true) {
|
||||
var arrowProps = Entities.getEntityProperties(this.arrow);
|
||||
|
||||
//scale the shot strength by the distance you've pulled the arrow back and set its release velocity to be in the direction of the v
|
||||
var arrowForce = this.scaleArrowShotStrength(pullBackDistance);
|
||||
var handToNotch = Vec3.normalize(handToNotch);
|
||||
|
||||
var releaseVelocity = Vec3.multiply(handToNotch, arrowForce);
|
||||
// var releaseVelocity2 = Vec3.multiply()
|
||||
|
||||
//make the arrow physical, give it gravity, a lifetime, and set our velocity
|
||||
var arrowProperties = {
|
||||
collisionsWillMove: true,
|
||||
ignoreForCollisions: false,
|
||||
velocity: releaseVelocity,
|
||||
gravity: ARROW_GRAVITY,
|
||||
lifetime: 10,
|
||||
// position: arrowProps.position,
|
||||
// rotation: arrowProps.rotation
|
||||
};
|
||||
|
||||
//actually shoot the arrow and play its sound
|
||||
Entities.editEntity(this.arrow, arrowProperties);
|
||||
this.playShootArrowSound();
|
||||
|
||||
//clear the strings back to only the single straight one
|
||||
this.deleteStrings();
|
||||
Entities.editEntity(this.preNotchString, {
|
||||
visible: true
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
scaleArrowShotStrength: function(value) {
|
||||
var min1 = SHOT_SCALE.min1;
|
||||
var max1 = SHOT_SCALE.max1;
|
||||
var min2 = SHOT_SCALE.min2;
|
||||
var max2 = SHOT_SCALE.max2;
|
||||
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
|
||||
},
|
||||
|
||||
playStringPullSound: function() {
|
||||
var audioProperties = {
|
||||
volume: 0.10,
|
||||
position: this.bowProperties.position
|
||||
};
|
||||
this.stringPullInjector = Audio.playSound(this.stringPullSound, audioProperties);
|
||||
},
|
||||
|
||||
playShootArrowSound: function(sound) {
|
||||
var audioProperties = {
|
||||
volume: 0.15,
|
||||
position: this.bowProperties.position
|
||||
};
|
||||
Audio.playSound(this.shootArrowSound, audioProperties);
|
||||
},
|
||||
|
||||
playArrowNotchSound: function() {
|
||||
var audioProperties = {
|
||||
volume: 0.15,
|
||||
position: this.bowProperties.position
|
||||
};
|
||||
Audio.playSound(this.arrowNotchSound, audioProperties);
|
||||
},
|
||||
|
||||
changeStringPullSoundVolume: function(pullBackDistance) {
|
||||
var audioProperties = {
|
||||
volume: this.scaleSoundVolume(pullBackDistance),
|
||||
position: this.bowProperties.position
|
||||
}
|
||||
|
||||
this.stringPullInjector.options = audioProperties;
|
||||
},
|
||||
scaleSoundVolume: function(value) {
|
||||
var min1 = SHOT_SCALE.min1;
|
||||
var max1 = SHOT_SCALE.max1;
|
||||
var min2 = 0;
|
||||
var max2 = 0.2;
|
||||
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
return new Bow();
|
||||
});
|
66
examples/toybox/bow/createBow.js
Normal file
66
examples/toybox/bow/createBow.js
Normal file
|
@ -0,0 +1,66 @@
|
|||
//
|
||||
// createBow.js
|
||||
//
|
||||
// Created byJames Pollack @imgntn on 10/19/2015
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// This script creates a bow you can use to shoot an arrow.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
var SCRIPT_URL = Script.resolvePath('bow.js');
|
||||
|
||||
var MODEL_URL = "https://hifi-public.s3.amazonaws.com/models/bow/new/bow-deadly.fbx";
|
||||
var COLLISION_HULL_URL = "https://hifi-public.s3.amazonaws.com/models/bow/new/bow_collision_hull.obj";
|
||||
var BOW_DIMENSIONS = {
|
||||
x: 0.04,
|
||||
y: 1.3,
|
||||
z: 0.21
|
||||
};
|
||||
|
||||
var BOW_GRAVITY = {
|
||||
x: 0,
|
||||
y: 0,
|
||||
z: 0
|
||||
}
|
||||
|
||||
var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
|
||||
x: 0,
|
||||
y: 0.5,
|
||||
z: 0
|
||||
}), Vec3.multiply(1, Quat.getFront(Camera.getOrientation())));
|
||||
|
||||
var bow = Entities.addEntity({
|
||||
name: 'Hifi-Bow',
|
||||
type: "Model",
|
||||
modelURL: MODEL_URL,
|
||||
position: center,
|
||||
dimensions: BOW_DIMENSIONS,
|
||||
collisionsWillMove: true,
|
||||
gravity: BOW_GRAVITY,
|
||||
shapeType: 'compound',
|
||||
compoundShapeURL: COLLISION_HULL_URL,
|
||||
script: SCRIPT_URL,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
invertSolidWhileHeld: true,
|
||||
spatialKey: {
|
||||
relativePosition: {
|
||||
x: 0,
|
||||
y: 0.06,
|
||||
z: 0.11
|
||||
},
|
||||
relativeRotation: Quat.fromPitchYawRollDegrees(0, -90, 90)
|
||||
}
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
function cleanup() {
|
||||
Entities.deleteEntity(bow);
|
||||
}
|
||||
|
||||
Script.scriptEnding.connect(cleanup);
|
|
@ -12,8 +12,8 @@
|
|||
|
||||
Script.include("../../libraries/utils.js");
|
||||
|
||||
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/wand.fbx';
|
||||
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/actual_no_top_collision_hull.obj';
|
||||
var WAND_MODEL = 'http://hifi-content.s3.amazonaws.com/james/bubblewand/wand.fbx';
|
||||
var WAND_COLLISION_SHAPE = 'http://hifi-content.s3.amazonaws.com/james/bubblewand/wand_collision_hull.obj';
|
||||
|
||||
var WAND_SCRIPT_URL = Script.resolvePath("wand.js");
|
||||
|
||||
|
@ -43,5 +43,18 @@ var wand = Entities.addEntity({
|
|||
//must be enabled to be grabbable in the physics engine
|
||||
collisionsWillMove: true,
|
||||
compoundShapeURL: WAND_COLLISION_SHAPE,
|
||||
script: WAND_SCRIPT_URL
|
||||
script: WAND_SCRIPT_URL,
|
||||
userData: JSON.stringify({
|
||||
grabbableKey: {
|
||||
invertSolidWhileHeld: true,
|
||||
spatialKey: {
|
||||
relativePosition: {
|
||||
x: 0,
|
||||
y: 0.1,
|
||||
z: 0
|
||||
},
|
||||
relativeRotation: Quat.fromPitchYawRollDegrees(0, 0, 90)
|
||||
}
|
||||
}
|
||||
})
|
||||
});
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
/*global MyAvatar, Entities, AnimationCache, SoundCache, Scene, Camera, Overlays, HMD, AvatarList, AvatarManager, Controller, UndoStack, Window, Account, GlobalServices, Script, ScriptDiscoveryService, LODManager, Menu, Vec3, Quat, AudioDevice, Paths, Clipboard, Settings, XMLHttpRequest, randFloat, randInt */
|
||||
|
||||
(function () {
|
||||
(function() {
|
||||
|
||||
Script.include("../../libraries/utils.js");
|
||||
|
||||
|
@ -28,7 +28,7 @@
|
|||
var BUBBLE_LIFETIME_MAX = 8;
|
||||
var BUBBLE_SIZE_MIN = 0.02;
|
||||
var BUBBLE_SIZE_MAX = 0.1;
|
||||
var BUBBLE_LINEAR_DAMPING = 0.4;
|
||||
var BUBBLE_LINEAR_DAMPING = 0.2;
|
||||
var BUBBLE_GRAVITY_MIN = 0.1;
|
||||
var BUBBLE_GRAVITY_MAX = 0.3;
|
||||
var GROWTH_FACTOR = 0.005;
|
||||
|
@ -58,23 +58,23 @@
|
|||
BubbleWand.prototype = {
|
||||
timePassed: null,
|
||||
currentBubble: null,
|
||||
preload: function (entityID) {
|
||||
preload: function(entityID) {
|
||||
this.entityID = entityID;
|
||||
},
|
||||
getWandTipPosition: function (properties) {
|
||||
getWandTipPosition: function(properties) {
|
||||
//the tip of the wand is going to be in a different place than the center, so we move in space relative to the model to find that position
|
||||
var upVector = Quat.getUp(properties.rotation);
|
||||
var upOffset = Vec3.multiply(upVector, WAND_TIP_OFFSET);
|
||||
var wandTipPosition = Vec3.sum(properties.position, upOffset);
|
||||
return wandTipPosition;
|
||||
},
|
||||
addCollisionsToBubbleAfterCreation: function (bubble) {
|
||||
addCollisionsToBubbleAfterCreation: function(bubble) {
|
||||
//if the bubble collide immediately, we get weird effects. so we add collisions after release
|
||||
Entities.editEntity(bubble, {
|
||||
collisionsWillMove: true
|
||||
});
|
||||
},
|
||||
randomizeBubbleGravity: function () {
|
||||
randomizeBubbleGravity: function() {
|
||||
//change up the gravity a little bit for variation in floating effects
|
||||
var randomNumber = randFloat(BUBBLE_GRAVITY_MIN, BUBBLE_GRAVITY_MAX);
|
||||
var gravity = {
|
||||
|
@ -84,7 +84,7 @@
|
|||
};
|
||||
return gravity;
|
||||
},
|
||||
growBubbleWithWandVelocity: function (properties, deltaTime) {
|
||||
growBubbleWithWandVelocity: function(properties, deltaTime) {
|
||||
//get the wand and tip position for calculations
|
||||
var wandPosition = properties.position;
|
||||
this.getWandTipPosition(properties);
|
||||
|
@ -145,7 +145,7 @@
|
|||
dimensions: dimensions
|
||||
});
|
||||
},
|
||||
createBubbleAtTipOfWand: function () {
|
||||
createBubbleAtTipOfWand: function() {
|
||||
|
||||
//create a new bubble at the tip of the wand
|
||||
var properties = Entities.getEntityProperties(this.entityID, ["position", "rotation"]);
|
||||
|
@ -162,24 +162,23 @@
|
|||
position: this.getWandTipPosition(properties),
|
||||
dimensions: BUBBLE_INITIAL_DIMENSIONS,
|
||||
collisionsWillMove: false,
|
||||
ignoreForCollisions: false,
|
||||
linearDamping: BUBBLE_LINEAR_DAMPING,
|
||||
ignoreForCollisions: true,
|
||||
damping: BUBBLE_LINEAR_DAMPING,
|
||||
shapeType: "sphere"
|
||||
});
|
||||
|
||||
},
|
||||
startNearGrab: function () {
|
||||
startNearGrab: function() {
|
||||
//create a bubble to grow at the start of the grab
|
||||
if (this.currentBubble === null) {
|
||||
this.createBubbleAtTipOfWand();
|
||||
}
|
||||
},
|
||||
continueNearGrab: function () {
|
||||
continueNearGrab: function() {
|
||||
var deltaTime = checkInterval();
|
||||
//only get the properties that we need
|
||||
var properties = Entities.getEntityProperties(this.entityID, ["position", "rotation"]);
|
||||
|
||||
|
||||
var wandTipPosition = this.getWandTipPosition(properties);
|
||||
|
||||
//update the bubble to stay with the wand tip
|
||||
|
@ -189,7 +188,7 @@
|
|||
this.growBubbleWithWandVelocity(properties, deltaTime);
|
||||
|
||||
},
|
||||
releaseGrab: function () {
|
||||
releaseGrab: function() {
|
||||
//delete the current buble and reset state when the wand is released
|
||||
Entities.deleteEntity(this.currentBubble);
|
||||
this.currentBubble = null;
|
||||
|
|
|
@ -12,10 +12,9 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
/*global MyAvatar, Entities, AnimationCache, SoundCache, Scene, Camera, Overlays, HMD, AvatarList, AvatarManager, Controller, UndoStack, Window, Account, GlobalServices, Script, ScriptDiscoveryService, LODManager, Menu, Vec3, Quat, AudioDevice, Paths, Clipboard, Settings, XMLHttpRequest, randFloat, randInt */
|
||||
Script.include("https://hifi-public.s3.amazonaws.com/scripts/utilities.js");
|
||||
Script.include("../../libraries/utils.js");
|
||||
|
||||
|
||||
var scriptURL = Script.resolvePath('flashlight.js?123123');
|
||||
var scriptURL = Script.resolvePath('flashlight.js');
|
||||
|
||||
var modelURL = "https://hifi-public.s3.amazonaws.com/models/props/flashlight.fbx";
|
||||
|
||||
|
|
|
@ -183,11 +183,14 @@
|
|||
},
|
||||
|
||||
changeLightWithTriggerPressure: function(flashLightHand) {
|
||||
var handClickString = flashLightHand + "_HAND_CLICK";
|
||||
|
||||
var handClick = Controller.findAction(handClickString);
|
||||
if (flashLightHand === 'LEFT') {
|
||||
this.triggerValue = Controller.getValue(Controller.Standard.LT);
|
||||
}
|
||||
if (flashLightHand === 'RIGHT') {
|
||||
this.triggerValue = Controller.getValue(Controller.Standard.RT);
|
||||
|
||||
this.triggerValue = Controller.getActionValue(handClick);
|
||||
}
|
||||
|
||||
if (this.triggerValue < DISABLE_LIGHT_THRESHOLD && this.lightOn === true) {
|
||||
this.turnLightOff();
|
||||
|
@ -266,4 +269,4 @@
|
|||
|
||||
// entity scripts always need to return a newly constructed object of our type
|
||||
return new Flashlight();
|
||||
});
|
||||
});
|
|
@ -39,6 +39,8 @@ var startPosition = {
|
|||
z: 509.74
|
||||
};
|
||||
|
||||
startPosition = MyAvatar.position;
|
||||
|
||||
var rotation = Quat.fromPitchYawRollDegrees(0, -55.25, 0);
|
||||
|
||||
var targetIntervalClearer = Entities.addEntity({
|
||||
|
|
|
@ -123,7 +123,7 @@
|
|||
type:'Sphere',
|
||||
color: BALL_COLOR,
|
||||
dimensions: BALL_DIMENSIONS,
|
||||
linearDamping: BALL_LINEAR_DAMPING,
|
||||
damping: BALL_LINEAR_DAMPING,
|
||||
gravity: BALL_GRAVITY,
|
||||
restitution: BALL_RESTITUTION,
|
||||
collisionsWillMove: true,
|
||||
|
|
|
@ -5,5 +5,4 @@ setup_hifi_project(Network)
|
|||
|
||||
# link the shared hifi libraries
|
||||
link_hifi_libraries(embedded-webserver networking shared)
|
||||
|
||||
copy_dlls_beside_windows_executable()
|
||||
package_libraries_for_deployment()
|
||||
|
|
|
@ -100,6 +100,13 @@ else()
|
|||
add_executable(${TARGET_NAME} ${INTERFACE_SRCS} ${QM})
|
||||
endif()
|
||||
|
||||
# disable /OPT:REF and /OPT:ICF for the Debug builds
|
||||
# This will prevent the following linker warnings
|
||||
# LINK : warning LNK4075: ignoring '/INCREMENTAL' due to '/OPT:ICF' specification
|
||||
if (WIN32)
|
||||
set_property(TARGET ${TARGET_NAME} APPEND_STRING PROPERTY LINK_FLAGS_DEBUG "/OPT:NOREF /OPT:NOICF")
|
||||
endif()
|
||||
|
||||
# link required hifi libraries
|
||||
link_hifi_libraries(shared octree environment gpu gl procedural model render
|
||||
recording fbx networking model-networking entities avatars
|
||||
|
@ -201,4 +208,4 @@ else (APPLE)
|
|||
endif()
|
||||
endif (APPLE)
|
||||
|
||||
copy_dlls_beside_windows_executable()
|
||||
package_libraries_for_deployment()
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -256,6 +256,12 @@ Item {
|
|||
visible: root.expanded
|
||||
text: "LOD: " + root.lodStatus;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
visible: root.expanded
|
||||
text: "Renderable avatars: " + root.avatarRenderableCount + " w/in " + root.avatarRenderDistance + "m";
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,6 +50,7 @@
|
|||
#include <AssetClient.h>
|
||||
#include <AssetUpload.h>
|
||||
#include <AutoUpdater.h>
|
||||
#include <AudioInjectorManager.h>
|
||||
#include <CursorManager.h>
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <display-plugins/DisplayPlugin.h>
|
||||
|
@ -340,6 +341,7 @@ bool setupEssentials(int& argc, char** argv) {
|
|||
DependencyManager::set<PathUtils>();
|
||||
DependencyManager::set<InterfaceActionFactory>();
|
||||
DependencyManager::set<AssetClient>();
|
||||
DependencyManager::set<AudioInjectorManager>();
|
||||
DependencyManager::set<MessagesClient>();
|
||||
DependencyManager::set<UserInputMapper>();
|
||||
DependencyManager::set<controller::ScriptingInterface, ControllerScriptingInterface>();
|
||||
|
@ -454,14 +456,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
audioIO->setOrientationGetter([this]{ return getMyAvatar()->getOrientationForAudio(); });
|
||||
|
||||
audioIO->moveToThread(audioThread);
|
||||
recording::Frame::registerFrameHandler(AudioConstants::AUDIO_FRAME_NAME, [=](recording::Frame::ConstPointer frame) {
|
||||
recording::Frame::registerFrameHandler(AudioConstants::getAudioFrameName(), [=](recording::Frame::ConstPointer frame) {
|
||||
audioIO->handleRecordedAudioInput(frame->data);
|
||||
});
|
||||
|
||||
connect(audioIO.data(), &AudioClient::inputReceived, [](const QByteArray& audio){
|
||||
static auto recorder = DependencyManager::get<recording::Recorder>();
|
||||
if (recorder->isRecording()) {
|
||||
static const recording::FrameType AUDIO_FRAME_TYPE = recording::Frame::registerFrameType(AudioConstants::AUDIO_FRAME_NAME);
|
||||
static const recording::FrameType AUDIO_FRAME_TYPE = recording::Frame::registerFrameType(AudioConstants::getAudioFrameName());
|
||||
recorder->recordFrame(AUDIO_FRAME_TYPE, audio);
|
||||
}
|
||||
});
|
||||
|
@ -679,7 +681,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
}));
|
||||
|
||||
userInputMapper->registerDevice(_applicationStateDevice);
|
||||
|
||||
|
||||
// Setup the keyboardMouseDevice and the user input mapper with the default bindings
|
||||
userInputMapper->registerDevice(_keyboardMouseDevice->getInputDevice());
|
||||
|
||||
|
@ -749,7 +751,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
_oldHandRightClick[0] = false;
|
||||
_oldHandLeftClick[1] = false;
|
||||
_oldHandRightClick[1] = false;
|
||||
|
||||
|
||||
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
|
||||
connect(applicationUpdater.data(), &AutoUpdater::newVersionIsAvailable, dialogsManager.data(), &DialogsManager::showUpdateDialog);
|
||||
applicationUpdater->checkForUpdate();
|
||||
|
@ -768,7 +770,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
|
||||
// If the user clicks an an entity, we will check that it's an unlocked web entity, and if so, set the focus to it
|
||||
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
|
||||
connect(entityScriptingInterface.data(), &EntityScriptingInterface::clickDownOnEntity,
|
||||
connect(entityScriptingInterface.data(), &EntityScriptingInterface::clickDownOnEntity,
|
||||
[this, entityScriptingInterface](const EntityItemID& entityItemID, const MouseEvent& event) {
|
||||
if (_keyboardFocusedItem != entityItemID) {
|
||||
_keyboardFocusedItem = UNKNOWN_ENTITY_ID;
|
||||
|
@ -817,7 +819,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
});
|
||||
|
||||
// If the user clicks somewhere where there is NO entity at all, we will release focus
|
||||
connect(getEntities(), &EntityTreeRenderer::mousePressOffEntity,
|
||||
connect(getEntities(), &EntityTreeRenderer::mousePressOffEntity,
|
||||
[=](const RayToEntityIntersectionResult& entityItemID, const QMouseEvent* event, unsigned int deviceId) {
|
||||
_keyboardFocusedItem = UNKNOWN_ENTITY_ID;
|
||||
if (_keyboardFocusHighlight) {
|
||||
|
@ -826,17 +828,17 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
});
|
||||
|
||||
connect(this, &Application::applicationStateChanged, this, &Application::activeChanged);
|
||||
|
||||
|
||||
qCDebug(interfaceapp, "Startup time: %4.2f seconds.", (double)startupTimer.elapsed() / 1000.0);
|
||||
}
|
||||
|
||||
void Application::aboutToQuit() {
|
||||
emit beforeAboutToQuit();
|
||||
|
||||
|
||||
getActiveDisplayPlugin()->deactivate();
|
||||
|
||||
|
||||
_aboutToQuit = true;
|
||||
|
||||
|
||||
cleanupBeforeQuit();
|
||||
}
|
||||
|
||||
|
@ -860,16 +862,16 @@ void Application::cleanupBeforeQuit() {
|
|||
_keyboardFocusHighlight = nullptr;
|
||||
|
||||
_entities.clear(); // this will allow entity scripts to properly shutdown
|
||||
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
|
||||
// send the domain a disconnect packet, force stoppage of domain-server check-ins
|
||||
nodeList->getDomainHandler().disconnect();
|
||||
nodeList->setIsShuttingDown(true);
|
||||
|
||||
|
||||
// tell the packet receiver we're shutting down, so it can drop packets
|
||||
nodeList->getPacketReceiver().setShouldDropPackets(true);
|
||||
|
||||
|
||||
_entities.shutdown(); // tell the entities system we're shutting down, so it will stop running scripts
|
||||
ScriptEngine::stopAllScripts(this); // stop all currently running global scripts
|
||||
|
||||
|
@ -894,6 +896,10 @@ void Application::cleanupBeforeQuit() {
|
|||
|
||||
// destroy the AudioClient so it and its thread have a chance to go down safely
|
||||
DependencyManager::destroy<AudioClient>();
|
||||
|
||||
// destroy the AudioInjectorManager so it and its thread have a chance to go down safely
|
||||
// this will also stop any ongoing network injectors
|
||||
DependencyManager::destroy<AudioInjectorManager>();
|
||||
|
||||
// Destroy third party processes after scripts have finished using them.
|
||||
#ifdef HAVE_DDE
|
||||
|
@ -947,7 +953,7 @@ Application::~Application() {
|
|||
DependencyManager::destroy<GeometryCache>();
|
||||
DependencyManager::destroy<ScriptCache>();
|
||||
DependencyManager::destroy<SoundCache>();
|
||||
|
||||
|
||||
// cleanup the AssetClient thread
|
||||
QThread* assetThread = DependencyManager::get<AssetClient>()->thread();
|
||||
DependencyManager::destroy<AssetClient>();
|
||||
|
@ -955,14 +961,14 @@ Application::~Application() {
|
|||
assetThread->wait();
|
||||
|
||||
QThread* nodeThread = DependencyManager::get<NodeList>()->thread();
|
||||
|
||||
|
||||
// remove the NodeList from the DependencyManager
|
||||
DependencyManager::destroy<NodeList>();
|
||||
|
||||
// ask the node thread to quit and wait until it is done
|
||||
nodeThread->quit();
|
||||
nodeThread->wait();
|
||||
|
||||
|
||||
Leapmotion::destroy();
|
||||
RealSense::destroy();
|
||||
|
||||
|
@ -1058,7 +1064,7 @@ void Application::initializeUi() {
|
|||
resizeGL();
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
// This will set up the input plugins UI
|
||||
_activeInputPlugins.clear();
|
||||
foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) {
|
||||
|
@ -1077,8 +1083,10 @@ void Application::paintGL() {
|
|||
uint64_t now = usecTimestampNow();
|
||||
static uint64_t lastPaintBegin{ now };
|
||||
uint64_t diff = now - lastPaintBegin;
|
||||
float instantaneousFps = 0.0f;
|
||||
if (diff != 0) {
|
||||
_framesPerSecond.updateAverage((float)USECS_PER_SECOND / (float)diff);
|
||||
instantaneousFps = (float)USECS_PER_SECOND / (float)diff;
|
||||
_framesPerSecond.updateAverage(_lastInstantaneousFps);
|
||||
}
|
||||
|
||||
lastPaintBegin = now;
|
||||
|
@ -1100,8 +1108,8 @@ void Application::paintGL() {
|
|||
return;
|
||||
}
|
||||
|
||||
// Some plugins process message events, potentially leading to
|
||||
// re-entering a paint event. don't allow further processing if this
|
||||
// Some plugins process message events, potentially leading to
|
||||
// re-entering a paint event. don't allow further processing if this
|
||||
// happens
|
||||
if (_inPaint) {
|
||||
return;
|
||||
|
@ -1109,6 +1117,29 @@ void Application::paintGL() {
|
|||
_inPaint = true;
|
||||
Finally clearFlagLambda([this] { _inPaint = false; });
|
||||
|
||||
// Some LOD-like controls need to know a smoothly varying "potential" frame rate that doesn't
|
||||
// include time waiting for vsync, and which can report a number above target if we've got the headroom.
|
||||
// For example, if we're shooting for 75fps and paintWait is 3.3333ms (= 75% * 13.33ms), our deducedNonVSyncFps
|
||||
// would be 100fps. In principle, a paintWait of zero would have deducedNonVSyncFps=75.
|
||||
// Here we make a guess for deducedNonVSyncFps = 1 / deducedNonVSyncPeriod.
|
||||
//
|
||||
// Time between previous paintGL call and this one, which can vary not only with vSync misses, but also with QT timing.
|
||||
// We're using this as a proxy for the time between vsync and displayEnd, below. (Not exact, but tends to be the same over time.)
|
||||
// This is not the same as update(deltaTime), because the latter attempts to throttle to 60hz and also clamps to 1/4 second.
|
||||
const float actualPeriod = diff / (float)USECS_PER_SECOND; // same as 1/instantaneousFps but easier for compiler to optimize
|
||||
// Note that _lastPaintWait (stored at end of last call) is for the same paint cycle.
|
||||
float deducedNonVSyncPeriod = actualPeriod - _lastPaintWait + _marginForDeducedFramePeriod; // plus a some non-zero time for machinery we can't measure
|
||||
// We don't know how much time to allow for that, but if we went over the target period, we know it's at least the portion
|
||||
// of paintWait up to the next vSync. This gives us enough of a penalty so that when actualPeriod crosses two cycles,
|
||||
// the key part (and not an exagerated part) of _lastPaintWait is accounted for.
|
||||
const float targetPeriod = getTargetFramePeriod();
|
||||
if (_lastPaintWait > EPSILON && actualPeriod > targetPeriod) {
|
||||
// Don't use C++ remainder(). It's authors are mathematically insane.
|
||||
deducedNonVSyncPeriod += fmod(actualPeriod, _lastPaintWait);
|
||||
}
|
||||
_lastDeducedNonVSyncFps = 1.0f / deducedNonVSyncPeriod;
|
||||
_lastInstantaneousFps = instantaneousFps;
|
||||
|
||||
auto displayPlugin = getActiveDisplayPlugin();
|
||||
displayPlugin->preRender();
|
||||
_offscreenContext->makeCurrent();
|
||||
|
@ -1137,17 +1168,17 @@ void Application::paintGL() {
|
|||
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
|
||||
PerformanceTimer perfTimer("Mirror");
|
||||
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebufferDepthColor();
|
||||
|
||||
|
||||
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
|
||||
renderRearViewMirror(&renderArgs, _mirrorViewRect);
|
||||
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
|
||||
|
||||
|
||||
{
|
||||
float ratio = ((float)QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale());
|
||||
// Flip the src and destination rect horizontally to do the mirror
|
||||
auto mirrorRect = glm::ivec4(0, 0, _mirrorViewRect.width() * ratio, _mirrorViewRect.height() * ratio);
|
||||
auto mirrorRectDest = glm::ivec4(mirrorRect.z, mirrorRect.y, mirrorRect.x, mirrorRect.w);
|
||||
|
||||
|
||||
auto selfieFbo = DependencyManager::get<FramebufferCache>()->getSelfieFramebuffer();
|
||||
gpu::doInBatch(renderArgs._context, [=](gpu::Batch& batch) {
|
||||
batch.setFramebuffer(selfieFbo);
|
||||
|
@ -1169,9 +1200,9 @@ void Application::paintGL() {
|
|||
|
||||
{
|
||||
PerformanceTimer perfTimer("CameraUpdates");
|
||||
|
||||
|
||||
auto myAvatar = getMyAvatar();
|
||||
|
||||
|
||||
myAvatar->startCapture();
|
||||
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON || _myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, myAvatar->getBoomLength() <= MyAvatar::ZOOM_MIN);
|
||||
|
@ -1208,26 +1239,26 @@ void Application::paintGL() {
|
|||
* (myAvatar->getScale() * myAvatar->getBoomLength() * glm::vec3(0.0f, 0.0f, 1.0f)));
|
||||
} else {
|
||||
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
|
||||
+ myAvatar->getOrientation()
|
||||
+ myAvatar->getOrientation()
|
||||
* (myAvatar->getScale() * myAvatar->getBoomLength() * glm::vec3(0.0f, 0.0f, 1.0f)));
|
||||
}
|
||||
}
|
||||
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
|
||||
if (isHMDMode()) {
|
||||
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
|
||||
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation()
|
||||
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation()
|
||||
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)) * hmdRotation);
|
||||
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
|
||||
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
|
||||
+ glm::vec3(0, _raiseMirror * myAvatar->getScale(), 0)
|
||||
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
|
||||
+ glm::vec3(0, _raiseMirror * myAvatar->getScale(), 0)
|
||||
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
|
||||
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
|
||||
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
|
||||
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f))) * hmdOffset);
|
||||
} else {
|
||||
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation()
|
||||
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation()
|
||||
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
|
||||
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
|
||||
+ glm::vec3(0, _raiseMirror * myAvatar->getScale(), 0)
|
||||
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
|
||||
+ glm::vec3(0, _raiseMirror * myAvatar->getScale(), 0)
|
||||
+ (myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
|
||||
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
|
||||
}
|
||||
|
@ -1246,7 +1277,7 @@ void Application::paintGL() {
|
|||
}
|
||||
}
|
||||
}
|
||||
// Update camera position
|
||||
// Update camera position
|
||||
if (!isHMDMode()) {
|
||||
_myCamera.update(1.0f / _fps);
|
||||
}
|
||||
|
@ -1264,12 +1295,12 @@ void Application::paintGL() {
|
|||
if (displayPlugin->isStereo()) {
|
||||
// Stereo modes will typically have a larger projection matrix overall,
|
||||
// so we ask for the 'mono' projection matrix, which for stereo and HMD
|
||||
// plugins will imply the combined projection for both eyes.
|
||||
// plugins will imply the combined projection for both eyes.
|
||||
//
|
||||
// This is properly implemented for the Oculus plugins, but for OpenVR
|
||||
// and Stereo displays I'm not sure how to get / calculate it, so we're
|
||||
// just relying on the left FOV in each case and hoping that the
|
||||
// overall culling margin of error doesn't cause popping in the
|
||||
// and Stereo displays I'm not sure how to get / calculate it, so we're
|
||||
// just relying on the left FOV in each case and hoping that the
|
||||
// overall culling margin of error doesn't cause popping in the
|
||||
// right eye. There are FIXMEs in the relevant plugins
|
||||
_myCamera.setProjection(displayPlugin->getProjection(Mono, _myCamera.getProjection()));
|
||||
renderArgs._context->enableStereo(true);
|
||||
|
@ -1279,11 +1310,11 @@ void Application::paintGL() {
|
|||
auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
|
||||
float IPDScale = hmdInterface->getIPDScale();
|
||||
// FIXME we probably don't need to set the projection matrix every frame,
|
||||
// only when the display plugin changes (or in non-HMD modes when the user
|
||||
// only when the display plugin changes (or in non-HMD modes when the user
|
||||
// changes the FOV manually, which right now I don't think they can.
|
||||
for_each_eye([&](Eye eye) {
|
||||
// For providing the stereo eye views, the HMD head pose has already been
|
||||
// applied to the avatar, so we need to get the difference between the head
|
||||
// For providing the stereo eye views, the HMD head pose has already been
|
||||
// applied to the avatar, so we need to get the difference between the head
|
||||
// pose applied to the avatar and the per eye pose, and use THAT as
|
||||
// the per-eye stereo matrix adjustment.
|
||||
mat4 eyeToHead = displayPlugin->getEyeToHeadTransform(eye);
|
||||
|
@ -1293,10 +1324,10 @@ void Application::paintGL() {
|
|||
mat4 eyeOffsetTransform = glm::translate(mat4(), eyeOffset * -1.0f * IPDScale);
|
||||
eyeOffsets[eye] = eyeOffsetTransform;
|
||||
|
||||
// Tell the plugin what pose we're using to render. In this case we're just using the
|
||||
// unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
|
||||
// for rotational timewarp. If we move to support positonal timewarp, we need to
|
||||
// ensure this contains the full pose composed with the eye offsets.
|
||||
// Tell the plugin what pose we're using to render. In this case we're just using the
|
||||
// unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
|
||||
// for rotational timewarp. If we move to support positonal timewarp, we need to
|
||||
// ensure this contains the full pose composed with the eye offsets.
|
||||
mat4 headPose = displayPlugin->getHeadPose();
|
||||
displayPlugin->setEyeRenderPose(eye, headPose);
|
||||
|
||||
|
@ -1343,7 +1374,7 @@ void Application::paintGL() {
|
|||
PerformanceTimer perfTimer("pluginOutput");
|
||||
auto primaryFbo = framebufferCache->getPrimaryFramebuffer();
|
||||
GLuint finalTexture = gpu::GLBackend::getTextureID(primaryFbo->getRenderBuffer(0));
|
||||
// Ensure the rendering context commands are completed when rendering
|
||||
// Ensure the rendering context commands are completed when rendering
|
||||
GLsync sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
// Ensure the sync object is flushed to the driver thread before releasing the context
|
||||
// CRITICAL for the mac driver apparently.
|
||||
|
@ -1355,6 +1386,7 @@ void Application::paintGL() {
|
|||
// Ensure all operations from the previous context are complete before we try to read the fbo
|
||||
glWaitSync(sync, 0, GL_TIMEOUT_IGNORED);
|
||||
glDeleteSync(sync);
|
||||
uint64_t displayStart = usecTimestampNow();
|
||||
|
||||
{
|
||||
PROFILE_RANGE(__FUNCTION__ "/pluginDisplay");
|
||||
|
@ -1367,6 +1399,10 @@ void Application::paintGL() {
|
|||
PerformanceTimer perfTimer("bufferSwap");
|
||||
displayPlugin->finishFrame();
|
||||
}
|
||||
uint64_t displayEnd = usecTimestampNow();
|
||||
const float displayPeriodUsec = (float)(displayEnd - displayStart); // usecs
|
||||
_lastPaintWait = displayPeriodUsec / (float)USECS_PER_SECOND;
|
||||
|
||||
}
|
||||
|
||||
{
|
||||
|
@ -1394,7 +1430,7 @@ void Application::audioMuteToggled() {
|
|||
}
|
||||
|
||||
void Application::faceTrackerMuteToggled() {
|
||||
|
||||
|
||||
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteFaceTracking);
|
||||
Q_CHECK_PTR(muteAction);
|
||||
bool isMuted = getSelectedFaceTracker()->isMuted();
|
||||
|
@ -1427,7 +1463,7 @@ void Application::resizeGL() {
|
|||
if (nullptr == _displayPlugin) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
auto displayPlugin = getActiveDisplayPlugin();
|
||||
// Set the desired FBO texture size. If it hasn't changed, this does nothing.
|
||||
// Otherwise, it must rebuild the FBOs
|
||||
|
@ -1437,14 +1473,14 @@ void Application::resizeGL() {
|
|||
_renderResolution = renderSize;
|
||||
DependencyManager::get<FramebufferCache>()->setFrameBufferSize(fromGlm(renderSize));
|
||||
}
|
||||
|
||||
|
||||
// FIXME the aspect ratio for stereo displays is incorrect based on this.
|
||||
float aspectRatio = displayPlugin->getRecommendedAspectRatio();
|
||||
_myCamera.setProjection(glm::perspective(glm::radians(_fieldOfView.get()), aspectRatio,
|
||||
DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
|
||||
// Possible change in aspect ratio
|
||||
loadViewFrustum(_myCamera, _viewFrustum);
|
||||
|
||||
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
auto uiSize = displayPlugin->getRecommendedUiSize();
|
||||
// Bit of a hack since there's no device pixel ratio change event I can find.
|
||||
|
@ -1613,7 +1649,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
|
|||
if (isMeta) {
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->load("Browser.qml");
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case Qt::Key_X:
|
||||
|
@ -2109,7 +2145,7 @@ void Application::wheelEvent(QWheelEvent* event) {
|
|||
if (_controllerScriptingInterface->isWheelCaptured()) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(KeyboardMouseDevice::NAME)) {
|
||||
_keyboardMouseDevice->wheelEvent(event);
|
||||
}
|
||||
|
@ -2227,7 +2263,7 @@ void Application::idle(uint64_t now) {
|
|||
_idleLoopStdev.reset();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
_overlayConductor.update(secondsSinceLastUpdate);
|
||||
|
||||
// check for any requested background downloads.
|
||||
|
@ -2481,7 +2517,7 @@ void Application::init() {
|
|||
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
|
||||
|
||||
qCDebug(interfaceapp) << "Loaded settings";
|
||||
|
||||
|
||||
Leapmotion::init();
|
||||
RealSense::init();
|
||||
|
||||
|
@ -2519,11 +2555,12 @@ void Application::init() {
|
|||
setAvatarUpdateThreading();
|
||||
}
|
||||
|
||||
const bool ENABLE_AVATAR_UPDATE_THREADING = false;
|
||||
void Application::setAvatarUpdateThreading() {
|
||||
setAvatarUpdateThreading(Menu::getInstance()->isOptionChecked(MenuOption::EnableAvatarUpdateThreading));
|
||||
setAvatarUpdateThreading(ENABLE_AVATAR_UPDATE_THREADING);
|
||||
}
|
||||
void Application::setRawAvatarUpdateThreading() {
|
||||
setRawAvatarUpdateThreading(Menu::getInstance()->isOptionChecked(MenuOption::EnableAvatarUpdateThreading));
|
||||
setRawAvatarUpdateThreading(ENABLE_AVATAR_UPDATE_THREADING);
|
||||
}
|
||||
void Application::setRawAvatarUpdateThreading(bool isThreaded) {
|
||||
if (_avatarUpdate) {
|
||||
|
@ -2539,19 +2576,13 @@ void Application::setAvatarUpdateThreading(bool isThreaded) {
|
|||
if (_avatarUpdate && (_avatarUpdate->isThreaded() == isThreaded)) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
auto myAvatar = getMyAvatar();
|
||||
bool isRigEnabled = myAvatar->getEnableRigAnimations();
|
||||
bool isGraphEnabled = myAvatar->getEnableAnimGraph();
|
||||
if (_avatarUpdate) {
|
||||
_avatarUpdate->terminate(); // Must be before we shutdown anim graph.
|
||||
}
|
||||
myAvatar->setEnableRigAnimations(false);
|
||||
myAvatar->setEnableAnimGraph(false);
|
||||
_avatarUpdate = new AvatarUpdate();
|
||||
_avatarUpdate->initialize(isThreaded);
|
||||
myAvatar->setEnableRigAnimations(isRigEnabled);
|
||||
myAvatar->setEnableAnimGraph(isGraphEnabled);
|
||||
}
|
||||
|
||||
void Application::updateLOD() {
|
||||
|
@ -2756,7 +2787,7 @@ void Application::updateDialogs(float deltaTime) {
|
|||
if(audioStatsDialog) {
|
||||
audioStatsDialog->update();
|
||||
}
|
||||
|
||||
|
||||
// Update bandwidth dialog, if any
|
||||
BandwidthDialog* bandwidthDialog = dialogsManager->getBandwidthDialog();
|
||||
if (bandwidthDialog) {
|
||||
|
@ -2834,14 +2865,8 @@ void Application::update(float deltaTime) {
|
|||
myAvatar->setDriveKeys(TRANSLATE_Y, userInputMapper->getActionState(controller::Action::TRANSLATE_Y));
|
||||
myAvatar->setDriveKeys(TRANSLATE_X, userInputMapper->getActionState(controller::Action::TRANSLATE_X));
|
||||
if (deltaTime > FLT_EPSILON) {
|
||||
// For rotations what we really want are meausures of "angles per second" (in order to prevent
|
||||
// fps-dependent spin rates) so we need to scale the units of the controller contribution.
|
||||
// (TODO?: maybe we should similarly scale ALL action state info, or change the expected behavior
|
||||
// controllers to provide a delta_per_second value rather than a raw delta.)
|
||||
const float EXPECTED_FRAME_RATE = 60.0f;
|
||||
float timeFactor = EXPECTED_FRAME_RATE * deltaTime;
|
||||
myAvatar->setDriveKeys(PITCH, -1.0f * userInputMapper->getActionState(controller::Action::PITCH) / timeFactor);
|
||||
myAvatar->setDriveKeys(YAW, -1.0f * userInputMapper->getActionState(controller::Action::YAW) / timeFactor);
|
||||
myAvatar->setDriveKeys(PITCH, -1.0f * userInputMapper->getActionState(controller::Action::PITCH));
|
||||
myAvatar->setDriveKeys(YAW, -1.0f * userInputMapper->getActionState(controller::Action::YAW));
|
||||
myAvatar->setDriveKeys(STEP_YAW, -1.0f * userInputMapper->getActionState(controller::Action::STEP_YAW));
|
||||
}
|
||||
}
|
||||
|
@ -2898,7 +2923,7 @@ void Application::update(float deltaTime) {
|
|||
_entities.getTree()->withWriteLock([&] {
|
||||
_physicsEngine->stepSimulation();
|
||||
});
|
||||
|
||||
|
||||
if (_physicsEngine->hasOutgoingChanges()) {
|
||||
_entities.getTree()->withWriteLock([&] {
|
||||
_entitySimulation.handleOutgoingChanges(_physicsEngine->getOutgoingChanges(), _physicsEngine->getSessionID());
|
||||
|
@ -2940,11 +2965,6 @@ void Application::update(float deltaTime) {
|
|||
loadViewFrustum(_myCamera, _viewFrustum);
|
||||
}
|
||||
|
||||
// Update animation debug draw renderer
|
||||
{
|
||||
AnimDebugDraw::getInstance().update();
|
||||
}
|
||||
|
||||
quint64 now = usecTimestampNow();
|
||||
|
||||
// Update my voxel servers with my current voxel query...
|
||||
|
@ -3032,10 +3052,10 @@ int Application::sendNackPackets() {
|
|||
foreach(const OCTREE_PACKET_SEQUENCE& missingNumber, missingSequenceNumbers) {
|
||||
nackPacketList->writePrimitive(missingNumber);
|
||||
}
|
||||
|
||||
|
||||
if (nackPacketList->getNumPackets()) {
|
||||
packetsSent += nackPacketList->getNumPackets();
|
||||
|
||||
|
||||
// send the packet list
|
||||
nodeList->sendPacketList(std::move(nackPacketList), *node);
|
||||
}
|
||||
|
@ -3504,6 +3524,8 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
|
|||
myAvatar->preRender(renderArgs);
|
||||
myAvatar->endRender();
|
||||
|
||||
// Update animation debug draw renderer
|
||||
AnimDebugDraw::getInstance().update();
|
||||
|
||||
activeRenderingThread = QThread::currentThread();
|
||||
PROFILE_RANGE(__FUNCTION__);
|
||||
|
@ -3641,7 +3663,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
|
|||
float fov = MIRROR_FIELD_OF_VIEW;
|
||||
|
||||
auto myAvatar = getMyAvatar();
|
||||
|
||||
|
||||
// bool eyeRelativeCamera = false;
|
||||
if (billboard) {
|
||||
fov = BILLBOARD_FIELD_OF_VIEW; // degees
|
||||
|
@ -3849,7 +3871,7 @@ void Application::nodeKilled(SharedNodePointer node) {
|
|||
Menu::getInstance()->getActionForOption(MenuOption::UploadAsset)->setEnabled(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void Application::trackIncomingOctreePacket(NLPacket& packet, SharedNodePointer sendingNode, bool wasStatsPacket) {
|
||||
|
||||
// Attempt to identify the sender from its address.
|
||||
|
@ -3874,7 +3896,7 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
|
|||
int statsMessageLength = 0;
|
||||
|
||||
const QUuid& nodeUUID = sendingNode->getUUID();
|
||||
|
||||
|
||||
// now that we know the node ID, let's add these stats to the stats for that node...
|
||||
_octreeServerSceneStats.withWriteLock([&] {
|
||||
OctreeSceneStats& octreeStats = _octreeServerSceneStats[nodeUUID];
|
||||
|
@ -4075,7 +4097,7 @@ bool Application::acceptURL(const QString& urlString, bool defaultUpload) {
|
|||
Qt::AutoConnection, Q_ARG(const QString&, urlString));
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
QUrl url(urlString);
|
||||
QHashIterator<QString, AcceptURLMethod> i(_acceptedExtensions);
|
||||
QString lowerPath = url.path().toLower();
|
||||
|
@ -4086,7 +4108,7 @@ bool Application::acceptURL(const QString& urlString, bool defaultUpload) {
|
|||
return (this->*method)(urlString);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return defaultUpload && askToUploadAsset(urlString);
|
||||
}
|
||||
|
||||
|
@ -4168,10 +4190,10 @@ bool Application::askToUploadAsset(const QString& filename) {
|
|||
QString("You don't have upload rights on that domain.\n\n"));
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
QUrl url { filename };
|
||||
if (auto upload = DependencyManager::get<AssetClient>()->createUpload(url.toLocalFile())) {
|
||||
|
||||
|
||||
QMessageBox messageBox;
|
||||
messageBox.setWindowTitle("Asset upload");
|
||||
messageBox.setText("You are about to upload the following file to the asset server:\n" +
|
||||
|
@ -4179,19 +4201,19 @@ bool Application::askToUploadAsset(const QString& filename) {
|
|||
messageBox.setInformativeText("Do you want to continue?");
|
||||
messageBox.setStandardButtons(QMessageBox::Ok | QMessageBox::Cancel);
|
||||
messageBox.setDefaultButton(QMessageBox::Ok);
|
||||
|
||||
|
||||
// Option to drop model in world for models
|
||||
if (filename.endsWith(FBX_EXTENSION) || filename.endsWith(OBJ_EXTENSION)) {
|
||||
auto checkBox = new QCheckBox(&messageBox);
|
||||
checkBox->setText("Add to scene");
|
||||
messageBox.setCheckBox(checkBox);
|
||||
}
|
||||
|
||||
|
||||
if (messageBox.exec() != QMessageBox::Ok) {
|
||||
upload->deleteLater();
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
// connect to the finished signal so we know when the AssetUpload is done
|
||||
if (messageBox.checkBox() && (messageBox.checkBox()->checkState() == Qt::Checked)) {
|
||||
// Custom behavior for models
|
||||
|
@ -4201,12 +4223,12 @@ bool Application::askToUploadAsset(const QString& filename) {
|
|||
&AssetUploadDialogFactory::getInstance(),
|
||||
&AssetUploadDialogFactory::handleUploadFinished);
|
||||
}
|
||||
|
||||
|
||||
// start the upload now
|
||||
upload->start();
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// display a message box with the error
|
||||
QMessageBox::warning(_window, "Failed Upload", QString("Failed to upload %1.\n\n").arg(filename));
|
||||
return false;
|
||||
|
@ -4214,20 +4236,20 @@ bool Application::askToUploadAsset(const QString& filename) {
|
|||
|
||||
void Application::modelUploadFinished(AssetUpload* upload, const QString& hash) {
|
||||
auto filename = QFileInfo(upload->getFilename()).fileName();
|
||||
|
||||
|
||||
if ((upload->getError() == AssetUpload::NoError) &&
|
||||
(filename.endsWith(FBX_EXTENSION) || filename.endsWith(OBJ_EXTENSION))) {
|
||||
|
||||
|
||||
auto entities = DependencyManager::get<EntityScriptingInterface>();
|
||||
|
||||
|
||||
EntityItemProperties properties;
|
||||
properties.setType(EntityTypes::Model);
|
||||
properties.setModelURL(QString("%1:%2.%3").arg(URL_SCHEME_ATP).arg(hash).arg(upload->getExtension()));
|
||||
properties.setPosition(_myCamera.getPosition() + _myCamera.getOrientation() * Vectors::FRONT * 2.0f);
|
||||
properties.setName(QUrl(upload->getFilename()).fileName());
|
||||
|
||||
|
||||
entities->addEntity(properties);
|
||||
|
||||
|
||||
upload->deleteLater();
|
||||
} else {
|
||||
AssetUploadDialogFactory::getInstance().handleUploadFinished(upload, hash);
|
||||
|
@ -4335,10 +4357,6 @@ void Application::stopAllScripts(bool restart) {
|
|||
it.value()->stop();
|
||||
qCDebug(interfaceapp) << "stopping script..." << it.key();
|
||||
}
|
||||
// HACK: ATM scripts cannot set/get their animation priorities, so we clear priorities
|
||||
// whenever a script stops in case it happened to have been setting joint rotations.
|
||||
// TODO: expose animation priorities and provide a layered animation control system.
|
||||
getMyAvatar()->clearJointAnimationPriorities();
|
||||
getMyAvatar()->clearScriptableSettings();
|
||||
}
|
||||
|
||||
|
@ -4354,10 +4372,6 @@ bool Application::stopScript(const QString& scriptHash, bool restart) {
|
|||
scriptEngine->stop();
|
||||
stoppedScript = true;
|
||||
qCDebug(interfaceapp) << "stopping script..." << scriptHash;
|
||||
// HACK: ATM scripts cannot set/get their animation priorities, so we clear priorities
|
||||
// whenever a script stops in case it happened to have been setting joint rotations.
|
||||
// TODO: expose animation priorities and provide a layered animation control system.
|
||||
getMyAvatar()->clearJointAnimationPriorities();
|
||||
}
|
||||
if (_scriptEnginesHash.empty()) {
|
||||
getMyAvatar()->clearScriptableSettings();
|
||||
|
@ -4516,7 +4530,7 @@ void Application::takeSnapshot() {
|
|||
_snapshotShareDialog = new SnapshotShareDialog(fileName, _glWidget);
|
||||
}
|
||||
_snapshotShareDialog->show();
|
||||
|
||||
|
||||
}
|
||||
|
||||
float Application::getRenderResolutionScale() const {
|
||||
|
@ -4761,8 +4775,8 @@ void Application::updateDisplayMode() {
|
|||
return;
|
||||
}
|
||||
|
||||
// Some plugins *cough* Oculus *cough* process message events from inside their
|
||||
// display function, and we don't want to change the display plugin underneath
|
||||
// Some plugins *cough* Oculus *cough* process message events from inside their
|
||||
// display function, and we don't want to change the display plugin underneath
|
||||
// the paintGL call, so we need to guard against that
|
||||
if (_inPaint) {
|
||||
qDebug() << "Deferring plugin switch until out of painting";
|
||||
|
@ -4796,14 +4810,14 @@ void Application::updateDisplayMode() {
|
|||
|
||||
oldDisplayPlugin = _displayPlugin;
|
||||
_displayPlugin = newDisplayPlugin;
|
||||
|
||||
|
||||
// If the displayPlugin is a screen based HMD, then it will want the HMDTools displayed
|
||||
// Direct Mode HMDs (like windows Oculus) will be isHmd() but will have a screen of -1
|
||||
bool newPluginWantsHMDTools = newDisplayPlugin ?
|
||||
(newDisplayPlugin->isHmd() && (newDisplayPlugin->getHmdScreen() >= 0)) : false;
|
||||
bool oldPluginWantedHMDTools = oldDisplayPlugin ?
|
||||
bool oldPluginWantedHMDTools = oldDisplayPlugin ?
|
||||
(oldDisplayPlugin->isHmd() && (oldDisplayPlugin->getHmdScreen() >= 0)) : false;
|
||||
|
||||
|
||||
// Only show the hmd tools after the correct plugin has
|
||||
// been activated so that it's UI is setup correctly
|
||||
if (newPluginWantsHMDTools) {
|
||||
|
@ -4813,7 +4827,7 @@ void Application::updateDisplayMode() {
|
|||
if (oldDisplayPlugin) {
|
||||
oldDisplayPlugin->deactivate();
|
||||
_offscreenContext->makeCurrent();
|
||||
|
||||
|
||||
// if the old plugin was HMD and the new plugin is not HMD, then hide our hmdtools
|
||||
if (oldPluginWantedHMDTools && !newPluginWantsHMDTools) {
|
||||
DependencyManager::get<DialogsManager>()->hmdTools(false);
|
||||
|
@ -4946,7 +4960,7 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float de
|
|||
rawVelocity = glm::vec3(0.0f);
|
||||
}
|
||||
palm.setRawVelocity(rawVelocity); // meters/sec
|
||||
|
||||
|
||||
// Angular Velocity of Palm
|
||||
glm::quat deltaRotation = rotation * glm::inverse(palm.getRawRotation());
|
||||
glm::vec3 angularVelocity(0.0f);
|
||||
|
@ -5026,7 +5040,7 @@ void Application::emulateMouse(Hand* hand, float click, float shift, HandData::H
|
|||
pos.setY(canvasSize.y / 2.0f + cursorRange * yAngle);
|
||||
|
||||
}
|
||||
|
||||
|
||||
//If we are off screen then we should stop processing, and if a trigger or bumper is pressed,
|
||||
//we should unpress them.
|
||||
if (pos.x() == INT_MAX) {
|
||||
|
|
|
@ -159,6 +159,14 @@ public:
|
|||
bool isForeground() const { return _isForeground; }
|
||||
|
||||
float getFps() const { return _fps; }
|
||||
float const HMD_TARGET_FRAME_RATE = 75.0f;
|
||||
float const DESKTOP_TARGET_FRAME_RATE = 60.0f;
|
||||
float getTargetFrameRate() { return isHMDMode() ? HMD_TARGET_FRAME_RATE : DESKTOP_TARGET_FRAME_RATE; }
|
||||
float getTargetFramePeriod() { return isHMDMode() ? 1.0f / HMD_TARGET_FRAME_RATE : 1.0f / DESKTOP_TARGET_FRAME_RATE; } // same as 1/getTargetFrameRate, but w/compile-time division
|
||||
float getLastInstanteousFps() const { return _lastInstantaneousFps; }
|
||||
float getLastPaintWait() const { return _lastPaintWait; };
|
||||
float getLastDeducedNonVSyncFps() const { return _lastDeducedNonVSyncFps; }
|
||||
void setMarginForDeducedFramePeriod(float newValue) { _marginForDeducedFramePeriod = newValue; }
|
||||
|
||||
float getFieldOfView() { return _fieldOfView.get(); }
|
||||
void setFieldOfView(float fov);
|
||||
|
@ -429,6 +437,10 @@ private:
|
|||
float _fps;
|
||||
QElapsedTimer _timerStart;
|
||||
QElapsedTimer _lastTimeUpdated;
|
||||
float _lastInstantaneousFps { 0.0f };
|
||||
float _lastPaintWait { 0.0f };
|
||||
float _lastDeducedNonVSyncFps { 0.0f };
|
||||
float _marginForDeducedFramePeriod{ 0.002f }; // 2ms, adjustable
|
||||
|
||||
ShapeManager _shapeManager;
|
||||
PhysicalEntitySimulation _entitySimulation;
|
||||
|
|
|
@ -145,8 +145,6 @@ Menu::Menu() {
|
|||
|
||||
addActionToQMenuAndActionHash(editMenu, MenuOption::Attachments, 0,
|
||||
dialogsManager.data(), SLOT(editAttachments()));
|
||||
addActionToQMenuAndActionHash(editMenu, MenuOption::Animations, 0,
|
||||
dialogsManager.data(), SLOT(editAnimations()));
|
||||
|
||||
MenuWrapper* toolsMenu = addMenu("Tools");
|
||||
addActionToQMenuAndActionHash(toolsMenu, MenuOption::ScriptEditor, Qt::ALT | Qt::Key_S,
|
||||
|
@ -443,16 +441,12 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::FixGaze, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::EnableAvatarUpdateThreading, 0, false,
|
||||
qApp, SLOT(setAvatarUpdateThreading(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::EnableRigAnimations, 0, false,
|
||||
avatar, SLOT(setEnableRigAnimations(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::EnableAnimGraph, 0, true,
|
||||
avatar, SLOT(setEnableAnimGraph(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AnimDebugDrawBindPose, 0, false,
|
||||
avatar, SLOT(setEnableDebugDrawBindPose(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AnimDebugDrawDefaultPose, 0, false,
|
||||
avatar, SLOT(setEnableDebugDrawDefaultPose(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AnimDebugDrawAnimPose, 0, false,
|
||||
avatar, SLOT(setEnableDebugDrawAnimPose(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AnimDebugDrawPosition, 0, false,
|
||||
avatar, SLOT(setEnableDebugDrawPosition(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::MeshVisible, 0, true,
|
||||
avatar, SLOT(setEnableMeshVisible(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::DisableEyelidAdjustment, 0, false);
|
||||
|
@ -464,7 +458,6 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::EnableHandMouseInput, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::LowVelocityFilter, 0, true,
|
||||
qApp, SLOT(setLowVelocityFilter(bool)));
|
||||
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::ShowIKConstraints, 0, false);
|
||||
|
||||
MenuWrapper* leapOptionsMenu = handOptionsMenu->addMenu("Leap Motion");
|
||||
addCheckableActionToQMenuAndActionHash(leapOptionsMenu, MenuOption::LeapMotionOnHMD, 0, false);
|
||||
|
@ -949,6 +942,7 @@ void Menu::addMenuItem(const MenuItemProperties& properties) {
|
|||
QShortcut* shortcut = NULL;
|
||||
if (!properties.shortcutKeySequence.isEmpty()) {
|
||||
shortcut = new QShortcut(properties.shortcutKeySequence, this);
|
||||
shortcut->setContext(Qt::WidgetWithChildrenShortcut);
|
||||
}
|
||||
|
||||
// check for positioning requests
|
||||
|
|
|
@ -131,7 +131,8 @@ namespace MenuOption {
|
|||
const QString AddressBar = "Show Address Bar";
|
||||
const QString Animations = "Animations...";
|
||||
const QString AnimDebugDrawAnimPose = "Debug Draw Animation";
|
||||
const QString AnimDebugDrawBindPose = "Debug Draw Bind Pose";
|
||||
const QString AnimDebugDrawDefaultPose = "Debug Draw Default Pose";
|
||||
const QString AnimDebugDrawPosition= "Debug Draw Position";
|
||||
const QString Antialiasing = "Antialiasing";
|
||||
const QString AssetMigration = "ATP Asset Migration";
|
||||
const QString Atmosphere = "Atmosphere";
|
||||
|
@ -188,10 +189,7 @@ namespace MenuOption {
|
|||
const QString EchoServerAudio = "Echo Server Audio";
|
||||
const QString EditEntitiesHelp = "Edit Entities Help...";
|
||||
const QString Enable3DTVMode = "Enable 3DTV Mode";
|
||||
const QString EnableAvatarUpdateThreading = "Enable Avatar Update Threading";
|
||||
const QString EnableAnimGraph = "Enable Anim Graph";
|
||||
const QString EnableCharacterController = "Enable avatar collisions";
|
||||
const QString EnableRigAnimations = "Enable Rig Animations";
|
||||
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
|
||||
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
|
||||
const QString ExpandOtherAvatarTiming = "Expand /otherAvatar";
|
||||
|
@ -271,7 +269,6 @@ namespace MenuOption {
|
|||
const QString ScriptedMotorControl = "Enable Scripted Motor Control";
|
||||
const QString ShowDSConnectTable = "Show Domain Connection Timing";
|
||||
const QString ShowBordersEntityNodes = "Show Entity Nodes";
|
||||
const QString ShowIKConstraints = "Show IK Constraints";
|
||||
const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
|
||||
const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
|
||||
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode";
|
||||
|
|
|
@ -40,11 +40,10 @@
|
|||
#include "Menu.h"
|
||||
#include "ModelReferential.h"
|
||||
#include "Physics.h"
|
||||
#include "Recorder.h"
|
||||
#include "Util.h"
|
||||
#include "world.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "EntityRig.h"
|
||||
#include <Rig.h>
|
||||
|
||||
using namespace std;
|
||||
|
||||
|
@ -184,9 +183,31 @@ void Avatar::simulate(float deltaTime) {
|
|||
if (_shouldRenderBillboard) {
|
||||
if (getLODDistance() < BILLBOARD_LOD_DISTANCE * (1.0f - BILLBOARD_HYSTERESIS_PROPORTION)) {
|
||||
_shouldRenderBillboard = false;
|
||||
qCDebug(interfaceapp) << "Unbillboarding" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for LOD" << getLODDistance();
|
||||
}
|
||||
} else if (getLODDistance() > BILLBOARD_LOD_DISTANCE * (1.0f + BILLBOARD_HYSTERESIS_PROPORTION)) {
|
||||
_shouldRenderBillboard = true;
|
||||
qCDebug(interfaceapp) << "Billboarding" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for LOD" << getLODDistance();
|
||||
}
|
||||
|
||||
const bool isControllerLogging = DependencyManager::get<AvatarManager>()->getRenderDistanceControllerIsLogging();
|
||||
float renderDistance = DependencyManager::get<AvatarManager>()->getRenderDistance();
|
||||
const float SKIP_HYSTERESIS_PROPORTION = isControllerLogging ? 0.0f : BILLBOARD_HYSTERESIS_PROPORTION;
|
||||
float distance = glm::distance(qApp->getCamera()->getPosition(), _position);
|
||||
if (_shouldSkipRender) {
|
||||
if (distance < renderDistance * (1.0f - SKIP_HYSTERESIS_PROPORTION)) {
|
||||
_shouldSkipRender = false;
|
||||
_skeletonModel.setVisibleInScene(true, qApp->getMain3DScene());
|
||||
if (!isControllerLogging) { // Test for isMyAvatar is prophylactic. Never occurs in current code.
|
||||
qCDebug(interfaceapp) << "Rerendering" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for distance" << renderDistance;
|
||||
}
|
||||
}
|
||||
} else if (distance > renderDistance * (1.0f + SKIP_HYSTERESIS_PROPORTION)) {
|
||||
_shouldSkipRender = true;
|
||||
_skeletonModel.setVisibleInScene(false, qApp->getMain3DScene());
|
||||
if (!isControllerLogging) {
|
||||
qCDebug(interfaceapp) << "Unrendering" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for distance" << renderDistance;
|
||||
}
|
||||
}
|
||||
|
||||
// simple frustum check
|
||||
|
@ -199,15 +220,10 @@ void Avatar::simulate(float deltaTime) {
|
|||
getHand()->simulate(deltaTime, false);
|
||||
}
|
||||
|
||||
if (!_shouldRenderBillboard && inViewFrustum) {
|
||||
if (!_shouldRenderBillboard && !_shouldSkipRender && inViewFrustum) {
|
||||
{
|
||||
PerformanceTimer perfTimer("skeleton");
|
||||
for (int i = 0; i < _jointData.size(); i++) {
|
||||
const JointData& data = _jointData.at(i);
|
||||
_skeletonModel.setJointRotation(i, data.rotationSet, data.rotation, 1.0f);
|
||||
_skeletonModel.setJointTranslation(i, data.translationSet, data.translation, 1.0f);
|
||||
}
|
||||
|
||||
_skeletonModel.getRig()->copyJointsFromJointData(_jointData);
|
||||
_skeletonModel.simulate(deltaTime, _hasNewJointRotations || _hasNewJointTranslations);
|
||||
simulateAttachments(deltaTime);
|
||||
_hasNewJointRotations = false;
|
||||
|
@ -251,7 +267,7 @@ bool Avatar::isLookingAtMe(AvatarSharedPointer avatar) {
|
|||
const float HEAD_SPHERE_RADIUS = 0.1f;
|
||||
glm::vec3 theirLookAt = dynamic_pointer_cast<Avatar>(avatar)->getHead()->getLookAtPosition();
|
||||
glm::vec3 myEyePosition = getHead()->getEyePosition();
|
||||
|
||||
|
||||
return glm::distance(theirLookAt, myEyePosition) <= (HEAD_SPHERE_RADIUS * getScale());
|
||||
}
|
||||
|
||||
|
@ -501,8 +517,8 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
|
|||
eyeDiameter = DEFAULT_EYE_DIAMETER;
|
||||
}
|
||||
|
||||
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphereInstance(batch,
|
||||
Transform(transform).postScale(eyeDiameter * _scale / 2.0f + RADIUS_INCREMENT),
|
||||
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphereInstance(batch,
|
||||
Transform(transform).postScale(eyeDiameter * _scale / 2.0f + RADIUS_INCREMENT),
|
||||
glm::vec4(LOOKING_AT_ME_COLOR, alpha));
|
||||
|
||||
position = getHead()->getRightEyePosition();
|
||||
|
@ -512,7 +528,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
|
|||
eyeDiameter = DEFAULT_EYE_DIAMETER;
|
||||
}
|
||||
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphereInstance(batch,
|
||||
Transform(transform).postScale(eyeDiameter * _scale / 2.0f + RADIUS_INCREMENT),
|
||||
Transform(transform).postScale(eyeDiameter * _scale / 2.0f + RADIUS_INCREMENT),
|
||||
glm::vec4(LOOKING_AT_ME_COLOR, alpha));
|
||||
|
||||
}
|
||||
|
@ -560,7 +576,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
|
|||
if (!isMyAvatar() || cameraMode != CAMERA_MODE_FIRST_PERSON) {
|
||||
auto& frustum = *renderArgs->_viewFrustum;
|
||||
auto textPosition = getDisplayNamePosition();
|
||||
|
||||
|
||||
if (frustum.pointInFrustum(textPosition, true) == ViewFrustum::INSIDE) {
|
||||
renderDisplayName(batch, frustum, textPosition);
|
||||
}
|
||||
|
@ -616,7 +632,7 @@ void Avatar::fixupModelsInScene() {
|
|||
void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, float glowLevel) {
|
||||
|
||||
fixupModelsInScene();
|
||||
|
||||
|
||||
{
|
||||
if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
|
||||
// render the billboard until both models are loaded
|
||||
|
@ -645,7 +661,7 @@ void Avatar::simulateAttachments(float deltaTime) {
|
|||
glm::vec3 jointPosition;
|
||||
glm::quat jointRotation;
|
||||
if (_skeletonModel.getJointPositionInWorldFrame(jointIndex, jointPosition) &&
|
||||
_skeletonModel.getJointCombinedRotation(jointIndex, jointRotation)) {
|
||||
_skeletonModel.getJointRotationInWorldFrame(jointIndex, jointRotation)) {
|
||||
model->setTranslation(jointPosition + jointRotation * attachment.translation * _scale);
|
||||
model->setRotation(jointRotation * attachment.rotation);
|
||||
model->setScaleToFit(true, _scale * attachment.scale, true); // hack to force rescale
|
||||
|
@ -680,7 +696,7 @@ void Avatar::renderBillboard(RenderArgs* renderArgs) {
|
|||
glm::quat rotation = getOrientation();
|
||||
glm::vec3 cameraVector = glm::inverse(rotation) * (qApp->getCamera()->getPosition() - _position);
|
||||
rotation = rotation * glm::angleAxis(atan2f(-cameraVector.x, -cameraVector.z), glm::vec3(0.0f, 1.0f, 0.0f));
|
||||
|
||||
|
||||
// compute the size from the billboard camera parameters and scale
|
||||
float size = getBillboardSize();
|
||||
|
||||
|
@ -693,7 +709,7 @@ void Avatar::renderBillboard(RenderArgs* renderArgs) {
|
|||
glm::vec2 bottomRight(1.0f, 1.0f);
|
||||
glm::vec2 texCoordTopLeft(0.0f, 0.0f);
|
||||
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
|
||||
|
||||
|
||||
gpu::Batch& batch = *renderArgs->_batch;
|
||||
PROFILE_RANGE_BATCH(batch, __FUNCTION__);
|
||||
batch.setResourceTexture(0, _billboardTexture->getGPUTexture());
|
||||
|
@ -726,29 +742,29 @@ glm::vec3 Avatar::getDisplayNamePosition() const {
|
|||
glm::vec3 namePosition(0.0f);
|
||||
glm::vec3 bodyUpDirection = getBodyUpDirection();
|
||||
DEBUG_VALUE("bodyUpDirection =", bodyUpDirection);
|
||||
|
||||
|
||||
if (getSkeletonModel().getNeckPosition(namePosition)) {
|
||||
float headHeight = getHeadHeight();
|
||||
DEBUG_VALUE("namePosition =", namePosition);
|
||||
DEBUG_VALUE("headHeight =", headHeight);
|
||||
|
||||
|
||||
static const float SLIGHTLY_ABOVE = 1.1f;
|
||||
namePosition += bodyUpDirection * headHeight * SLIGHTLY_ABOVE;
|
||||
} else {
|
||||
const float HEAD_PROPORTION = 0.75f;
|
||||
float billboardSize = getBillboardSize();
|
||||
|
||||
|
||||
DEBUG_VALUE("_position =", _position);
|
||||
DEBUG_VALUE("billboardSize =", billboardSize);
|
||||
namePosition = _position + bodyUpDirection * (billboardSize * HEAD_PROPORTION);
|
||||
}
|
||||
|
||||
|
||||
if (glm::any(glm::isnan(namePosition)) || glm::any(glm::isinf(namePosition))) {
|
||||
qCWarning(interfaceapp) << "Invalid display name position" << namePosition
|
||||
<< ", setting is to (0.0f, 0.5f, 0.0f)";
|
||||
namePosition = glm::vec3(0.0f, 0.5f, 0.0f);
|
||||
}
|
||||
|
||||
|
||||
return namePosition;
|
||||
}
|
||||
|
||||
|
@ -756,16 +772,16 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, cons
|
|||
Q_ASSERT_X(frustum.pointInFrustum(textPosition, true) == ViewFrustum::INSIDE,
|
||||
"Avatar::calculateDisplayNameTransform", "Text not in viewfrustum.");
|
||||
glm::vec3 toFrustum = frustum.getPosition() - textPosition;
|
||||
|
||||
|
||||
// Compute orientation
|
||||
// If x and z are 0, atan(x, z) adais undefined, so default to 0 degrees
|
||||
const float yawRotation = (toFrustum.x == 0.0f && toFrustum.z == 0.0f) ? 0.0f : glm::atan(toFrustum.x, toFrustum.z);
|
||||
glm::quat orientation = glm::quat(glm::vec3(0.0f, yawRotation, 0.0f));
|
||||
|
||||
|
||||
// Compute correct scale to apply
|
||||
static const float DESIRED_HEIGHT_RAD = glm::radians(1.5f);
|
||||
float scale = glm::length(toFrustum) * glm::tan(DESIRED_HEIGHT_RAD);
|
||||
|
||||
|
||||
// Set transform
|
||||
Transform result;
|
||||
result.setTranslation(textPosition);
|
||||
|
@ -773,7 +789,7 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, cons
|
|||
result.setScale(scale);
|
||||
// raise by half the scale up so that textPosition be the bottom
|
||||
result.postTranslate(Vectors::UP / 2.0f);
|
||||
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -801,14 +817,14 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, co
|
|||
}
|
||||
renderedDisplayName += statsFormat.arg(QString::number(kilobitsPerSecond, 'f', 2)).arg(getReceiveRate());
|
||||
}
|
||||
|
||||
|
||||
// Compute display name extent/position offset
|
||||
const glm::vec2 extent = renderer->computeExtent(renderedDisplayName);
|
||||
if (!glm::any(glm::isCompNull(extent, EPSILON))) {
|
||||
const QRect nameDynamicRect = QRect(0, 0, (int)extent.x, (int)extent.y);
|
||||
const int text_x = -nameDynamicRect.width() / 2;
|
||||
const int text_y = -nameDynamicRect.height() / 2;
|
||||
|
||||
|
||||
// Compute background position/size
|
||||
static const float SLIGHTLY_IN_FRONT = 0.1f;
|
||||
static const float BORDER_RELATIVE_SIZE = 0.1f;
|
||||
|
@ -819,12 +835,12 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, co
|
|||
const int width = nameDynamicRect.width() + 2.0f * border;
|
||||
const int height = nameDynamicRect.height() + 2.0f * border;
|
||||
const int bevelDistance = BEVEL_FACTOR * height;
|
||||
|
||||
|
||||
// Display name and background colors
|
||||
glm::vec4 textColor(0.93f, 0.93f, 0.93f, _displayNameAlpha);
|
||||
glm::vec4 backgroundColor(0.2f, 0.2f, 0.2f,
|
||||
(_displayNameAlpha / DISPLAYNAME_ALPHA) * DISPLAYNAME_BACKGROUND_ALPHA);
|
||||
|
||||
|
||||
// Compute display name transform
|
||||
auto textTransform = calculateDisplayNameTransform(frustum, textPosition);
|
||||
// Test on extent above insures abs(height) > 0.0f
|
||||
|
@ -840,7 +856,7 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& frustum, co
|
|||
|
||||
// Render actual name
|
||||
QByteArray nameUTF8 = renderedDisplayName.toLocal8Bit();
|
||||
|
||||
|
||||
// Render text slightly in front to avoid z-fighting
|
||||
textTransform.postTranslate(glm::vec3(0.0f, 0.0f, SLIGHTLY_IN_FRONT * renderer->getFontSize()));
|
||||
batch.setModelTransform(textTransform);
|
||||
|
@ -942,52 +958,6 @@ glm::vec3 Avatar::getJointPosition(const QString& name) const {
|
|||
return position;
|
||||
}
|
||||
|
||||
glm::quat Avatar::getJointCombinedRotation(int index) const {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
glm::quat rotation;
|
||||
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "getJointCombinedRotation", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(glm::quat, rotation), Q_ARG(const int, index));
|
||||
return rotation;
|
||||
}
|
||||
glm::quat rotation;
|
||||
_skeletonModel.getJointCombinedRotation(index, rotation);
|
||||
return rotation;
|
||||
}
|
||||
|
||||
glm::quat Avatar::getJointCombinedRotation(const QString& name) const {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
glm::quat rotation;
|
||||
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "getJointCombinedRotation", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(glm::quat, rotation), Q_ARG(const QString&, name));
|
||||
return rotation;
|
||||
}
|
||||
glm::quat rotation;
|
||||
_skeletonModel.getJointCombinedRotation(getJointIndex(name), rotation);
|
||||
return rotation;
|
||||
}
|
||||
|
||||
const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f;
|
||||
|
||||
void Avatar::setJointModelPositionAndOrientation(int index, glm::vec3 position, const glm::quat& rotation) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "setJointModelPositionAndOrientation",
|
||||
Qt::AutoConnection, Q_ARG(const int, index), Q_ARG(const glm::vec3, position),
|
||||
Q_ARG(const glm::quat&, rotation));
|
||||
} else {
|
||||
_skeletonModel.inverseKinematics(index, position, rotation, SCRIPT_PRIORITY);
|
||||
}
|
||||
}
|
||||
|
||||
void Avatar::setJointModelPositionAndOrientation(const QString& name, glm::vec3 position, const glm::quat& rotation) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "setJointModelPositionAndOrientation",
|
||||
Qt::AutoConnection, Q_ARG(const QString&, name), Q_ARG(const glm::vec3, position),
|
||||
Q_ARG(const glm::quat&, rotation));
|
||||
} else {
|
||||
_skeletonModel.inverseKinematics(getJointIndex(name), position, rotation, SCRIPT_PRIORITY);
|
||||
}
|
||||
}
|
||||
|
||||
void Avatar::scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const {
|
||||
//Scale a world space vector as if it was relative to the position
|
||||
positionToScale = _position + _scale * (positionToScale - _position);
|
||||
|
@ -1016,7 +986,7 @@ void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
|
|||
if (_unusedAttachments.size() > 0) {
|
||||
model = _unusedAttachments.takeFirst();
|
||||
} else {
|
||||
model = new Model(std::make_shared<EntityRig>(), this);
|
||||
model = new Model(std::make_shared<Rig>(), this);
|
||||
}
|
||||
model->init();
|
||||
_attachmentModels.append(model);
|
||||
|
@ -1187,7 +1157,7 @@ void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
|
|||
|
||||
// virtual
|
||||
void Avatar::rebuildSkeletonBody() {
|
||||
DependencyManager::get<AvatarManager>()->updateAvatarPhysicsShape(getSessionUUID());
|
||||
DependencyManager::get<AvatarManager>()->updateAvatarPhysicsShape(this);
|
||||
}
|
||||
|
||||
glm::vec3 Avatar::getLeftPalmPosition() {
|
||||
|
|
|
@ -26,6 +26,7 @@
|
|||
#include "Head.h"
|
||||
#include "SkeletonModel.h"
|
||||
#include "world.h"
|
||||
#include "Rig.h"
|
||||
|
||||
namespace render {
|
||||
template <> const ItemKey payloadGetKey(const AvatarSharedPointer& avatar);
|
||||
|
@ -65,7 +66,7 @@ public:
|
|||
|
||||
typedef render::Payload<AvatarData> Payload;
|
||||
typedef std::shared_ptr<render::Item::PayloadInterface> PayloadPointer;
|
||||
|
||||
|
||||
void init();
|
||||
void simulate(float deltaTime);
|
||||
|
||||
|
@ -100,20 +101,20 @@ public:
|
|||
float getLODDistance() const;
|
||||
|
||||
virtual bool isMyAvatar() const { return false; }
|
||||
|
||||
|
||||
virtual QVector<glm::quat> getJointRotations() const;
|
||||
virtual glm::quat getJointRotation(int index) const;
|
||||
virtual glm::vec3 getJointTranslation(int index) const;
|
||||
virtual int getJointIndex(const QString& name) const;
|
||||
virtual QStringList getJointNames() const;
|
||||
|
||||
|
||||
virtual void setFaceModelURL(const QUrl& faceModelURL);
|
||||
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
|
||||
virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData);
|
||||
virtual void setBillboard(const QByteArray& billboard);
|
||||
|
||||
void setShowDisplayName(bool showDisplayName);
|
||||
|
||||
|
||||
virtual int parseDataFromBuffer(const QByteArray& buffer);
|
||||
|
||||
static void renderJointConnectingCone( gpu::Batch& batch, glm::vec3 position1, glm::vec3 position2,
|
||||
|
@ -124,22 +125,17 @@ public:
|
|||
Q_INVOKABLE void setSkeletonOffset(const glm::vec3& offset);
|
||||
Q_INVOKABLE glm::vec3 getSkeletonOffset() { return _skeletonOffset; }
|
||||
virtual glm::vec3 getSkeletonPosition() const;
|
||||
|
||||
|
||||
Q_INVOKABLE glm::vec3 getJointPosition(int index) const;
|
||||
Q_INVOKABLE glm::vec3 getJointPosition(const QString& name) const;
|
||||
Q_INVOKABLE glm::quat getJointCombinedRotation(int index) const;
|
||||
Q_INVOKABLE glm::quat getJointCombinedRotation(const QString& name) const;
|
||||
|
||||
Q_INVOKABLE void setJointModelPositionAndOrientation(int index, const glm::vec3 position, const glm::quat& rotation);
|
||||
Q_INVOKABLE void setJointModelPositionAndOrientation(const QString& name, const glm::vec3 position,
|
||||
const glm::quat& rotation);
|
||||
|
||||
Q_INVOKABLE glm::vec3 getNeckPosition() const;
|
||||
|
||||
Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }
|
||||
Q_INVOKABLE glm::vec3 getAngularVelocity() const { return _angularVelocity; }
|
||||
Q_INVOKABLE glm::vec3 getAngularAcceleration() const { return _angularAcceleration; }
|
||||
|
||||
Q_INVOKABLE bool getShouldRender() const { return !_shouldSkipRender; }
|
||||
|
||||
/// Scales a world space position vector relative to the avatar position and scale
|
||||
/// \param vector position to be scaled. Will store the result
|
||||
void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;
|
||||
|
@ -173,7 +169,7 @@ protected:
|
|||
QVector<Model*> _attachmentModels;
|
||||
QVector<Model*> _attachmentsToRemove;
|
||||
QVector<Model*> _unusedAttachments;
|
||||
float _bodyYawDelta;
|
||||
float _bodyYawDelta; // degrees/sec
|
||||
|
||||
// These position histories and derivatives are in the world-frame.
|
||||
// The derivatives are the MEASURED results of all external and internal forces
|
||||
|
@ -194,9 +190,9 @@ protected:
|
|||
glm::vec3 _worldUpDirection;
|
||||
float _stringLength;
|
||||
bool _moving; ///< set when position is changing
|
||||
|
||||
|
||||
bool isLookingAtMe(AvatarSharedPointer avatar);
|
||||
|
||||
|
||||
// protected methods...
|
||||
glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
|
||||
glm::vec3 getBodyUpDirection() const { return getOrientation() * IDENTITY_UP; }
|
||||
|
@ -226,6 +222,7 @@ private:
|
|||
bool _initialized;
|
||||
NetworkTexturePointer _billboardTexture;
|
||||
bool _shouldRenderBillboard;
|
||||
bool _shouldSkipRender { false };
|
||||
bool _isLookAtTarget;
|
||||
|
||||
void renderBillboard(RenderArgs* renderArgs);
|
||||
|
|
|
@ -327,4 +327,6 @@ void AvatarActionHold::deserialize(QByteArray serializedArguments) {
|
|||
|
||||
_active = true;
|
||||
});
|
||||
|
||||
forceBodyNonStatic();
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@
|
|||
#include "Menu.h"
|
||||
#include "MyAvatar.h"
|
||||
#include "SceneScriptingInterface.h"
|
||||
#include "AvatarRig.h"
|
||||
#include <Rig.h>
|
||||
|
||||
// 70 times per second - target is 60hz, but this helps account for any small deviations
|
||||
// in the update loop
|
||||
|
@ -66,7 +66,7 @@ AvatarManager::AvatarManager(QObject* parent) :
|
|||
{
|
||||
// register a meta type for the weak pointer we'll use for the owning avatar mixer for each avatar
|
||||
qRegisterMetaType<QWeakPointer<Node> >("NodeWeakPointer");
|
||||
_myAvatar = std::make_shared<MyAvatar>(std::make_shared<AvatarRig>());
|
||||
_myAvatar = std::make_shared<MyAvatar>(std::make_shared<Rig>());
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
packetReceiver.registerListener(PacketType::BulkAvatarData, this, "processAvatarDataPacket");
|
||||
|
@ -90,6 +90,21 @@ void AvatarManager::init() {
|
|||
_myAvatar->addToScene(_myAvatar, scene, pendingChanges);
|
||||
}
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
|
||||
const float target_fps = qApp->getTargetFrameRate();
|
||||
_renderDistanceController.setMeasuredValueSetpoint(target_fps);
|
||||
const float SMALLEST_REASONABLE_HORIZON = 5.0f; // meters
|
||||
_renderDistanceController.setControlledValueHighLimit(1.0f / SMALLEST_REASONABLE_HORIZON);
|
||||
_renderDistanceController.setControlledValueLowLimit(1.0f / (float) TREE_SCALE);
|
||||
// Advice for tuning parameters:
|
||||
// See PIDController.h. There's a section on tuning in the reference.
|
||||
// Turn on logging with the following (or from js with AvatarList.setRenderDistanceControllerHistory("avatar render", 300))
|
||||
//_renderDistanceController.setHistorySize("avatar render", target_fps * 4);
|
||||
// Note that extra logging/hysteresis is turned off in Avatar.cpp when the above logging is on.
|
||||
_renderDistanceController.setKP(0.0008f); // Usually about 0.6 of largest that doesn't oscillate when other parameters 0.
|
||||
_renderDistanceController.setKI(0.0006f); // Big enough to bring us to target with the above KP.
|
||||
_renderDistanceController.setKD(0.000001f); // A touch of kd increases the speed by which we get there.
|
||||
|
||||
}
|
||||
|
||||
void AvatarManager::updateMyAvatar(float deltaTime) {
|
||||
|
@ -110,35 +125,56 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
|
|||
}
|
||||
|
||||
void AvatarManager::updateOtherAvatars(float deltaTime) {
|
||||
// lock the hash for read to check the size
|
||||
QReadLocker lock(&_hashLock);
|
||||
|
||||
if (_avatarHash.size() < 2 && _avatarFades.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
lock.unlock();
|
||||
|
||||
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
|
||||
PerformanceWarning warn(showWarnings, "Application::updateAvatars()");
|
||||
|
||||
PerformanceTimer perfTimer("otherAvatars");
|
||||
|
||||
_renderDistanceController.setMeasuredValueSetpoint(qApp->getTargetFrameRate()); // No problem updating in flight.
|
||||
// The PID controller raises the controlled value when the measured value goes up.
|
||||
// The measured value is frame rate. When the controlled value (1 / render cutoff distance)
|
||||
// goes up, the render cutoff distance gets closer, the number of rendered avatars is less, and frame rate
|
||||
// goes up.
|
||||
const float deduced = qApp->getLastDeducedNonVSyncFps();
|
||||
const float distance = 1.0f / _renderDistanceController.update(deduced, deltaTime);
|
||||
_renderDistanceAverage.updateAverage(distance);
|
||||
_renderDistance = _renderDistanceAverage.getAverage();
|
||||
int renderableCount = 0;
|
||||
|
||||
// simulate avatars
|
||||
AvatarHash::iterator avatarIterator = _avatarHash.begin();
|
||||
while (avatarIterator != _avatarHash.end()) {
|
||||
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
|
||||
auto hashCopy = getHashCopy();
|
||||
|
||||
AvatarHash::iterator avatarIterator = hashCopy.begin();
|
||||
while (avatarIterator != hashCopy.end()) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
|
||||
|
||||
if (avatar == _myAvatar || !avatar->isInitialized()) {
|
||||
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
|
||||
// DO NOT update or fade out uninitialized Avatars
|
||||
++avatarIterator;
|
||||
} else if (avatar->shouldDie()) {
|
||||
removeAvatarMotionState(avatar);
|
||||
_avatarFades.push_back(avatarIterator.value());
|
||||
QWriteLocker locker(&_hashLock);
|
||||
avatarIterator = _avatarHash.erase(avatarIterator);
|
||||
removeAvatar(avatarIterator.key());
|
||||
++avatarIterator;
|
||||
} else {
|
||||
avatar->startUpdate();
|
||||
avatar->simulate(deltaTime);
|
||||
if (avatar->getShouldRender()) {
|
||||
renderableCount++;
|
||||
}
|
||||
avatar->endUpdate();
|
||||
++avatarIterator;
|
||||
}
|
||||
}
|
||||
_renderedAvatarCount = renderableCount;
|
||||
|
||||
// simulate avatar fades
|
||||
simulateAvatarFades(deltaTime);
|
||||
|
@ -148,7 +184,7 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
QVector<AvatarSharedPointer>::iterator fadingIterator = _avatarFades.begin();
|
||||
|
||||
const float SHRINK_RATE = 0.9f;
|
||||
const float MIN_FADE_SCALE = 0.001f;
|
||||
const float MIN_FADE_SCALE = MIN_AVATAR_SCALE;
|
||||
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
|
@ -156,7 +192,7 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
auto avatar = std::static_pointer_cast<Avatar>(*fadingIterator);
|
||||
avatar->startUpdate();
|
||||
avatar->setTargetScale(avatar->getScale() * SHRINK_RATE, true);
|
||||
if (avatar->getTargetScale() < MIN_FADE_SCALE) {
|
||||
if (avatar->getTargetScale() <= MIN_FADE_SCALE) {
|
||||
avatar->removeFromScene(*fadingIterator, scene, pendingChanges);
|
||||
fadingIterator = _avatarFades.erase(fadingIterator);
|
||||
} else {
|
||||
|
@ -169,19 +205,21 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
}
|
||||
|
||||
AvatarSharedPointer AvatarManager::newSharedAvatar() {
|
||||
return AvatarSharedPointer(std::make_shared<Avatar>(std::make_shared<AvatarRig>()));
|
||||
return std::make_shared<Avatar>(std::make_shared<Rig>());
|
||||
}
|
||||
|
||||
// virtual
|
||||
AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
|
||||
auto avatar = std::dynamic_pointer_cast<Avatar>(AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer));
|
||||
auto newAvatar = AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer);
|
||||
auto rawRenderableAvatar = std::static_pointer_cast<Avatar>(newAvatar);
|
||||
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
|
||||
avatar->addToScene(avatar, scene, pendingChanges);
|
||||
rawRenderableAvatar->addToScene(rawRenderableAvatar, scene, pendingChanges);
|
||||
}
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
return avatar;
|
||||
|
||||
return newAvatar;
|
||||
}
|
||||
|
||||
// protected
|
||||
|
@ -200,20 +238,25 @@ void AvatarManager::removeAvatarMotionState(AvatarSharedPointer avatar) {
|
|||
|
||||
// virtual
|
||||
void AvatarManager::removeAvatar(const QUuid& sessionUUID) {
|
||||
AvatarHash::iterator avatarIterator = _avatarHash.find(sessionUUID);
|
||||
if (avatarIterator != _avatarHash.end()) {
|
||||
std::shared_ptr<Avatar> avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
|
||||
if (avatar != _myAvatar && avatar->isInitialized()) {
|
||||
removeAvatarMotionState(avatar);
|
||||
_avatarFades.push_back(avatarIterator.value());
|
||||
QWriteLocker locker(&_hashLock);
|
||||
_avatarHash.erase(avatarIterator);
|
||||
}
|
||||
QWriteLocker locker(&_hashLock);
|
||||
|
||||
auto removedAvatar = _avatarHash.take(sessionUUID);
|
||||
if (removedAvatar) {
|
||||
handleRemovedAvatar(removedAvatar);
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar) {
|
||||
AvatarHashMap::handleRemovedAvatar(removedAvatar);
|
||||
|
||||
removeAvatarMotionState(removedAvatar);
|
||||
_avatarFades.push_back(removedAvatar);
|
||||
}
|
||||
|
||||
void AvatarManager::clearOtherAvatars() {
|
||||
// clear any avatars that came from an avatar-mixer
|
||||
QWriteLocker locker(&_hashLock);
|
||||
|
||||
AvatarHash::iterator avatarIterator = _avatarHash.begin();
|
||||
while (avatarIterator != _avatarHash.end()) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
|
||||
|
@ -221,10 +264,10 @@ void AvatarManager::clearOtherAvatars() {
|
|||
// don't remove myAvatar or uninitialized avatars from the list
|
||||
++avatarIterator;
|
||||
} else {
|
||||
removeAvatarMotionState(avatar);
|
||||
_avatarFades.push_back(avatarIterator.value());
|
||||
QWriteLocker locker(&_hashLock);
|
||||
auto removedAvatar = avatarIterator.value();
|
||||
avatarIterator = _avatarHash.erase(avatarIterator);
|
||||
|
||||
handleRemovedAvatar(removedAvatar);
|
||||
}
|
||||
}
|
||||
_myAvatar->clearLookAtTargetAvatar();
|
||||
|
@ -252,6 +295,7 @@ QVector<QUuid> AvatarManager::getAvatarIdentifiers() {
|
|||
QReadLocker locker(&_hashLock);
|
||||
return _avatarHash.keys().toVector();
|
||||
}
|
||||
|
||||
AvatarData* AvatarManager::getAvatar(QUuid avatarID) {
|
||||
QReadLocker locker(&_hashLock);
|
||||
return _avatarHash[avatarID].get(); // Non-obvious: A bogus avatarID answers your own avatar.
|
||||
|
@ -312,28 +356,25 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
|
|||
|
||||
AudioInjector::playSound(collisionSoundURL, energyFactorOfFull, AVATAR_STRETCH_FACTOR, myAvatar->getPosition());
|
||||
myAvatar->collisionWithEntity(collision);
|
||||
return; }
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
|
||||
AvatarHash::iterator avatarItr = _avatarHash.find(id);
|
||||
if (avatarItr != _avatarHash.end()) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarItr.value());
|
||||
AvatarMotionState* motionState = avatar->getMotionState();
|
||||
if (motionState) {
|
||||
motionState->addDirtyFlags(Simulation::DIRTY_SHAPE);
|
||||
} else {
|
||||
ShapeInfo shapeInfo;
|
||||
avatar->computeShapeInfo(shapeInfo);
|
||||
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
|
||||
if (shape) {
|
||||
AvatarMotionState* motionState = new AvatarMotionState(avatar.get(), shape);
|
||||
avatar->setMotionState(motionState);
|
||||
_motionStatesToAdd.insert(motionState);
|
||||
_avatarMotionStates.insert(motionState);
|
||||
}
|
||||
void AvatarManager::updateAvatarPhysicsShape(Avatar* avatar) {
|
||||
AvatarMotionState* motionState = avatar->getMotionState();
|
||||
if (motionState) {
|
||||
motionState->addDirtyFlags(Simulation::DIRTY_SHAPE);
|
||||
} else {
|
||||
ShapeInfo shapeInfo;
|
||||
avatar->computeShapeInfo(shapeInfo);
|
||||
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
|
||||
if (shape) {
|
||||
AvatarMotionState* motionState = new AvatarMotionState(avatar, shape);
|
||||
avatar->setMotionState(motionState);
|
||||
_motionStatesToAdd.insert(motionState);
|
||||
_avatarMotionStates.insert(motionState);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -341,7 +382,7 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
|
|||
void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
|
||||
for (auto avatarData : _avatarHash) {
|
||||
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarData);
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
avatar->addToScene(avatar, scene, pendingChanges);
|
||||
|
@ -349,7 +390,7 @@ void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
|
|||
}
|
||||
} else {
|
||||
for (auto avatarData : _avatarHash) {
|
||||
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarData);
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarData);
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
avatar->removeFromScene(avatar, scene, pendingChanges);
|
||||
|
@ -363,11 +404,6 @@ AvatarSharedPointer AvatarManager::getAvatarBySessionID(const QUuid& sessionID)
|
|||
if (sessionID == _myAvatar->getSessionUUID()) {
|
||||
return std::static_pointer_cast<Avatar>(_myAvatar);
|
||||
}
|
||||
QReadLocker locker(&_hashLock);
|
||||
auto iter = _avatarHash.find(sessionID);
|
||||
if (iter != _avatarHash.end()) {
|
||||
return iter.value();
|
||||
} else {
|
||||
return AvatarSharedPointer();
|
||||
}
|
||||
|
||||
return findAvatar(sessionID);
|
||||
}
|
||||
|
|
|
@ -18,6 +18,8 @@
|
|||
|
||||
#include <AvatarHashMap.h>
|
||||
#include <PhysicsEngine.h>
|
||||
#include <PIDController.h>
|
||||
#include <SimpleMovingAverage.h>
|
||||
|
||||
#include "Avatar.h"
|
||||
#include "AvatarMotionState.h"
|
||||
|
@ -43,6 +45,7 @@ public:
|
|||
void clearOtherAvatars();
|
||||
|
||||
bool shouldShowReceiveStats() const { return _shouldShowReceiveStats; }
|
||||
PIDController& getRenderDistanceController() { return _renderDistanceController; }
|
||||
|
||||
class LocalLight {
|
||||
public:
|
||||
|
@ -63,7 +66,18 @@ public:
|
|||
void handleOutgoingChanges(const VectorOfMotionStates& motionStates);
|
||||
void handleCollisionEvents(const CollisionEvents& collisionEvents);
|
||||
|
||||
void updateAvatarPhysicsShape(const QUuid& id);
|
||||
void updateAvatarPhysicsShape(Avatar* avatar);
|
||||
|
||||
// Expose results and parameter-tuning operations to other systems, such as stats and javascript.
|
||||
Q_INVOKABLE float getRenderDistance() { return _renderDistance; }
|
||||
Q_INVOKABLE int getNumberInRenderRange() { return _renderedAvatarCount; }
|
||||
Q_INVOKABLE bool getRenderDistanceControllerIsLogging() { return _renderDistanceController.getIsLogging(); }
|
||||
Q_INVOKABLE void setRenderDistanceControllerHistory(QString label, int size) { return _renderDistanceController.setHistorySize(label, size); }
|
||||
Q_INVOKABLE void setRenderDistanceKP(float newValue) { _renderDistanceController.setKP(newValue); }
|
||||
Q_INVOKABLE void setRenderDistanceKI(float newValue) { _renderDistanceController.setKI(newValue); }
|
||||
Q_INVOKABLE void setRenderDistanceKD(float newValue) { _renderDistanceController.setKD(newValue); }
|
||||
Q_INVOKABLE void setRenderDistanceLowLimit(float newValue) { _renderDistanceController.setControlledValueLowLimit(newValue); }
|
||||
Q_INVOKABLE void setRenderDistanceHighLimit(float newValue) { _renderDistanceController.setControlledValueHighLimit(newValue); }
|
||||
|
||||
public slots:
|
||||
void setShouldShowReceiveStats(bool shouldShowReceiveStats) { _shouldShowReceiveStats = shouldShowReceiveStats; }
|
||||
|
@ -79,7 +93,9 @@ private:
|
|||
virtual AvatarSharedPointer newSharedAvatar();
|
||||
virtual AvatarSharedPointer addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer);
|
||||
void removeAvatarMotionState(AvatarSharedPointer avatar);
|
||||
|
||||
virtual void removeAvatar(const QUuid& sessionUUID);
|
||||
virtual void handleRemovedAvatar(const AvatarSharedPointer& removedAvatar);
|
||||
|
||||
QVector<AvatarSharedPointer> _avatarFades;
|
||||
std::shared_ptr<MyAvatar> _myAvatar;
|
||||
|
@ -88,6 +104,10 @@ private:
|
|||
QVector<AvatarManager::LocalLight> _localLights;
|
||||
|
||||
bool _shouldShowReceiveStats = false;
|
||||
float _renderDistance { (float) TREE_SCALE };
|
||||
int _renderedAvatarCount { 0 };
|
||||
PIDController _renderDistanceController { };
|
||||
SimpleMovingAverage _renderDistanceAverage { 10 };
|
||||
|
||||
SetOfAvatarMotionStates _avatarMotionStates;
|
||||
SetOfMotionStates _motionStatesToAdd;
|
||||
|
|
|
@ -44,9 +44,12 @@ void AvatarUpdate::synchronousProcess() {
|
|||
bool AvatarUpdate::process() {
|
||||
PerformanceTimer perfTimer("AvatarUpdate");
|
||||
quint64 start = usecTimestampNow();
|
||||
quint64 deltaMicroseconds = start - _lastAvatarUpdate;
|
||||
_lastAvatarUpdate = start;
|
||||
quint64 deltaMicroseconds = 0;
|
||||
if (_lastAvatarUpdate > 0) {
|
||||
deltaMicroseconds = start - _lastAvatarUpdate;
|
||||
}
|
||||
float deltaSeconds = (float) deltaMicroseconds / (float) USECS_PER_SECOND;
|
||||
_lastAvatarUpdate = start;
|
||||
qApp->setAvatarSimrateSample(1.0f / deltaSeconds);
|
||||
|
||||
QSharedPointer<AvatarManager> manager = DependencyManager::get<AvatarManager>();
|
||||
|
|
|
@ -32,10 +32,7 @@ void FaceModel::simulate(float deltaTime, bool fullUpdate) {
|
|||
neckPosition = owningAvatar->getPosition();
|
||||
}
|
||||
setTranslation(neckPosition);
|
||||
glm::quat neckParentRotation;
|
||||
if (!owningAvatar->getSkeletonModel().getNeckParentRotationFromDefaultOrientation(neckParentRotation)) {
|
||||
neckParentRotation = owningAvatar->getOrientation();
|
||||
}
|
||||
glm::quat neckParentRotation = owningAvatar->getOrientation();
|
||||
setRotation(neckParentRotation);
|
||||
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningHead->getScale());
|
||||
|
||||
|
|
|
@ -26,7 +26,7 @@
|
|||
#include "devices/DdeFaceTracker.h"
|
||||
#include "devices/EyeTracker.h"
|
||||
#include "devices/Faceshift.h"
|
||||
#include "AvatarRig.h"
|
||||
#include <Rig.h>
|
||||
|
||||
using namespace std;
|
||||
|
||||
|
@ -62,7 +62,7 @@ Head::Head(Avatar* owningAvatar) :
|
|||
_isLookingAtMe(false),
|
||||
_lookingAtMeStarted(0),
|
||||
_wasLastLookingAtMe(0),
|
||||
_faceModel(this, std::make_shared<AvatarRig>()),
|
||||
_faceModel(this, std::make_shared<Rig>()),
|
||||
_leftEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID()),
|
||||
_rightEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID())
|
||||
{
|
||||
|
|
|
@ -21,7 +21,6 @@

#include <AccountManager.h>
#include <AddressManager.h>
#include <AnimationHandle.h>
#include <AudioClient.h>
#include <DependencyManager.h>
#include <display-plugins/DisplayPlugin.h>

@ -49,7 +48,6 @@
#include "ModelReferential.h"
#include "MyAvatar.h"
#include "Physics.h"
#include "Recorder.h"
#include "Util.h"
#include "InterfaceLogging.h"
#include "DebugDraw.h"

@ -181,10 +179,22 @@ MyAvatar::MyAvatar(RigPointer rig) :
setPosition(dummyAvatar.getPosition());
setOrientation(dummyAvatar.getOrientation());

// FIXME attachments
// FIXME joints
// FIXME head lean
// FIXME head orientation
if (!dummyAvatar.getAttachmentData().isEmpty()) {
setAttachmentData(dummyAvatar.getAttachmentData());
}

auto headData = dummyAvatar.getHeadData();
if (headData && _headData) {
// blendshapes
if (!headData->getBlendshapeCoefficients().isEmpty()) {
_headData->setBlendshapeCoefficients(headData->getBlendshapeCoefficients());
}
// head lean
_headData->setLeanForward(headData->getLeanForward());
_headData->setLeanSideways(headData->getLeanSideways());
// head orientation
_headData->setLookAtPosition(headData->getLookAtPosition());
}
});
}

@ -207,16 +217,9 @@ QByteArray MyAvatar::toByteArray(bool cullSmallChanges, bool sendAll) {
}

void MyAvatar::reset(bool andReload) {
// Gather animation mode...
// This should be simpler when we have only graph animations always on.
bool isRig = _rig->getEnableRig();
// seting rig animation to true, below, will clear the graph animation menu item, so grab it now.
bool isGraph = _rig->getEnableAnimGraph() || Menu::getInstance()->isOptionChecked(MenuOption::EnableAnimGraph);
// ... and get to sane configuration where other activity won't bother us.

if (andReload) {
qApp->setRawAvatarUpdateThreading(false);
_rig->disableHands = true;
setEnableRigAnimations(true);
}

// Reset dynamic state.

@ -252,19 +255,6 @@ void MyAvatar::reset(bool andReload) {
//_bodySensorMatrix = newBodySensorMatrix;
//updateSensorToWorldMatrix(); // Uses updated position/orientation and _bodySensorMatrix changes

_skeletonModel.simulate(0.1f); // non-zero
setEnableRigAnimations(false);
_skeletonModel.simulate(0.1f);
}
if (isRig) {
setEnableRigAnimations(true);
Menu::getInstance()->setIsOptionChecked(MenuOption::EnableRigAnimations, true);
} else if (isGraph) {
setEnableAnimGraph(true);
Menu::getInstance()->setIsOptionChecked(MenuOption::EnableAnimGraph, true);
}
if (andReload) {
_rig->disableHands = false;
qApp->setRawAvatarUpdateThreading();
}
}

@ -348,13 +338,7 @@ void MyAvatar::simulate(float deltaTime) {
{
PerformanceTimer perfTimer("joints");
// copy out the skeleton joints from the model
_jointData.resize(_rig->getJointStateCount());

for (int i = 0; i < _jointData.size(); i++) {
JointData& data = _jointData[i];
data.rotationSet |= _rig->getJointStateRotation(i, data.rotation);
data.translationSet |= _rig->getJointStateTranslation(i, data.translation);
}
_rig->copyJointsIntoJointData(_jointData);
}

{

@ -428,7 +412,7 @@ void MyAvatar::updateHMDFollowVelocity() {
}
if (_followSpeed > 0.0f) {
// to compute new velocity we must rotate offset into the world-frame
glm::quat sensorToWorldRotation = extractRotation(_sensorToWorldMatrix);
glm::quat sensorToWorldRotation = glm::normalize(glm::quat_cast(_sensorToWorldMatrix));
_followVelocity = _followSpeed * glm::normalize(sensorToWorldRotation * offset);
}
}

@ -439,12 +423,6 @@ void MyAvatar::updateHMDFollowVelocity() {
// This is so the correct camera can be used for rendering.
void MyAvatar::updateSensorToWorldMatrix() {

#ifdef DEBUG_RENDERING
// draw marker about avatar's position
const glm::vec4 red(1.0f, 0.0f, 0.0f, 1.0f);
DebugDraw::getInstance().addMyAvatarMarker("pos", glm::quat(), glm::vec3(), red);
#endif

// update the sensor mat so that the body position will end up in the desired
// position when driven from the head.
glm::mat4 desiredMat = createMatFromQuatAndPos(getOrientation(), getPosition());

@ -591,12 +569,6 @@ void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
}

Avatar::render(renderArgs, cameraPosition);

// don't display IK constraints in shadow mode
if (Menu::getInstance()->isOptionChecked(MenuOption::ShowIKConstraints) &&
renderArgs && renderArgs->_batch) {
_skeletonModel.renderIKConstraints(*renderArgs->_batch);
}
}

void MyAvatar::clearReferential() {

@ -625,76 +597,56 @@ bool MyAvatar::setJointReferential(const QUuid& id, int jointIndex) {
}
}

void MyAvatar::startAnimation(const QString& url, float fps, float priority,
bool loop, bool hold, float firstFrame, float lastFrame, const QStringList& maskedJoints) {
void MyAvatar::overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startAnimation", Q_ARG(const QString&, url), Q_ARG(float, fps),
Q_ARG(float, priority), Q_ARG(bool, loop), Q_ARG(bool, hold), Q_ARG(float, firstFrame),
Q_ARG(float, lastFrame), Q_ARG(const QStringList&, maskedJoints));
QMetaObject::invokeMethod(this, "overrideAnimation", Q_ARG(const QString&, url), Q_ARG(float, fps),
Q_ARG(bool, loop), Q_ARG(float, firstFrame), Q_ARG(float, lastFrame));
return;
}
_rig->startAnimation(url, fps, priority, loop, hold, firstFrame, lastFrame, maskedJoints);
_rig->overrideAnimation(url, fps, loop, firstFrame, lastFrame);
}

void MyAvatar::startAnimationByRole(const QString& role, const QString& url, float fps, float priority,
bool loop, bool hold, float firstFrame, float lastFrame, const QStringList& maskedJoints) {
void MyAvatar::restoreAnimation() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startAnimationByRole", Q_ARG(const QString&, role), Q_ARG(const QString&, url),
Q_ARG(float, fps), Q_ARG(float, priority), Q_ARG(bool, loop), Q_ARG(bool, hold), Q_ARG(float, firstFrame),
Q_ARG(float, lastFrame), Q_ARG(const QStringList&, maskedJoints));
QMetaObject::invokeMethod(this, "restoreAnimation");
return;
}
_rig->startAnimationByRole(role, url, fps, priority, loop, hold, firstFrame, lastFrame, maskedJoints);
_rig->restoreAnimation();
}

void MyAvatar::stopAnimationByRole(const QString& role) {
QStringList MyAvatar::getAnimationRoles() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "stopAnimationByRole", Q_ARG(const QString&, role));
return;
}
_rig->stopAnimationByRole(role);
}

void MyAvatar::stopAnimation(const QString& url) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "stopAnimation", Q_ARG(const QString&, url));
return;
}
_rig->stopAnimation(url);
}

AnimationDetails MyAvatar::getAnimationDetailsByRole(const QString& role) {
AnimationDetails result;
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "getAnimationDetailsByRole", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(AnimationDetails, result),
Q_ARG(const QString&, role));
QStringList result;
QMetaObject::invokeMethod(this, "getAnimationRoles", Qt::BlockingQueuedConnection, Q_RETURN_ARG(QStringList, result));
return result;
}
foreach (const AnimationHandlePointer& handle, _rig->getRunningAnimations()) {
if (handle->getRole() == role) {
result = handle->getAnimationDetails();
break;
}
}
return result;
return _rig->getAnimationRoles();
}

AnimationDetails MyAvatar::getAnimationDetails(const QString& url) {
AnimationDetails result;
void MyAvatar::overrideRoleAnimation(const QString& role, const QString& url, float fps, bool loop,
float firstFrame, float lastFrame) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "getAnimationDetails", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(AnimationDetails, result),
Q_ARG(const QString&, url));
return result;
QMetaObject::invokeMethod(this, "overrideRoleAnimation", Q_ARG(const QString&, role), Q_ARG(const QString&, url),
Q_ARG(float, fps), Q_ARG(bool, loop), Q_ARG(float, firstFrame), Q_ARG(float, lastFrame));
return;
}
foreach (const AnimationHandlePointer& handle, _rig->getRunningAnimations()) {
if (handle->getURL() == url) {
result = handle->getAnimationDetails();
break;
}
_rig->overrideRoleAnimation(role, url, fps, loop, firstFrame, lastFrame);
}

void MyAvatar::restoreRoleAnimation(const QString& role) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "restoreRoleAnimation", Q_ARG(const QString&, role));
return;
}
return result;
_rig->restoreRoleAnimation(role);
}

void MyAvatar::prefetchAnimation(const QString& url) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "prefetchAnimation", Q_ARG(const QString&, url));
return;
}
_rig->prefetchAnimation(url);
}

void MyAvatar::saveData() {
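The rewritten animation entry points above keep the same thread guard as the code they replace: when a call arrives on a thread other than the one that owns MyAvatar, it re-posts itself to the owning thread with QMetaObject::invokeMethod and returns. A minimal, self-contained sketch of that guard, with invented class and method names (illustrative only, not part of this commit):

// worker.cpp -- minimal sketch of the re-invoke guard used by the methods above (hypothetical names).
#include <QCoreApplication>
#include <QDebug>
#include <QObject>
#include <QThread>
#include <QTimer>
#include <thread>

class Worker : public QObject {
    Q_OBJECT
public:
    Q_INVOKABLE void setLabel(const QString& label) {
        if (QThread::currentThread() != thread()) {
            // Wrong thread: queue the same call onto the thread that owns this object and bail out.
            QMetaObject::invokeMethod(this, "setLabel", Qt::QueuedConnection, Q_ARG(QString, label));
            return;
        }
        qDebug() << "setLabel executed on the owning thread:" << label;
    }
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    Worker worker; // lives on the main thread

    // Direct call from another thread; the guard marshals the work back safely.
    std::thread caller([&worker] { worker.setLabel("hello from a side thread"); });
    caller.join();

    QTimer::singleShot(0, &app, &QCoreApplication::quit); // let the queued call run, then exit
    return app.exec();
}

#include "worker.moc" // single-file Qt example; build with qmake/CMake automoc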
@ -729,24 +681,6 @@ void MyAvatar::saveData() {
}
settings.endArray();

settings.beginWriteArray("animationHandles");
auto animationHandles = _rig->getAnimationHandles();
for (int i = 0; i < animationHandles.size(); i++) {
settings.setArrayIndex(i);
const AnimationHandlePointer& pointer = animationHandles.at(i);
settings.setValue("role", pointer->getRole());
settings.setValue("url", pointer->getURL());
settings.setValue("fps", pointer->getFPS());
settings.setValue("priority", pointer->getPriority());
settings.setValue("loop", pointer->getLoop());
settings.setValue("hold", pointer->getHold());
settings.setValue("startAutomatically", pointer->getStartAutomatically());
settings.setValue("firstFrame", pointer->getFirstFrame());
settings.setValue("lastFrame", pointer->getLastFrame());
settings.setValue("maskedJoints", pointer->getMaskedJoints());
}
settings.endArray();

settings.setValue("displayName", _displayName);
settings.setValue("collisionSoundURL", _collisionSoundURL);

@ -761,65 +695,11 @@ float loadSetting(QSettings& settings, const char* name, float defaultValue) {
return value;
}

// Resource loading is not yet thread safe. If an animation is not loaded when requested by other than tha main thread,
// we block in AnimationHandle::setURL => AnimationCache::getAnimation.
// Meanwhile, the main thread will also eventually lock as it tries to render us.
// If we demand the animation from the update thread while we're locked, we'll deadlock.
// Until we untangle this, code puts the updates back on the main thread temporarilly and starts all the loading.
void MyAvatar::safelyLoadAnimations() {
_rig->addAnimationByRole("idle");
_rig->addAnimationByRole("walk");
_rig->addAnimationByRole("backup");
_rig->addAnimationByRole("leftTurn");
_rig->addAnimationByRole("rightTurn");
_rig->addAnimationByRole("leftStrafe");
_rig->addAnimationByRole("rightStrafe");
}

void MyAvatar::setEnableRigAnimations(bool isEnabled) {
if (_rig->getEnableRig() == isEnabled) {
return;
}
if (isEnabled) {
qApp->setRawAvatarUpdateThreading(false);
setEnableAnimGraph(false);
Menu::getInstance()->setIsOptionChecked(MenuOption::EnableAnimGraph, false);
safelyLoadAnimations();
qApp->setRawAvatarUpdateThreading();
_rig->setEnableRig(true);
} else {
_rig->setEnableRig(false);
_rig->deleteAnimations();
}
}

void MyAvatar::setEnableAnimGraph(bool isEnabled) {
if (_rig->getEnableAnimGraph() == isEnabled) {
return;
}
if (isEnabled) {
qApp->setRawAvatarUpdateThreading(false);
setEnableRigAnimations(false);
Menu::getInstance()->setIsOptionChecked(MenuOption::EnableRigAnimations, false);
safelyLoadAnimations();
if (_skeletonModel.readyToAddToScene()) {
_rig->setEnableAnimGraph(true);
initAnimGraph(); // must be enabled for the init to happen
_rig->setEnableAnimGraph(false); // must be disable to safely reset threading
}
qApp->setRawAvatarUpdateThreading();
_rig->setEnableAnimGraph(true);
} else {
_rig->setEnableAnimGraph(false);
destroyAnimGraph();
}
}

void MyAvatar::setEnableDebugDrawBindPose(bool isEnabled) {
_enableDebugDrawBindPose = isEnabled;
void MyAvatar::setEnableDebugDrawDefaultPose(bool isEnabled) {
_enableDebugDrawDefaultPose = isEnabled;

if (!isEnabled) {
AnimDebugDraw::getInstance().removeSkeleton("myAvatar");
AnimDebugDraw::getInstance().removeAbsolutePoses("myAvatarDefaultPoses");
}
}

@ -827,7 +707,16 @@ void MyAvatar::setEnableDebugDrawAnimPose(bool isEnabled) {
_enableDebugDrawAnimPose = isEnabled;

if (!isEnabled) {
AnimDebugDraw::getInstance().removePoses("myAvatar");
AnimDebugDraw::getInstance().removeAbsolutePoses("myAvatarAnimPoses");
}
}

void MyAvatar::setEnableDebugDrawPosition(bool isEnabled) {
if (isEnabled) {
const glm::vec4 red(1.0f, 0.0f, 0.0f, 1.0f);
DebugDraw::getInstance().addMyAvatarMarker("avatarPosition", glm::quat(), glm::vec3(), red);
} else {
DebugDraw::getInstance().removeMyAvatarMarker("avatarPosition");
}
}

@ -875,31 +764,15 @@ void MyAvatar::loadData() {
settings.endArray();
setAttachmentData(attachmentData);

int animationCount = settings.beginReadArray("animationHandles");
_rig->deleteAnimations();
for (int i = 0; i < animationCount; i++) {
settings.setArrayIndex(i);
_rig->addAnimationByRole(settings.value("role", "idle").toString(),
settings.value("url").toString(),
loadSetting(settings, "fps", 30.0f),
loadSetting(settings, "priority", 1.0f),
settings.value("loop", true).toBool(),
settings.value("hold", false).toBool(),
settings.value("firstFrame", 0.0f).toFloat(),
settings.value("lastFrame", INT_MAX).toFloat(),
settings.value("maskedJoints").toStringList(),
settings.value("startAutomatically", true).toBool());
}
settings.endArray();

setDisplayName(settings.value("displayName").toString());
setCollisionSoundURL(settings.value("collisionSoundURL", DEFAULT_AVATAR_COLLISION_SOUND_URL).toString());

settings.endGroup();
_rig->setEnableRig(Menu::getInstance()->isOptionChecked(MenuOption::EnableRigAnimations));

setEnableMeshVisible(Menu::getInstance()->isOptionChecked(MenuOption::MeshVisible));
setEnableDebugDrawBindPose(Menu::getInstance()->isOptionChecked(MenuOption::AnimDebugDrawBindPose));
setEnableDebugDrawDefaultPose(Menu::getInstance()->isOptionChecked(MenuOption::AnimDebugDrawDefaultPose));
setEnableDebugDrawAnimPose(Menu::getInstance()->isOptionChecked(MenuOption::AnimDebugDrawAnimPose));
setEnableDebugDrawPosition(Menu::getInstance()->isOptionChecked(MenuOption::AnimDebugDrawPosition));
}

void MyAvatar::saveAttachmentData(const AttachmentData& attachment) const {

@ -982,10 +855,8 @@ void MyAvatar::updateLookAtTargetAvatar() {
const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;

AvatarHash hash;
DependencyManager::get<AvatarManager>()->withAvatarHash([&] (const AvatarHash& locked) {
hash = locked; // make a shallow copy and operate on that, to minimize lock time
});
AvatarHash hash = DependencyManager::get<AvatarManager>()->getHashCopy();

foreach (const AvatarSharedPointer& avatarPointer, hash) {
auto avatar = static_pointer_cast<Avatar>(avatarPointer);
bool isCurrentTarget = avatar->getIsLookAtTarget();
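The withAvatarHash lambda removed above existed only to take a shallow copy of the hash while the lock was held; getHashCopy() presumably folds that copy into a single call so the per-avatar work runs outside the lock. A generic sketch of that copy-under-the-lock pattern (illustrative only, not the hifi implementation):

// Copy the shared container under the lock, then iterate on the copy outside it.
#include <iostream>
#include <memory>
#include <mutex>
#include <string>
#include <unordered_map>

struct Avatar { std::string name; };
using AvatarHash = std::unordered_map<std::string, std::shared_ptr<Avatar>>;

class AvatarRegistry {
public:
    AvatarHash getHashCopy() const {
        std::lock_guard<std::mutex> lock(_mutex);
        return _hash;                      // shallow copy: shared_ptrs, not Avatars
    }
    void add(const std::string& id, std::shared_ptr<Avatar> avatar) {
        std::lock_guard<std::mutex> lock(_mutex);
        _hash[id] = std::move(avatar);
    }
private:
    mutable std::mutex _mutex;
    AvatarHash _hash;
};

int main() {
    AvatarRegistry registry;
    registry.add("a", std::make_shared<Avatar>(Avatar{"Alice"}));
    registry.add("b", std::make_shared<Avatar>(Avatar{"Bob"}));

    // The (possibly slow) per-avatar work happens on the copy, so the lock is
    // held only for the duration of the copy itself.
    for (const auto& entry : registry.getHashCopy()) {
        std::cout << entry.first << " -> " << entry.second->name << "\n";
    }
    return 0;
}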
@ -1081,11 +952,11 @@ eyeContactTarget MyAvatar::getEyeContactTarget() {
}

glm::vec3 MyAvatar::getDefaultEyePosition() const {
return getPosition() + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
return getPosition() + getWorldAlignedOrientation() * Quaternions::Y_180 * _skeletonModel.getDefaultEyeModelPosition();
}

const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f;
const float RECORDER_PRIORITY = SCRIPT_PRIORITY + 1.0f;
const float SCRIPT_PRIORITY = 1.0f + 1.0f;
const float RECORDER_PRIORITY = 1.0f + 1.0f;

void MyAvatar::setJointRotations(QVector<glm::quat> jointRotations) {
int numStates = glm::min(_skeletonModel.getJointStateCount(), jointRotations.size());

@ -1134,14 +1005,7 @@ void MyAvatar::clearJointData(int index) {
}

void MyAvatar::clearJointsData() {
clearJointAnimationPriorities();
}

void MyAvatar::clearJointAnimationPriorities() {
int numStates = _skeletonModel.getJointStateCount();
for (int i = 0; i < numStates; ++i) {
_rig->clearJointAnimationPriority(i);
}
//clearJointAnimationPriorities();
}

void MyAvatar::setFaceModelURL(const QUrl& faceModelURL) {

@ -1186,16 +1050,8 @@ void MyAvatar::useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelN

const QString& urlString = fullAvatarURL.toString();
if (urlString.isEmpty() || (fullAvatarURL != getSkeletonModelURL())) {
bool isRigEnabled = getEnableRigAnimations();
bool isGraphEnabled = getEnableAnimGraph();
qApp->setRawAvatarUpdateThreading(false);
setEnableRigAnimations(false);
setEnableAnimGraph(false);

setSkeletonModelURL(fullAvatarURL);

setEnableRigAnimations(isRigEnabled);
setEnableAnimGraph(isGraphEnabled);
qApp->setRawAvatarUpdateThreading();
UserActivityLogger::getInstance().changedModel("skeleton", urlString);
}

@ -1331,7 +1187,6 @@ void MyAvatar::setScriptedMotorFrame(QString frame) {
}

void MyAvatar::clearScriptableSettings() {
clearJointAnimationPriorities();
_scriptedMotorVelocity = glm::vec3(0.0f);
_scriptedMotorTimescale = DEFAULT_SCRIPTED_MOTOR_TIMESCALE;
}

@ -1450,7 +1305,7 @@ void MyAvatar::initAnimGraph() {
auto graphUrl = QUrl(_animGraphUrl.isEmpty() ?
QUrl::fromLocalFile(PathUtils::resourcesPath() + "meshes/defaultAvatar_full/avatar-animation.json") :
_animGraphUrl);
_rig->initAnimGraph(graphUrl, _skeletonModel.getGeometry()->getFBXGeometry());
_rig->initAnimGraph(graphUrl);

_bodySensorMatrix = deriveBodyFromHMDSensor(); // Based on current cached HMD position/rotation..
updateSensorToWorldMatrix(); // Uses updated position/orientation and _bodySensorMatrix changes

@ -1469,38 +1324,29 @@ void MyAvatar::preRender(RenderArgs* renderArgs) {
initHeadBones();
_skeletonModel.setCauterizeBoneSet(_headBoneSet);
initAnimGraph();
_debugDrawSkeleton = std::make_shared<AnimSkeleton>(_skeletonModel.getGeometry()->getFBXGeometry());
}

if (_enableDebugDrawBindPose || _enableDebugDrawAnimPose) {
if (_enableDebugDrawDefaultPose || _enableDebugDrawAnimPose) {

// bones are aligned such that z is forward, not -z.
glm::quat rotY180 = glm::angleAxis((float)M_PI, glm::vec3(0.0f, 1.0f, 0.0f));
AnimPose xform(glm::vec3(1), getOrientation() * rotY180, getPosition());
auto animSkeleton = _rig->getAnimSkeleton();

if (_enableDebugDrawBindPose && _debugDrawSkeleton) {
// the rig is in the skeletonModel frame
AnimPose xform(glm::vec3(1), _skeletonModel.getRotation(), _skeletonModel.getTranslation());

if (_enableDebugDrawDefaultPose && animSkeleton) {
glm::vec4 gray(0.2f, 0.2f, 0.2f, 0.2f);
AnimDebugDraw::getInstance().addSkeleton("myAvatar", _debugDrawSkeleton, xform, gray);
AnimDebugDraw::getInstance().addAbsolutePoses("myAvatarDefaultPoses", animSkeleton, _rig->getAbsoluteDefaultPoses(), xform, gray);
}

if (_enableDebugDrawAnimPose && _debugDrawSkeleton) {
glm::vec4 cyan(0.1f, 0.6f, 0.6f, 1.0f);

// build AnimPoseVec from JointStates.
AnimPoseVec poses;
poses.reserve(_debugDrawSkeleton->getNumJoints());
for (int i = 0; i < _debugDrawSkeleton->getNumJoints(); i++) {
AnimPose pose = _debugDrawSkeleton->getRelativeBindPose(i);
glm::quat jointRot;
_rig->getJointRotationInConstrainedFrame(i, jointRot);
glm::vec3 jointTrans;
_rig->getJointTranslation(i, jointTrans);
pose.rot = pose.rot * jointRot;
pose.trans = jointTrans;
poses.push_back(pose);
if (_enableDebugDrawAnimPose && animSkeleton) {
// build absolute AnimPoseVec from rig
AnimPoseVec absPoses;
absPoses.reserve(_rig->getJointStateCount());
for (int i = 0; i < _rig->getJointStateCount(); i++) {
absPoses.push_back(AnimPose(_rig->getJointTransform(i)));
}

AnimDebugDraw::getInstance().addPoses("myAvatar", _debugDrawSkeleton, poses, xform, cyan);
glm::vec4 cyan(0.1f, 0.6f, 0.6f, 1.0f);
AnimDebugDraw::getInstance().addAbsolutePoses("myAvatarAnimPoses", animSkeleton, absPoses, xform, cyan);
}
}

@ -1952,54 +1798,33 @@ glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
const glm::quat hmdOrientation = getHMDSensorOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);

/*
const glm::vec3 DEFAULT_RIGHT_EYE_POS(-0.3f, 1.6f, 0.0f);
const glm::vec3 DEFAULT_LEFT_EYE_POS(0.3f, 1.6f, 0.0f);
const glm::vec3 DEFAULT_NECK_POS(0.0f, 1.5f, 0.0f);
const glm::vec3 DEFAULT_HIPS_POS(0.0f, 1.0f, 0.0f);
*/
// 2 meter tall dude (in rig coordinates)
const glm::vec3 DEFAULT_RIG_MIDDLE_EYE_POS(0.0f, 0.9f, 0.0f);
const glm::vec3 DEFAULT_RIG_NECK_POS(0.0f, 0.70f, 0.0f);
const glm::vec3 DEFAULT_RIG_HIPS_POS(0.0f, 0.05f, 0.0f);

// 2 meter tall dude
const glm::vec3 DEFAULT_RIGHT_EYE_POS(-0.3f, 1.9f, 0.0f);
const glm::vec3 DEFAULT_LEFT_EYE_POS(0.3f, 1.9f, 0.0f);
const glm::vec3 DEFAULT_NECK_POS(0.0f, 1.70f, 0.0f);
const glm::vec3 DEFAULT_HIPS_POS(0.0f, 1.05f, 0.0f);
int rightEyeIndex = _rig->indexOfJoint("RightEye");
int leftEyeIndex = _rig->indexOfJoint("LeftEye");
int neckIndex = _rig->indexOfJoint("Neck");
int hipsIndex = _rig->indexOfJoint("Hips");

vec3 localEyes, localNeck;
if (!_debugDrawSkeleton) {
const glm::quat rotY180 = glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f));
localEyes = rotY180 * (((DEFAULT_RIGHT_EYE_POS + DEFAULT_LEFT_EYE_POS) / 2.0f) - DEFAULT_HIPS_POS);
localNeck = rotY180 * (DEFAULT_NECK_POS - DEFAULT_HIPS_POS);
} else {
// TODO: At the moment MyAvatar does not have access to the rig, which has the skeleton, which has the bind poses.
// for now use the _debugDrawSkeleton, which is initialized with the same FBX model as the rig.
glm::vec3 rigMiddleEyePos = leftEyeIndex != -1 ? _rig->getAbsoluteDefaultPose(leftEyeIndex).trans : DEFAULT_RIG_MIDDLE_EYE_POS;
glm::vec3 rigNeckPos = neckIndex != -1 ? _rig->getAbsoluteDefaultPose(neckIndex).trans : DEFAULT_RIG_NECK_POS;
glm::vec3 rigHipsPos = hipsIndex != -1 ? _rig->getAbsoluteDefaultPose(hipsIndex).trans : DEFAULT_RIG_HIPS_POS;

// TODO: cache these indices.
int rightEyeIndex = _debugDrawSkeleton->nameToJointIndex("RightEye");
int leftEyeIndex = _debugDrawSkeleton->nameToJointIndex("LeftEye");
int neckIndex = _debugDrawSkeleton->nameToJointIndex("Neck");
int hipsIndex = _debugDrawSkeleton->nameToJointIndex("Hips");

glm::vec3 absRightEyePos = rightEyeIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(rightEyeIndex).trans : DEFAULT_RIGHT_EYE_POS;
glm::vec3 absLeftEyePos = leftEyeIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(leftEyeIndex).trans : DEFAULT_LEFT_EYE_POS;
glm::vec3 absNeckPos = neckIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(neckIndex).trans : DEFAULT_NECK_POS;
glm::vec3 absHipsPos = neckIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(hipsIndex).trans : DEFAULT_HIPS_POS;

const glm::quat rotY180 = glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f));
localEyes = rotY180 * (((absRightEyePos + absLeftEyePos) / 2.0f) - absHipsPos);
localNeck = rotY180 * (absNeckPos - absHipsPos);
}
glm::vec3 localEyes = (rigMiddleEyePos - rigHipsPos);
glm::vec3 localNeck = (rigNeckPos - rigHipsPos);

// apply simplistic head/neck model
// figure out where the avatar body should be by applying offsets from the avatar's neck & head joints.

// eyeToNeck offset is relative full HMD orientation.
// while neckToRoot offset is only relative to HMDs yaw.
glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);
glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;
// Y_180 is necessary because rig is z forward and hmdOrientation is -z forward
glm::vec3 eyeToNeck = hmdOrientation * Quaternions::Y_180 * (localNeck - localEyes);
glm::vec3 neckToRoot = hmdOrientationYawOnly * Quaternions::Y_180 * -localNeck;
glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;

// avatar facing is determined solely by hmd orientation.
return createMatFromQuatAndPos(hmdOrientationYawOnly, bodyPos);
}
#endif
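The head/neck model at the end of deriveBodyFromHMDSensor above reduces to two offsets: eye-to-neck follows the full HMD orientation, neck-to-root follows only its yaw, and both are rotated by a 180-degree yaw because the rig is +z forward while the HMD frame is -z forward. A standalone numeric sketch of that arithmetic in plain glm (the rig constants are taken from the diff; yawOnly is a simple stand-in for cancelOutRollAndPitch, which is not shown here):

// Minimal sketch of the simplistic head/neck body derivation (illustrative only).
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>
#include <cmath>
#include <iostream>

static glm::quat yawOnly(const glm::quat& q) {
    // Cancel roll and pitch by projecting the rotated forward vector onto the XZ plane.
    glm::vec3 forward = q * glm::vec3(0.0f, 0.0f, -1.0f);
    forward.y = 0.0f;
    if (glm::length(forward) < 1.0e-6f) {
        return glm::quat(1.0f, 0.0f, 0.0f, 0.0f); // degenerate: looking straight up or down
    }
    forward = glm::normalize(forward);
    float yaw = atan2f(-forward.x, -forward.z);
    return glm::angleAxis(yaw, glm::vec3(0.0f, 1.0f, 0.0f));
}

int main() {
    // Rig-frame default offsets for a 2 m avatar (values from the diff above).
    const glm::vec3 rigMiddleEyePos(0.0f, 0.9f, 0.0f);
    const glm::vec3 rigNeckPos(0.0f, 0.70f, 0.0f);
    const glm::vec3 rigHipsPos(0.0f, 0.05f, 0.0f);
    const glm::vec3 localEyes = rigMiddleEyePos - rigHipsPos;
    const glm::vec3 localNeck = rigNeckPos - rigHipsPos;

    // Example HMD sample in sensor space: 1.8 m up, pitched down 20 degrees.
    const glm::vec3 hmdPosition(0.0f, 1.8f, 0.0f);
    const glm::quat hmdOrientation = glm::angleAxis(glm::radians(-20.0f), glm::vec3(1.0f, 0.0f, 0.0f));
    const glm::quat hmdYawOnly = yawOnly(hmdOrientation);
    const glm::quat Y_180 = glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));

    // eyeToNeck follows the full HMD orientation; neckToRoot only follows its yaw.
    const glm::vec3 eyeToNeck = hmdOrientation * Y_180 * (localNeck - localEyes);
    const glm::vec3 neckToRoot = hmdYawOnly * Y_180 * (-localNeck);
    const glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;

    std::cout << "body position: " << bodyPos.x << ", " << bodyPos.y << ", " << bodyPos.z << "\n";
    return 0;
}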
@ -114,27 +114,24 @@

float getRealWorldFieldOfView() { return _realWorldFieldOfView.get(); }

const QList<AnimationHandlePointer>& getAnimationHandles() const { return _rig->getAnimationHandles(); }
AnimationHandlePointer addAnimationHandle() { return _rig->createAnimationHandle(); }
void removeAnimationHandle(const AnimationHandlePointer& handle) { _rig->removeAnimationHandle(handle); }
/// Allows scripts to run animations.
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
// Interrupt the current animation with a custom animation.
Q_INVOKABLE void overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame);

/// Stops an animation as identified by a URL.
Q_INVOKABLE void stopAnimation(const QString& url);
// Stop the animation that was started with overrideAnimation and go back to the standard animation.
Q_INVOKABLE void restoreAnimation();

// Returns a list of all clips that are available
Q_INVOKABLE QStringList getAnimationRoles();

// Replace an existing standard role animation with a custom one.
Q_INVOKABLE void overrideRoleAnimation(const QString& role, const QString& url, float fps, bool loop, float firstFrame, float lastFrame);

// remove an animation role override and return to the standard animation.
Q_INVOKABLE void restoreRoleAnimation(const QString& role);

// prefetch animation
Q_INVOKABLE void prefetchAnimation(const QString& url);

/// Starts an animation by its role, using the provided URL and parameters if the avatar doesn't have a custom
/// animation for the role.
Q_INVOKABLE void startAnimationByRole(const QString& role, const QString& url = QString(), float fps = 30.0f,
float priority = 1.0f, bool loop = false, bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
/// Stops an animation identified by its role.
Q_INVOKABLE void stopAnimationByRole(const QString& role);
Q_INVOKABLE AnimationDetails getAnimationDetailsByRole(const QString& role);
Q_INVOKABLE AnimationDetails getAnimationDetails(const QString& url);
void clearJointAnimationPriorities();
// Adds handler(animStateDictionaryIn) => animStateDictionaryOut, which will be invoked just before each animGraph state update.
// The handler will be called with an animStateDictionaryIn that has all those properties specified by the (possibly empty)
// propertiesList argument. However for debugging, if the properties argument is null, all internal animGraph state is provided.

@ -256,13 +253,11 @@ public slots:

virtual void rebuildSkeletonBody() override;

bool getEnableRigAnimations() const { return _rig->getEnableRig(); }
void setEnableRigAnimations(bool isEnabled);
bool getEnableAnimGraph() const { return _rig->getEnableAnimGraph(); }
const QString& getAnimGraphUrl() const { return _animGraphUrl; }
void setEnableAnimGraph(bool isEnabled);
void setEnableDebugDrawBindPose(bool isEnabled);

void setEnableDebugDrawDefaultPose(bool isEnabled);
void setEnableDebugDrawAnimPose(bool isEnabled);
void setEnableDebugDrawPosition(bool isEnabled);
void setEnableMeshVisible(bool isEnabled);
void setAnimGraphUrl(const QString& url) { _animGraphUrl = url; }

@ -361,7 +356,6 @@ private:
void maybeUpdateBillboard();
void initHeadBones();
void initAnimGraph();
void safelyLoadAnimations();

// Avatar Preferences
QUrl _fullAvatarURLFromPreferences;

@ -393,9 +387,8 @@ private:
RigPointer _rig;
bool _prevShouldDrawHead;

bool _enableDebugDrawBindPose { false };
bool _enableDebugDrawDefaultPose { false };
bool _enableDebugDrawAnimPose { false };
AnimSkeleton::ConstPointer _debugDrawSkeleton { nullptr };

AudioListenerMode _audioListenerMode;
glm::vec3 _customListenPosition;
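The header changes above swap the old startAnimation/stopAnimation surface for override/restore calls plus a role query. A hedged sketch of the intended call order; MyAvatarStub below only mirrors the new signatures so the sequence compiles on its own, and the URLs are placeholders, not real assets:

// Illustrative call order for the new override/restore animation API (stand-in class).
#include <QDebug>
#include <QString>
#include <QStringList>

class MyAvatarStub {
public:
    void overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame) {
        qDebug() << "override full-body animation with" << url << fps << loop << firstFrame << lastFrame;
    }
    void restoreAnimation() { qDebug() << "back to the standard animation graph"; }
    QStringList getAnimationRoles() { return { "idle", "walk", "backup" }; } // roles named in the diff
    void overrideRoleAnimation(const QString& role, const QString& url, float fps, bool loop,
                               float firstFrame, float lastFrame) {
        qDebug() << "override role" << role << "with" << url;
    }
    void restoreRoleAnimation(const QString& role) { qDebug() << "restore role" << role; }
};

int main() {
    MyAvatarStub avatar;

    // Interrupt whatever the graph is doing with a one-off clip, then put it back.
    avatar.overrideAnimation("https://example.com/clips/wave.fbx", 30.0f, false, 0.0f, 53.0f);
    avatar.restoreAnimation();

    // Inspect the available roles, swap the clip behind one role, then restore its default.
    for (const QString& role : avatar.getAnimationRoles()) {
        qDebug() << "available role:" << role;
    }
    avatar.overrideRoleAnimation("idle", "https://example.com/clips/idle_variant.fbx", 30.0f, true, 0.0f, 89.0f);
    avatar.restoreRoleAnimation("idle");
    return 0;
}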
@ -26,7 +26,6 @@

SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer rig) :
Model(rig, parent),
_triangleFanID(DependencyManager::get<GeometryCache>()->allocateID()),
_owningAvatar(owningAvatar),
_boundingCapsuleLocalOffset(0.0f),
_boundingCapsuleRadius(0.0f),

@ -41,26 +40,11 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer r
SkeletonModel::~SkeletonModel() {
}

void SkeletonModel::initJointStates(QVector<JointState> states) {
void SkeletonModel::initJointStates() {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::mat4 rootTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;

int rootJointIndex = geometry.rootJointIndex;
int leftHandJointIndex = geometry.leftHandJointIndex;
int leftElbowJointIndex = leftHandJointIndex >= 0 ? geometry.joints.at(leftHandJointIndex).parentIndex : -1;
int leftShoulderJointIndex = leftElbowJointIndex >= 0 ? geometry.joints.at(leftElbowJointIndex).parentIndex : -1;
int rightHandJointIndex = geometry.rightHandJointIndex;
int rightElbowJointIndex = rightHandJointIndex >= 0 ? geometry.joints.at(rightHandJointIndex).parentIndex : -1;
int rightShoulderJointIndex = rightElbowJointIndex >= 0 ? geometry.joints.at(rightElbowJointIndex).parentIndex : -1;

_rig->initJointStates(states, rootTransform,
rootJointIndex,
leftHandJointIndex,
leftElbowJointIndex,
leftShoulderJointIndex,
rightHandJointIndex,
rightElbowJointIndex,
rightShoulderJointIndex);
glm::mat4 geometryOffset = geometry.offset;
glm::mat4 modelOffset = glm::scale(_scale) * glm::translate(_offset);
_rig->initJointStates(geometry, modelOffset);

// Determine the default eye position for avatar scale = 1.0
int headJointIndex = _geometry->getFBXGeometry().headJointIndex;

@ -75,19 +59,11 @@ void SkeletonModel::initJointStates(QVector<JointState> states) {
getJointPosition(rootJointIndex, rootModelPosition);

_defaultEyeModelPosition = midEyePosition - rootModelPosition;
_defaultEyeModelPosition.z = -_defaultEyeModelPosition.z;

// Skeleton may have already been scaled so unscale it
_defaultEyeModelPosition = _defaultEyeModelPosition / _scale;
}

// the SkeletonModel override of updateJointState() will clear the translation part
// of its root joint and we need that done before we try to build shapes hence we
// recompute all joint transforms at this time.
for (int i = 0; i < _rig->getJointStateCount(); i++) {
_rig->updateJointState(i, rootTransform);
}

computeBoundingShape();

Extents meshExtents = getMeshExtents();

@ -98,7 +74,6 @@ void SkeletonModel::initJointStates(QVector<JointState> states) {
emit skeletonLoaded();
}

const float PALM_PRIORITY = DEFAULT_PRIORITY;
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Head* head = _owningAvatar->getHead();

@ -111,32 +86,24 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
headParams.leanSideways = head->getFinalLeanSideways();
headParams.leanForward = head->getFinalLeanForward();
headParams.torsoTwist = head->getTorsoTwist();
headParams.localHeadPitch = head->getFinalPitch();
headParams.localHeadYaw = head->getFinalYaw();
headParams.localHeadRoll = head->getFinalRoll();

if (qApp->getAvatarUpdater()->isHMDMode()) {
headParams.isInHMD = true;

// get HMD position from sensor space into world space, and back into model space
AnimPose avatarToWorld(glm::vec3(1.0f), myAvatar->getOrientation(), myAvatar->getPosition());
glm::mat4 worldToAvatar = glm::inverse((glm::mat4)avatarToWorld);
// get HMD position from sensor space into world space, and back into rig space
glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
glm::mat4 hmdMat = worldToAvatar * worldHMDMat;

// in local avatar space (i.e. relative to avatar pos/orientation.)
glm::vec3 hmdPosition = extractTranslation(hmdMat);
glm::quat hmdOrientation = extractRotation(hmdMat);

headParams.localHeadPosition = hmdPosition;
headParams.localHeadOrientation = hmdOrientation;
glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
glm::mat4 worldToRig = glm::inverse(rigToWorld);
glm::mat4 rigHMDMat = worldToRig * worldHMDMat;

headParams.rigHeadPosition = extractTranslation(rigHMDMat);
headParams.rigHeadOrientation = extractRotation(rigHMDMat);
headParams.worldHeadOrientation = extractRotation(worldHMDMat);
} else {
headParams.isInHMD = false;

// We don't have a valid localHeadPosition.
headParams.localHeadOrientation = head->getFinalOrientationInLocalFrame();
headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame();
headParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
}

@ -151,8 +118,8 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
auto leftPalm = myAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand);
if (leftPalm.isValid() && leftPalm.isActive()) {
handParams.isLeftEnabled = true;
handParams.leftPosition = leftPalm.getRawPosition();
handParams.leftOrientation = leftPalm.getRawRotation();
handParams.leftPosition = Quaternions::Y_180 * leftPalm.getRawPosition();
handParams.leftOrientation = Quaternions::Y_180 * leftPalm.getRawRotation();
handParams.leftTrigger = leftPalm.getTrigger();
} else {
handParams.isLeftEnabled = false;

@ -161,8 +128,8 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
auto rightPalm = myAvatar->getHand()->getCopyOfPalmData(HandData::RightHand);
if (rightPalm.isValid() && rightPalm.isActive()) {
handParams.isRightEnabled = true;
handParams.rightPosition = rightPalm.getRawPosition();
handParams.rightOrientation = rightPalm.getRawRotation();
handParams.rightPosition = Quaternions::Y_180 * rightPalm.getRawPosition();
handParams.rightOrientation = Quaternions::Y_180 * rightPalm.getRawRotation();
handParams.rightTrigger = rightPalm.getTrigger();
} else {
handParams.isRightEnabled = false;

@ -171,6 +138,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
_rig->updateFromHandParameters(handParams, deltaTime);

_rig->computeMotionAnimationState(deltaTime, _owningAvatar->getPosition(), _owningAvatar->getVelocity(), _owningAvatar->getOrientation());

// evaluate AnimGraph animation and update jointStates.
Model::updateRig(deltaTime, parentTransform);

@ -185,10 +153,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {

_rig->updateFromEyeParameters(eyeParams);

// rebuild the jointState transform for the eyes only. Must be after updateRig.
_rig->updateJointState(eyeParams.leftEyeJointIndex, parentTransform);
_rig->updateJointState(eyeParams.rightEyeJointIndex, parentTransform);

} else {

Model::updateRig(deltaTime, parentTransform);

@ -202,6 +166,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
// However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
// We will revisit that as priorities allow, and particularly after the new rig/animation/joints.
const FBXGeometry& geometry = _geometry->getFBXGeometry();

// If the head is not positioned, updateEyeJoints won't get the math right
glm::quat headOrientation;
_rig->getJointRotation(geometry.headJointIndex, headOrientation);

@ -218,14 +183,14 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
eyeParams.modelTranslation = getTranslation();
eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

_rig->updateFromEyeParameters(eyeParams);
}
}

void SkeletonModel::updateAttitude() {
setTranslation(_owningAvatar->getSkeletonPosition());
static const glm::quat refOrientation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
setRotation(_owningAvatar->getOrientation() * refOrientation);
setRotation(_owningAvatar->getOrientation() * Quaternions::Y_180);
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale());
}
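The updateRig change above stops working in "local avatar" space and instead carries the HMD pose from sensor space into world space and then into the rig frame by inverting the model's rig-to-world transform. A small glm sketch of that matrix chain with made-up values (illustrative only, not hifi code):

// sensor space -> world space -> rig space, as in the hunk above.
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>
#include <iostream>

static glm::mat4 fromQuatAndPos(const glm::quat& rot, const glm::vec3& pos) {
    glm::mat4 m = glm::mat4_cast(rot);
    m[3] = glm::vec4(pos, 1.0f);
    return m;
}

int main() {
    // Pretend poses: where the rig sits in the world, and where the HMD is in sensor space.
    glm::mat4 rigToWorld = fromQuatAndPos(glm::angleAxis(glm::radians(90.0f), glm::vec3(0, 1, 0)),
                                          glm::vec3(5.0f, 0.0f, 2.0f));
    glm::mat4 sensorToWorld = glm::translate(glm::mat4(1.0f), glm::vec3(4.0f, 0.0f, 2.0f));
    glm::mat4 hmdSensorMat = fromQuatAndPos(glm::quat(1.0f, 0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 1.7f, 0.0f));

    // Same chain as the diff: first into the world, then into the rig frame.
    glm::mat4 worldHMDMat = sensorToWorld * hmdSensorMat;
    glm::mat4 rigHMDMat = glm::inverse(rigToWorld) * worldHMDMat;

    glm::vec3 rigHeadPosition = glm::vec3(rigHMDMat[3]);
    std::cout << "head position in rig space: "
              << rigHeadPosition.x << ", " << rigHeadPosition.y << ", " << rigHeadPosition.z << "\n";
    return 0;
}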
@ -238,9 +203,9 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
Model::simulate(deltaTime, fullUpdate);

// let rig compute the model offset
glm::vec3 modelOffset;
if (_rig->getModelOffset(modelOffset)) {
setOffset(modelOffset);
glm::vec3 registrationPoint;
if (_rig->getModelRegistrationPoint(registrationPoint)) {
setOffset(registrationPoint);
}

if (!isActive() || !_owningAvatar->isMyAvatar()) {

@ -251,45 +216,6 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
if (player->isPlaying()) {
return;
}

const FBXGeometry& geometry = _geometry->getFBXGeometry();

// Don't Relax toward hand positions when in animGraph mode.
if (!_rig->getEnableAnimGraph()) {

Hand* hand = _owningAvatar->getHand();
auto leftPalm = hand->getCopyOfPalmData(HandData::LeftHand);
auto rightPalm = hand->getCopyOfPalmData(HandData::RightHand);

const float HAND_RESTORATION_RATE = 0.25f;
if (!leftPalm.isActive() && !rightPalm.isActive()) {
// palms are not yet set, use mouse
if (_owningAvatar->getHandState() == HAND_STATE_NULL) {
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
} else {
// transform into model-frame
glm::vec3 handPosition = glm::inverse(_rotation) * (_owningAvatar->getHandPosition() - _translation);
applyHandPosition(geometry.rightHandJointIndex, handPosition);
}
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
} else {
if (leftPalm.isActive()) {
applyPalmData(geometry.leftHandJointIndex, leftPalm);
} else {
restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
}
if (rightPalm.isActive()) {
applyPalmData(geometry.rightHandJointIndex, rightPalm);
} else {
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
}
}
}
}

void SkeletonModel::renderIKConstraints(gpu::Batch& batch) {
renderJointConstraints(batch, getRightHandJointIndex());
renderJointConstraints(batch, getLeftHandJointIndex());
}

class IndexValue {

@ -302,34 +228,6 @@ bool operator<(const IndexValue& firstIndex, const IndexValue& secondIndex) {
return firstIndex.value < secondIndex.value;
}

void SkeletonModel::applyHandPosition(int jointIndex, const glm::vec3& position) {
if (jointIndex == -1 || jointIndex >= _rig->getJointStateCount()) {
return;
}
// NOTE: 'position' is in model-frame
setJointPosition(jointIndex, position, glm::quat(), false, -1, false, glm::vec3(0.0f, -1.0f, 0.0f), PALM_PRIORITY);

const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::vec3 handPosition, elbowPosition;
getJointPosition(jointIndex, handPosition);
getJointPosition(geometry.joints.at(jointIndex).parentIndex, elbowPosition);
glm::vec3 forearmVector = handPosition - elbowPosition;
float forearmLength = glm::length(forearmVector);
if (forearmLength < EPSILON) {
return;
}
glm::quat handRotation;
if (!_rig->getJointStateRotation(jointIndex, handRotation)) {
return;
}

// align hand with forearm
float sign = (jointIndex == geometry.rightHandJointIndex) ? 1.0f : -1.0f;
_rig->applyJointRotationDelta(jointIndex,
rotationBetween(handRotation * glm::vec3(-sign, 0.0f, 0.0f), forearmVector),
PALM_PRIORITY);
}

void SkeletonModel::applyPalmData(int jointIndex, const PalmData& palm) {
if (jointIndex == -1 || jointIndex >= _rig->getJointStateCount()) {
return;

@ -344,100 +242,6 @@ void SkeletonModel::applyPalmData(int jointIndex, const PalmData& palm) {
glm::quat inverseRotation = glm::inverse(_rotation);
glm::vec3 palmPosition = inverseRotation * (palm.getPosition() - _translation);
glm::quat palmRotation = inverseRotation * palm.getRotation();

inverseKinematics(jointIndex, palmPosition, palmRotation, PALM_PRIORITY);
}

void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
if (jointIndex == -1 || jointIndex >= _rig->getJointStateCount()) {
return;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const float BASE_DIRECTION_SIZE = 0.3f;
float directionSize = BASE_DIRECTION_SIZE * extractUniformScale(_scale);
// FIXME: THe line width of 3.0f is not supported anymore, we ll need a workaround

do {
const FBXJoint& joint = geometry.joints.at(jointIndex);
const JointState& jointState = _rig->getJointState(jointIndex);
glm::vec3 position = _rotation * jointState.getPosition() + _translation;
glm::quat parentRotation = (joint.parentIndex == -1) ?
_rotation :
_rotation * _rig->getJointState(joint.parentIndex).getRotation();
float fanScale = directionSize * 0.75f;

Transform transform = Transform();
transform.setTranslation(position);
transform.setRotation(parentRotation);
transform.setScale(fanScale);
batch.setModelTransform(transform);

const int AXIS_COUNT = 3;

auto geometryCache = DependencyManager::get<GeometryCache>();

for (int i = 0; i < AXIS_COUNT; i++) {
if (joint.rotationMin[i] <= -PI + EPSILON && joint.rotationMax[i] >= PI - EPSILON) {
continue; // unconstrained
}
glm::vec3 axis;
axis[i] = 1.0f;

glm::vec3 otherAxis;
if (i == 0) {
otherAxis.y = 1.0f;
} else {
otherAxis.x = 1.0f;
}
glm::vec4 color(otherAxis.r, otherAxis.g, otherAxis.b, 0.75f);

QVector<glm::vec3> points;
points << glm::vec3(0.0f, 0.0f, 0.0f);
const int FAN_SEGMENTS = 16;
for (int j = 0; j < FAN_SEGMENTS; j++) {
glm::vec3 rotated = glm::angleAxis(glm::mix(joint.rotationMin[i], joint.rotationMax[i],
(float)j / (FAN_SEGMENTS - 1)), axis) * otherAxis;
points << rotated;
}
// TODO: this is really inefficient constantly recreating these vertices buffers. It would be
// better if the skeleton model cached these buffers for each of the joints they are rendering
geometryCache->updateVertices(_triangleFanID, points, color);
geometryCache->renderVertices(batch, gpu::TRIANGLE_FAN, _triangleFanID);

}

renderOrientationDirections(batch, jointIndex, position, _rotation * jointState.getRotation(), directionSize);
jointIndex = joint.parentIndex;

} while (jointIndex != -1 && geometry.joints.at(jointIndex).isFree);
}

void SkeletonModel::renderOrientationDirections(gpu::Batch& batch, int jointIndex,
glm::vec3 position, const glm::quat& orientation, float size) {

auto geometryCache = DependencyManager::get<GeometryCache>();

if (!_jointOrientationLines.contains(jointIndex)) {
OrientationLineIDs jointLineIDs;
jointLineIDs._up = geometryCache->allocateID();
jointLineIDs._front = geometryCache->allocateID();
jointLineIDs._right = geometryCache->allocateID();
_jointOrientationLines[jointIndex] = jointLineIDs;
}
OrientationLineIDs& jointLineIDs = _jointOrientationLines[jointIndex];

glm::vec3 pRight = position + orientation * IDENTITY_RIGHT * size;
glm::vec3 pUp = position + orientation * IDENTITY_UP * size;
glm::vec3 pFront = position + orientation * IDENTITY_FRONT * size;

glm::vec3 red(1.0f, 0.0f, 0.0f);
geometryCache->renderLine(batch, position, pRight, red, jointLineIDs._right);

glm::vec3 green(0.0f, 1.0f, 0.0f);
geometryCache->renderLine(batch, position, pUp, green, jointLineIDs._up);

glm::vec3 blue(0.0f, 0.0f, 1.0f);
geometryCache->renderLine(batch, position, pFront, blue, jointLineIDs._front);
}

bool SkeletonModel::getLeftHandPosition(glm::vec3& position) const {

@ -484,27 +288,10 @@ bool SkeletonModel::getLocalNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPosition(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
}

bool SkeletonModel::getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const {
if (!isActive()) {
return false;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (geometry.neckJointIndex == -1) {
return false;
}
int parentIndex = geometry.joints.at(geometry.neckJointIndex).parentIndex;
glm::quat worldFrameRotation;
bool success = getJointRotationInWorldFrame(parentIndex, worldFrameRotation);
if (success) {
neckParentRotation = worldFrameRotation * _rig->getJointState(parentIndex).getInverseDefaultRotation();
}
return success;
}

bool SkeletonModel::getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {
if (!isActive()) {
return false;
}
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (getJointPosition(geometry.leftEyeJointIndex, firstEyePosition) &&
getJointPosition(geometry.rightEyeJointIndex, secondEyePosition)) {

@ -557,112 +344,17 @@ void SkeletonModel::computeBoundingShape() {
return;
}

// BOUNDING SHAPE HACK: before we measure the bounds of the joints we use IK to put the
// hands and feet into positions that are more correct than the default pose.

// Measure limb lengths so we can specify IK targets that will pull hands and feet tight to body
QVector<QString> endEffectors;
endEffectors.push_back("RightHand");
endEffectors.push_back("LeftHand");
endEffectors.push_back("RightFoot");
endEffectors.push_back("LeftFoot");

QVector<QString> baseJoints;
baseJoints.push_back("RightArm");
baseJoints.push_back("LeftArm");
baseJoints.push_back("RightUpLeg");
baseJoints.push_back("LeftUpLeg");

for (int i = 0; i < endEffectors.size(); ++i) {
QString tipName = endEffectors[i];
QString baseName = baseJoints[i];
float limbLength = 0.0f;
int tipIndex = _rig->indexOfJoint(tipName);
if (tipIndex == -1) {
continue;
}
// save tip's relative rotation for later
glm::quat tipRotation = _rig->getJointState(tipIndex).getRotationInConstrainedFrame();

// IK on each endpoint
int jointIndex = tipIndex;
QVector<int> freeLineage;
float priority = 1.0f;
while (jointIndex > -1) {
JointState limbJoint = _rig->getJointState(jointIndex);
freeLineage.push_back(jointIndex);
if (limbJoint.getName() == baseName) {
glm::vec3 targetPosition = limbJoint.getPosition() - glm::vec3(0.0f, 1.5f * limbLength, 0.0f);
// do IK a few times to make sure the endpoint gets close to its target
for (int j = 0; j < 5; ++j) {
_rig->inverseKinematics(tipIndex,
targetPosition,
glm::quat(),
priority,
freeLineage,
glm::mat4());
}
break;
}
limbLength += limbJoint.getDistanceToParent();
jointIndex = limbJoint.getParentIndex();
}

// since this IK target is totally bogus we restore the tip's relative rotation
_rig->setJointRotationInConstrainedFrame(tipIndex, tipRotation, priority);
}

// recompute all joint model-frame transforms
glm::mat4 rootTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;
for (int i = 0; i < _rig->getJointStateCount(); i++) {
_rig->updateJointState(i, rootTransform);
}
// END BOUNDING SHAPE HACK

// compute bounding box that encloses all shapes
Extents totalExtents;
totalExtents.reset();
totalExtents.addPoint(glm::vec3(0.0f));
int numStates = _rig->getJointStateCount();
for (int i = 0; i < numStates; i++) {
// Each joint contributes a capsule defined by FBXJoint.shapeInfo.
// For totalExtents we use the capsule endpoints expanded by the radius.
const JointState& state = _rig->getJointState(i);
const glm::mat4& jointTransform = state.getTransform();
const FBXJointShapeInfo& shapeInfo = geometry.joints.at(i).shapeInfo;
if (shapeInfo.points.size() > 0) {
for (int j = 0; j < shapeInfo.points.size(); ++j) {
totalExtents.addPoint(extractTranslation(jointTransform * glm::translate(shapeInfo.points[j])));
}
}
// HACK so that default legless robot doesn't knuckle-drag
if (shapeInfo.points.size() == 0 && (state.getName() == "LeftFoot" || state.getName() == "RightFoot")) {
totalExtents.addPoint(extractTranslation(jointTransform));
}
}

// compute bounding shape parameters
// NOTE: we assume that the longest side of totalExtents is the yAxis...
glm::vec3 diagonal = totalExtents.maximum - totalExtents.minimum;
// ... and assume the radius is half the RMS of the X and Z sides:
_boundingCapsuleRadius = 0.5f * sqrtf(0.5f * (diagonal.x * diagonal.x + diagonal.z * diagonal.z));
_boundingCapsuleHeight = diagonal.y - 2.0f * _boundingCapsuleRadius;

glm::vec3 rootPosition = _rig->getJointState(geometry.rootJointIndex).getPosition();
_boundingCapsuleLocalOffset = 0.5f * (totalExtents.maximum + totalExtents.minimum) - rootPosition;

// RECOVER FROM BOUNINDG SHAPE HACK: now that we're all done, restore the default pose
for (int i = 0; i < numStates; i++) {
_rig->restoreJointRotation(i, 1.0f, 1.0f);
_rig->restoreJointTranslation(i, 1.0f, 1.0f);
}
_rig->computeAvatarBoundingCapsule(geometry,
_boundingCapsuleRadius,
_boundingCapsuleHeight,
_boundingCapsuleLocalOffset);
}

void SkeletonModel::renderBoundingCollisionShapes(gpu::Batch& batch, float alpha) {
auto geometryCache = DependencyManager::get<GeometryCache>();
auto deferredLighting = DependencyManager::get<DeferredLightingEffect>();
// draw a blue sphere at the capsule top point
glm::vec3 topPoint = _translation + _boundingCapsuleLocalOffset + (0.5f * _boundingCapsuleHeight) * glm::vec3(0.0f, 1.0f, 0.0f);
glm::vec3 topPoint = _translation + getRotation() * (_boundingCapsuleLocalOffset + (0.5f * _boundingCapsuleHeight) * glm::vec3(0.0f, 1.0f, 0.0f));

deferredLighting->renderSolidSphereInstance(batch,
Transform().setTranslation(topPoint).postScale(_boundingCapsuleRadius),
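computeBoundingShape above now delegates the capsule fit to Rig::computeAvatarBoundingCapsule, but the removed code still documents the fit itself: the radius is taken as half the RMS of the X and Z extents, and the cylinder height is the Y extent minus both end caps. A standalone sketch of that arithmetic (illustrative only, with made-up extents):

// Capsule fit from an axis-aligned joint bounding box, per the removed code above.
#include <cmath>
#include <cstdio>

struct Vec3 { float x, y, z; };
struct Extents { Vec3 minimum, maximum; };
struct Capsule { float radius; float height; Vec3 localOffset; };

Capsule fitCapsule(const Extents& e, const Vec3& rootPosition) {
    Vec3 diagonal { e.maximum.x - e.minimum.x, e.maximum.y - e.minimum.y, e.maximum.z - e.minimum.z };
    Capsule c;
    // Assume the long axis is Y; the radius is half the RMS of the X and Z sides.
    c.radius = 0.5f * std::sqrt(0.5f * (diagonal.x * diagonal.x + diagonal.z * diagonal.z));
    // The cylindrical part is what remains of the Y side after both hemispherical caps.
    c.height = diagonal.y - 2.0f * c.radius;
    // Offset of the capsule center from the root joint.
    c.localOffset = { 0.5f * (e.maximum.x + e.minimum.x) - rootPosition.x,
                      0.5f * (e.maximum.y + e.minimum.y) - rootPosition.y,
                      0.5f * (e.maximum.z + e.minimum.z) - rootPosition.z };
    return c;
}

int main() {
    // A roughly avatar-sized box: 0.6 m x 1.9 m x 0.4 m resting on the origin.
    Extents extents { { -0.3f, 0.0f, -0.2f }, { 0.3f, 1.9f, 0.2f } };
    Capsule c = fitCapsule(extents, { 0.0f, 1.0f, 0.0f });
    std::printf("radius %.3f m, cylinder height %.3f m, offset (%.2f, %.2f, %.2f)\n",
                c.radius, c.height, c.localOffset.x, c.localOffset.y, c.localOffset.z);
    return 0;
}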
|
|
@ -27,14 +27,12 @@ public:
|
|||
SkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr, RigPointer rig = nullptr);
|
||||
~SkeletonModel();
|
||||
|
||||
virtual void initJointStates(QVector<JointState> states) override;
|
||||
virtual void initJointStates() override;
|
||||
|
||||
virtual void simulate(float deltaTime, bool fullUpdate = true) override;
|
||||
virtual void updateRig(float deltaTime, glm::mat4 parentTransform) override;
|
||||
void updateAttitude();
|
||||
|
||||
void renderIKConstraints(gpu::Batch& batch);
|
||||
|
||||
/// Returns the index of the left hand joint, or -1 if not found.
|
||||
int getLeftHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().leftHandJointIndex : -1; }
|
||||
|
||||
|
@ -83,10 +81,6 @@ public:
|
|||
|
||||
bool getLocalNeckPosition(glm::vec3& neckPosition) const;
|
||||
|
||||
/// Returns the rotation of the neck joint's parent from default orientation
|
||||
/// \return whether or not the neck was found
|
||||
bool getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const;
|
||||
|
||||
/// Retrieve the positions of up to two eye meshes.
|
||||
/// \return whether or not both eye meshes were found
|
||||
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
|
||||
|
@ -114,25 +108,9 @@ protected:
|
|||
|
||||
void computeBoundingShape();
|
||||
|
||||
/// \param jointIndex index of joint in model
|
||||
/// \param position position of joint in model-frame
|
||||
void applyHandPosition(int jointIndex, const glm::vec3& position);
|
||||
|
||||
void applyPalmData(int jointIndex, const PalmData& palm);
|
||||
private:
|
||||
|
||||
void renderJointConstraints(gpu::Batch& batch, int jointIndex);
|
||||
void renderOrientationDirections(gpu::Batch& batch, int jointIndex,
|
||||
glm::vec3 position, const glm::quat& orientation, float size);
|
||||
|
||||
struct OrientationLineIDs {
|
||||
int _up;
|
||||
int _front;
|
||||
int _right;
|
||||
};
|
||||
QHash<int, OrientationLineIDs> _jointOrientationLines;
|
||||
int _triangleFanID;
|
||||
|
||||
bool getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
|
||||
|
||||
Avatar* _owningAvatar;
|
||||
|
|
|
@ -196,5 +196,9 @@ QScriptValue WebWindowClass::constructor(QScriptContext* context, QScriptEngine*
|
|||
}
|
||||
|
||||
void WebWindowClass::setTitle(const QString& title) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setTitle", Qt::AutoConnection, Q_ARG(QString, title));
|
||||
return;
|
||||
}
|
||||
_windowWidget->setWindowTitle(title);
|
||||
}
|
||||
|
|
|
@@ -1,210 +0,0 @@
//
// AnimationsDialog.cpp
// interface/src/ui
//
// Created by Andrzej Kapolka on 5/19/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QCheckBox>
#include <QComboBox>
#include <QDialogButtonBox>
#include <QDoubleSpinBox>
#include <QFileDialog>
#include <QFormLayout>
#include <QLineEdit>
#include <QMenu>
#include <QPushButton>
#include <QScrollArea>
#include <QVBoxLayout>

#include "avatar/AvatarManager.h"

#include "AnimationsDialog.h"

AnimationsDialog::AnimationsDialog(QWidget* parent) :
    QDialog(parent)
{
    setWindowTitle("Edit Animations");
    setAttribute(Qt::WA_DeleteOnClose);

    QVBoxLayout* layout = new QVBoxLayout();
    setLayout(layout);

    QScrollArea* area = new QScrollArea();
    layout->addWidget(area);
    area->setWidgetResizable(true);
    QWidget* container = new QWidget();
    container->setLayout(_animations = new QVBoxLayout());
    container->setSizePolicy(QSizePolicy::Ignored, QSizePolicy::Preferred);
    area->setWidget(container);
    _animations->addStretch(1);

    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    foreach (const AnimationHandlePointer& handle, myAvatar->getAnimationHandles()) {
        _animations->insertWidget(_animations->count() - 1, new AnimationPanel(this, handle));
    }

    QPushButton* newAnimation = new QPushButton("New Animation");
    connect(newAnimation, SIGNAL(clicked(bool)), SLOT(addAnimation()));
    layout->addWidget(newAnimation);

    QDialogButtonBox* buttons = new QDialogButtonBox(QDialogButtonBox::Ok);
    layout->addWidget(buttons);
    connect(buttons, SIGNAL(accepted()), SLOT(deleteLater()));
    _ok = buttons->button(QDialogButtonBox::Ok);

    setMinimumSize(600, 600);
}

void AnimationsDialog::setVisible(bool visible) {
    QDialog::setVisible(visible);

    // un-default the OK button
    if (visible) {
        _ok->setDefault(false);
    }
}

void AnimationsDialog::addAnimation() {
    _animations->insertWidget(_animations->count() - 1, new AnimationPanel(this,
        DependencyManager::get<AvatarManager>()->getMyAvatar()->addAnimationHandle()));
}

Setting::Handle<QString> AnimationPanel::_animationDirectory("animation_directory", QString());

AnimationPanel::AnimationPanel(AnimationsDialog* dialog, const AnimationHandlePointer& handle) :
    _dialog(dialog),
    _handle(handle),
    _applying(false) {
    setFrameStyle(QFrame::StyledPanel);

    QFormLayout* layout = new QFormLayout();
    layout->setFieldGrowthPolicy(QFormLayout::AllNonFixedFieldsGrow);
    setLayout(layout);

    layout->addRow("Role:", _role = new QComboBox());
    _role->addItem("idle");
    _role->addItem("sit");
    _role->setEditable(true);
    _role->setCurrentText(handle->getRole());
    connect(_role, SIGNAL(currentTextChanged(const QString&)), SLOT(updateHandle()));

    QHBoxLayout* urlBox = new QHBoxLayout();
    layout->addRow("URL:", urlBox);
    urlBox->addWidget(_url = new QLineEdit(handle->getURL().toString()), 1);
    connect(_url, SIGNAL(editingFinished()), SLOT(updateHandle()));
    QPushButton* chooseURL = new QPushButton("Choose");
    urlBox->addWidget(chooseURL);
    connect(chooseURL, SIGNAL(clicked(bool)), SLOT(chooseURL()));

    layout->addRow("FPS:", _fps = new QDoubleSpinBox());
    _fps->setSingleStep(0.01);
    _fps->setMinimum(-FLT_MAX);
    _fps->setMaximum(FLT_MAX);
    _fps->setValue(handle->getFPS());
    connect(_fps, SIGNAL(valueChanged(double)), SLOT(updateHandle()));

    layout->addRow("Priority:", _priority = new QDoubleSpinBox());
    _priority->setSingleStep(0.01);
    _priority->setMinimum(-FLT_MAX);
    _priority->setMaximum(FLT_MAX);
    _priority->setValue(handle->getPriority());
    connect(_priority, SIGNAL(valueChanged(double)), SLOT(updateHandle()));

    QHBoxLayout* maskedJointBox = new QHBoxLayout();
    layout->addRow("Masked Joints:", maskedJointBox);
    maskedJointBox->addWidget(_maskedJoints = new QLineEdit(handle->getMaskedJoints().join(", ")), 1);
    connect(_maskedJoints, SIGNAL(editingFinished()), SLOT(updateHandle()));
    maskedJointBox->addWidget(_chooseMaskedJoints = new QPushButton("Choose"));
    connect(_chooseMaskedJoints, SIGNAL(clicked(bool)), SLOT(chooseMaskedJoints()));

    layout->addRow("Loop:", _loop = new QCheckBox());
    _loop->setChecked(handle->getLoop());
    connect(_loop, SIGNAL(toggled(bool)), SLOT(updateHandle()));

    layout->addRow("Hold:", _hold = new QCheckBox());
    _hold->setChecked(handle->getHold());
    connect(_hold, SIGNAL(toggled(bool)), SLOT(updateHandle()));

    layout->addRow("Start Automatically:", _startAutomatically = new QCheckBox());
    _startAutomatically->setChecked(handle->getStartAutomatically());
    connect(_startAutomatically, SIGNAL(toggled(bool)), SLOT(updateHandle()));

    layout->addRow("First Frame:", _firstFrame = new QDoubleSpinBox());
    _firstFrame->setSingleStep(0.01);
    _firstFrame->setMaximum(INT_MAX);
    _firstFrame->setValue(handle->getFirstFrame());
    connect(_firstFrame, SIGNAL(valueChanged(double)), SLOT(updateHandle()));

    layout->addRow("Last Frame:", _lastFrame = new QDoubleSpinBox());
    _lastFrame->setSingleStep(0.01);
    _lastFrame->setMaximum(INT_MAX);
    _lastFrame->setValue(handle->getLastFrame());
    connect(_lastFrame, SIGNAL(valueChanged(double)), SLOT(updateHandle()));

    QHBoxLayout* buttons = new QHBoxLayout();
    layout->addRow(buttons);
    buttons->addWidget(_start = new QPushButton("Start"));
    _handle->connect(_start, SIGNAL(clicked(bool)), SLOT(start()));
    buttons->addWidget(_stop = new QPushButton("Stop"));
    _handle->connect(_stop, SIGNAL(clicked(bool)), SLOT(stop()));
    QPushButton* remove = new QPushButton("Delete");
    buttons->addWidget(remove);
    connect(remove, SIGNAL(clicked(bool)), SLOT(removeHandle()));

    _stop->connect(_handle.get(), SIGNAL(runningChanged(bool)), SLOT(setEnabled(bool)));
    _stop->setEnabled(_handle->isRunning());
}

void AnimationPanel::chooseURL() {
    QString filename = QFileDialog::getOpenFileName(this, "Choose Animation",
        _animationDirectory.get(), "Animation files (*.fbx)");
    if (filename.isEmpty()) {
        return;
    }
    _animationDirectory.set(QFileInfo(filename).path());
    _url->setText(QUrl::fromLocalFile(filename).toString());
    emit _url->editingFinished();
}

void AnimationPanel::chooseMaskedJoints() {
    QMenu menu;
    QStringList maskedJoints = _handle->getMaskedJoints();
    foreach (const QString& jointName, DependencyManager::get<AvatarManager>()->getMyAvatar()->getJointNames()) {
        QAction* action = menu.addAction(jointName);
        action->setCheckable(true);
        action->setChecked(maskedJoints.contains(jointName));
    }
    QAction* action = menu.exec(_chooseMaskedJoints->mapToGlobal(QPoint(0, 0)));
    if (action) {
        if (action->isChecked()) {
            maskedJoints.append(action->text());
        } else {
            maskedJoints.removeOne(action->text());
        }
        _handle->setMaskedJoints(maskedJoints);
        _maskedJoints->setText(maskedJoints.join(", "));
    }
}

void AnimationPanel::updateHandle() {
    _handle->setRole(_role->currentText());
    _handle->setURL(_url->text());
    _handle->setFPS(_fps->value());
    _handle->setPriority(_priority->value());
    _handle->setLoop(_loop->isChecked());
    _handle->setHold(_hold->isChecked());
    _handle->setStartAutomatically(_startAutomatically->isChecked());
    _handle->setFirstFrame(_firstFrame->value());
    _handle->setLastFrame(_lastFrame->value());
    _handle->setMaskedJoints(_maskedJoints->text().split(QRegExp("\\s*,\\s*")));
}

void AnimationPanel::removeHandle() {
    DependencyManager::get<AvatarManager>()->getMyAvatar()->removeAnimationHandle(_handle);
    deleteLater();
}
@@ -1,87 +0,0 @@
//
// AnimationsDialog.h
// interface/src/ui
//
// Created by Andrzej Kapolka on 5/19/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AnimationsDialog_h
#define hifi_AnimationsDialog_h

#include <QDialog>
#include <QDoubleSpinBox>
#include <QFrame>

#include <SettingHandle.h>

#include "avatar/MyAvatar.h"

class QCheckBox;
class QComboBox;
class QDoubleSpinner;
class QLineEdit;
class QPushButton;
class QVBoxLayout;

/// Allows users to edit the avatar animations.
class AnimationsDialog : public QDialog {
    Q_OBJECT

public:

    AnimationsDialog(QWidget* parent = nullptr);

    virtual void setVisible(bool visible);

private slots:

    void addAnimation();

private:

    QVBoxLayout* _animations = nullptr;
    QPushButton* _ok = nullptr;
};

/// A panel controlling a single animation.
class AnimationPanel : public QFrame {
    Q_OBJECT

public:

    AnimationPanel(AnimationsDialog* dialog, const AnimationHandlePointer& handle);

private slots:

    void chooseURL();
    void chooseMaskedJoints();
    void updateHandle();
    void removeHandle();

private:

    AnimationsDialog* _dialog = nullptr;
    AnimationHandlePointer _handle;
    QComboBox* _role = nullptr;
    QLineEdit* _url = nullptr;
    QDoubleSpinBox* _fps = nullptr;
    QDoubleSpinBox* _priority = nullptr;
    QCheckBox* _loop = nullptr;
    QCheckBox* _hold = nullptr;
    QCheckBox* _startAutomatically = nullptr;
    QDoubleSpinBox* _firstFrame = nullptr;
    QDoubleSpinBox* _lastFrame = nullptr;
    QLineEdit* _maskedJoints = nullptr;
    QPushButton* _chooseMaskedJoints = nullptr;
    QPushButton* _start = nullptr;
    QPushButton* _stop = nullptr;
    bool _applying;

    static Setting::Handle<QString> _animationDirectory;
};

#endif // hifi_AnimationsDialog_h
@@ -19,7 +19,6 @@
#include <PathUtils.h>

#include "AddressBarDialog.h"
#include "AnimationsDialog.h"
#include "AttachmentsDialog.h"
#include "BandwidthDialog.h"
#include "CachesSizeDialog.h"
@@ -110,15 +109,6 @@ void DialogsManager::editAttachments() {
    }
}

void DialogsManager::editAnimations() {
    if (!_animationsDialog) {
        maybeCreateDialog(_animationsDialog);
        _animationsDialog->show();
    } else {
        _animationsDialog->close();
    }
}

void DialogsManager::audioStatsDetails() {
    if (! _audioStatsDialog) {
        _audioStatsDialog = new AudioStatsDialog(qApp->getWindow());
@@ -52,7 +52,6 @@ public slots:
    void cachesSizeDialog();
    void editPreferences();
    void editAttachments();
    void editAnimations();
    void audioStatsDetails();
    void bandwidthDetails();
    void lodTools();
@@ -247,12 +247,7 @@ void PreferencesDialog::savePreferences() {
    myAvatar->setLeanScale(ui.leanScaleSpin->value());
    myAvatar->setClampedTargetScale(ui.avatarScaleSpin->value());
    if (myAvatar->getAnimGraphUrl() != ui.avatarAnimationEdit->text()) { // If changed, destroy the old and start with the new
        bool isEnabled = myAvatar->getEnableAnimGraph();
        myAvatar->setEnableAnimGraph(false);
        myAvatar->setAnimGraphUrl(ui.avatarAnimationEdit->text());
        if (isEnabled) {
            myAvatar->setEnableAnimGraph(true);
        }
    }

    myAvatar->setRealWorldFieldOfView(ui.realWorldFieldOfViewSpin->value());
@@ -115,6 +115,8 @@ void Stats::updateStats(bool force) {
    auto avatarManager = DependencyManager::get<AvatarManager>();
    // we need to take one avatar out so we don't include ourselves
    STAT_UPDATE(avatarCount, avatarManager->size() - 1);
    STAT_UPDATE(avatarRenderableCount, avatarManager->getNumberInRenderRange());
    STAT_UPDATE(avatarRenderDistance, (int) round(avatarManager->getRenderDistance())); // deliberately truncating
    STAT_UPDATE(serverCount, nodeList->size());
    STAT_UPDATE(framerate, (int)qApp->getFps());
    STAT_UPDATE(simrate, (int)qApp->getAverageSimsPerSecond());
@@ -36,6 +36,8 @@ class Stats : public QQuickItem {
    STATS_PROPERTY(int, simrate, 0)
    STATS_PROPERTY(int, avatarSimrate, 0)
    STATS_PROPERTY(int, avatarCount, 0)
    STATS_PROPERTY(int, avatarRenderableCount, 0)
    STATS_PROPERTY(int, avatarRenderDistance, 0)
    STATS_PROPERTY(int, packetInCount, 0)
    STATS_PROPERTY(int, packetOutCount, 0)
    STATS_PROPERTY(float, mbpsIn, 0)
@@ -117,6 +119,8 @@ signals:
    void simrateChanged();
    void avatarSimrateChanged();
    void avatarCountChanged();
    void avatarRenderableCountChanged();
    void avatarRenderDistanceChanged();
    void packetInCountChanged();
    void packetOutCountChanged();
    void mbpsInChanged();
@@ -10,7 +10,7 @@
//

#include "ModelOverlay.h"
#include "EntityRig.h"
#include <Rig.h>

#include "Application.h"

@@ -18,7 +18,7 @@
QString const ModelOverlay::TYPE = "model";

ModelOverlay::ModelOverlay()
    : _model(std::make_shared<EntityRig>()),
    : _model(std::make_shared<Rig>()),
      _modelTextures(QVariantMap()),
      _updateModel(false)
{
@@ -28,7 +28,7 @@ ModelOverlay::ModelOverlay()

ModelOverlay::ModelOverlay(const ModelOverlay* modelOverlay) :
    Volume3DOverlay(modelOverlay),
    _model(std::make_shared<EntityRig>()),
    _model(std::make_shared<Rig>()),
    _modelTextures(QVariantMap()),
    _url(modelOverlay->_url),
    _updateModel(false)
@@ -91,57 +91,64 @@ void AnimClip::copyFromNetworkAnim() {
    // build a mapping from animation joint indices to skeleton joint indices.
    // by matching joints with the same name.
    const FBXGeometry& geom = _networkAnim->getGeometry();
    const QVector<FBXJoint>& animJoints = geom.joints;
    AnimSkeleton animSkeleton(geom);
    const int animJointCount = animSkeleton.getNumJoints();
    const int skeletonJointCount = _skeleton->getNumJoints();
    std::vector<int> jointMap;
    const int animJointCount = animJoints.count();
    jointMap.reserve(animJointCount);
    for (int i = 0; i < animJointCount; i++) {
        int skeletonJoint = _skeleton->nameToJointIndex(animJoints.at(i).name);
        int skeletonJoint = _skeleton->nameToJointIndex(animSkeleton.getJointName(i));
        if (skeletonJoint == -1) {
            qCWarning(animation) << "animation contains joint =" << animJoints.at(i).name << " which is not in the skeleton, url =" << _url;
            qCWarning(animation) << "animation contains joint =" << animSkeleton.getJointName(i) << " which is not in the skeleton, url =" << _url;
        }
        jointMap.push_back(skeletonJoint);
    }

    const int frameCount = geom.animationFrames.size();
    const int skeletonJointCount = _skeleton->getNumJoints();
    _anim.resize(frameCount);

    const glm::vec3 offsetScale = extractScale(geom.offset);

    for (int frame = 0; frame < frameCount; frame++) {

        // init all joints in animation to bind pose
        // this will give us a reasonable result for bones in the skeleton but not in the animation.
        // init all joints in animation to default pose
        // this will give us a reasonable result for bones in the model skeleton but not in the animation.
        _anim[frame].reserve(skeletonJointCount);
        for (int skeletonJoint = 0; skeletonJoint < skeletonJointCount; skeletonJoint++) {
            _anim[frame].push_back(_skeleton->getRelativeBindPose(skeletonJoint));
            _anim[frame].push_back(_skeleton->getRelativeDefaultPose(skeletonJoint));
        }

        for (int animJoint = 0; animJoint < animJointCount; animJoint++) {

            int skeletonJoint = jointMap[animJoint];

            // skip joints that are in the animation but not in the skeleton.
            if (skeletonJoint >= 0 && skeletonJoint < skeletonJointCount) {

                const glm::vec3& fbxZeroTrans = geom.animationFrames[0].translations[animJoint] * offsetScale;
                const AnimPose& relBindPose = _skeleton->getRelativeBindPose(skeletonJoint);
                const glm::vec3& fbxZeroTrans = geom.animationFrames[0].translations[animJoint];
#ifdef USE_PRE_ROT_FROM_ANIM
                // TODO: This is the correct way to apply the pre rotations from maya, however
                // the current animation set has incorrect preRotations for the left wrist and thumb
                // so it looks wrong if we enable this code.
                glm::quat preRotation = animSkeleton.getPreRotation(animJoint);
#else
                // TODO: This is the legacy approach, this does not work when animations and models do not
                // have the same set of pre rotations. For example when mixing maya models with blender animations.
                glm::quat preRotation = _skeleton->getRelativeBindPose(skeletonJoint).rot;
#endif
                const AnimPose& relDefaultPose = _skeleton->getRelativeDefaultPose(skeletonJoint);

                // used to adjust translation offsets, so large translation animations on the reference skeleton
                // will be adjusted when played on a skeleton with short limbs.
                float limbLengthScale = fabsf(glm::length(fbxZeroTrans)) <= 0.0001f ? 1.0f : (glm::length(relBindPose.trans) / glm::length(fbxZeroTrans));
                float limbLengthScale = fabsf(glm::length(fbxZeroTrans)) <= 0.0001f ? 1.0f : (glm::length(relDefaultPose.trans) / glm::length(fbxZeroTrans));

                AnimPose& pose = _anim[frame][skeletonJoint];
                const FBXAnimationFrame& fbxAnimFrame = geom.animationFrames[frame];

                // rotation in fbxAnimationFrame is a delta from a reference skeleton bind pose.
                pose.rot = relBindPose.rot * fbxAnimFrame.rotations[animJoint];
                // rotation in fbxAnimationFrame is a delta from its preRotation.
                pose.rot = preRotation * fbxAnimFrame.rotations[animJoint];

                // translation in fbxAnimationFrame is not a delta.
                // convert it into a delta by subtracting from the first frame.
                const glm::vec3& fbxTrans = fbxAnimFrame.translations[animJoint] * offsetScale;
                pose.trans = relBindPose.trans + limbLengthScale * (fbxTrans - fbxZeroTrans);
                const glm::vec3& fbxTrans = fbxAnimFrame.translations[animJoint];
                pose.trans = relDefaultPose.trans + limbLengthScale * (fbxTrans - fbxZeroTrans);
            }
        }
    }
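The AnimClip hunk above maps animation joints onto the model skeleton by name and rescales per-frame translations so that a clip authored on a reference skeleton still looks plausible on a skeleton with different limb lengths. A minimal standalone sketch of that scaling idea, with illustrative values rather than the engine's types:

    #include <glm/glm.hpp>
    #include <cstdio>

    // Rescale an animated translation so a clip authored on a long-limbed reference
    // skeleton plays back plausibly on a shorter-limbed target skeleton.
    glm::vec3 retargetTranslation(const glm::vec3& animFrameTrans,      // translation in the current frame
                                  const glm::vec3& animZeroTrans,       // translation in frame 0 (reference offset)
                                  const glm::vec3& targetDefaultTrans)  // target joint's default translation
    {
        float zeroLen = glm::length(animZeroTrans);
        // Ratio of target limb length to reference limb length; fall back to 1 when
        // the reference offset is degenerate.
        float limbLengthScale = (zeroLen <= 0.0001f) ? 1.0f : glm::length(targetDefaultTrans) / zeroLen;
        // Treat the animated translation as a delta from frame 0, scale the delta,
        // and re-apply it on top of the target skeleton's default translation.
        return targetDefaultTrans + limbLengthScale * (animFrameTrans - animZeroTrans);
    }

    int main() {
        glm::vec3 scaled = retargetTranslation({0.0f, 1.2f, 0.0f}, {0.0f, 1.0f, 0.0f}, {0.0f, 0.5f, 0.0f});
        std::printf("%.2f %.2f %.2f\n", scaled.x, scaled.y, scaled.z); // prints 0.00 0.60 0.00
    }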
@@ -37,13 +37,18 @@ public:
    void setFrameVar(const QString& frameVar) { _frameVar = frameVar; }

    float getStartFrame() const { return _startFrame; }
    void setStartFrame(float startFrame) { _startFrame = startFrame; }
    float getEndFrame() const { return _endFrame; }
    void setEndFrame(float endFrame) { _endFrame = endFrame; }

    void setTimeScale(float timeScale) { _timeScale = timeScale; }
    float getTimeScale() const { return _timeScale; }

protected:
    bool getLoopFlag() const { return _loopFlag; }
    void setLoopFlag(bool loopFlag) { _loopFlag = loopFlag; }

    void loadURL(const QString& url);
protected:

    virtual void setCurrentFrameInternal(float frame) override;
@@ -98,8 +98,8 @@ void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::
        target.setType(animVars.lookup(targetVar.typeVar, (int)IKTarget::Type::RotationAndPosition));
        if (target.getType() != IKTarget::Type::Unknown) {
            AnimPose defaultPose = _skeleton->getAbsolutePose(targetVar.jointIndex, underPoses);
            glm::quat rotation = animVars.lookup(targetVar.rotationVar, defaultPose.rot);
            glm::vec3 translation = animVars.lookup(targetVar.positionVar, defaultPose.trans);
            glm::quat rotation = animVars.lookupRigToGeometry(targetVar.rotationVar, defaultPose.rot);
            glm::vec3 translation = animVars.lookupRigToGeometry(targetVar.positionVar, defaultPose.trans);
            if (target.getType() == IKTarget::Type::HipsRelativeRotationAndPosition) {
                translation += _hipsOffset;
            }
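The change from lookup to lookupRigToGeometry means IK target rotations and translations supplied through the anim variant map are converted from the avatar's rig frame into the model's geometry frame before use; the actual conversion lives elsewhere in the engine. The sketch below only illustrates the general frame-conversion idea, using a hypothetical rigToGeometry transform that stands in for whatever the engine stores:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Hypothetical frame transform (rotation + translation) taking rig-frame values
    // into geometry-frame values; a stand-in, not the engine's representation.
    struct FrameTransform {
        glm::quat rot;
        glm::vec3 trans;
    };

    // Convert an IK target position expressed in the rig frame into the geometry frame.
    glm::vec3 toGeometryFramePos(const FrameTransform& rigToGeometry, const glm::vec3& rigPos) {
        return rigToGeometry.rot * rigPos + rigToGeometry.trans;
    }

    // Rotations only need the rotational part of the frame change.
    glm::quat toGeometryFrameRot(const FrameTransform& rigToGeometry, const glm::quat& rigRot) {
        return rigToGeometry.rot * rigRot;
    }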
@@ -54,9 +54,9 @@ protected:
    AnimInverseKinematics& operator=(const AnimInverseKinematics&) = delete;

    struct IKTargetVar {
        IKTargetVar(const QString& jointNameIn,
                    const QString& positionVarIn,
                    const QString& rotationVarIn,
        IKTargetVar(const QString& jointNameIn,
                    const QString& positionVarIn,
                    const QString& rotationVarIn,
                    const QString& typeVarIn) :
            positionVar(positionVarIn),
            rotationVar(rotationVarIn),
@@ -46,39 +46,15 @@ const AnimPoseVec& AnimManipulator::overlay(const AnimVariantMap& animVars, floa

        if (jointVar.jointIndex >= 0) {

            AnimPose defaultAbsPose;
            // use the underPose as our default value if we can.
            AnimPose defaultRelPose;
            AnimPose parentAbsPose = AnimPose::identity;
            if (jointVar.jointIndex <= (int)underPoses.size()) {

                // jointVar is an absolute rotation, if it is not set we will use the underPose as our default value
                defaultRelPose = underPoses[jointVar.jointIndex];
                defaultAbsPose = _skeleton->getAbsolutePose(jointVar.jointIndex, underPoses);
                defaultAbsPose.rot = animVars.lookup(jointVar.var, defaultAbsPose.rot);

                // because jointVar is absolute, we must use an absolute parent frame to convert into a relative pose.
                int parentIndex = _skeleton->getParentIndex(jointVar.jointIndex);
                if (parentIndex >= 0) {
                    parentAbsPose = _skeleton->getAbsolutePose(parentIndex, underPoses);
                }

            } else {

                // jointVar is an absolute rotation, if it is not set we will use the bindPose as our default value
                defaultRelPose = AnimPose::identity;
                defaultAbsPose = _skeleton->getAbsoluteBindPose(jointVar.jointIndex);
                defaultAbsPose.rot = animVars.lookup(jointVar.var, defaultAbsPose.rot);

                // because jointVar is absolute, we must use an absolute parent frame to convert into a relative pose
                // here we use the bind pose
                int parentIndex = _skeleton->getParentIndex(jointVar.jointIndex);
                if (parentIndex >= 0) {
                    parentAbsPose = _skeleton->getAbsoluteBindPose(parentIndex);
                }
            }

            // convert from absolute to relative
            AnimPose relPose = parentAbsPose.inverse() * defaultAbsPose;
            AnimPose relPose = computeRelativePoseFromJointVar(animVars, jointVar, defaultRelPose, underPoses);

            // blend with underPose
            ::blend(1, &defaultRelPose, &relPose, _alpha, &_poses[jointVar.jointIndex]);
@@ -114,3 +90,44 @@ const AnimPoseVec& AnimManipulator::getPosesInternal() const {

void AnimManipulator::addJointVar(const JointVar& jointVar) {
    _jointVars.push_back(jointVar);
}

void AnimManipulator::removeAllJointVars() {
    _jointVars.clear();
}

AnimPose AnimManipulator::computeRelativePoseFromJointVar(const AnimVariantMap& animVars, const JointVar& jointVar,
                                                          const AnimPose& defaultRelPose, const AnimPoseVec& underPoses) {

    AnimPose defaultAbsPose = _skeleton->getAbsolutePose(jointVar.jointIndex, underPoses);

    if (jointVar.type == JointVar::Type::AbsoluteRotation || jointVar.type == JointVar::Type::AbsolutePosition) {

        if (jointVar.type == JointVar::Type::AbsoluteRotation) {
            defaultAbsPose.rot = animVars.lookupRigToGeometry(jointVar.var, defaultAbsPose.rot);
        } else if (jointVar.type == JointVar::Type::AbsolutePosition) {
            defaultAbsPose.trans = animVars.lookupRigToGeometry(jointVar.var, defaultAbsPose.trans);
        }

        // because jointVar is absolute, we must use an absolute parent frame to convert into a relative pose.
        AnimPose parentAbsPose = AnimPose::identity;
        int parentIndex = _skeleton->getParentIndex(jointVar.jointIndex);
        if (parentIndex >= 0) {
            parentAbsPose = _skeleton->getAbsolutePose(parentIndex, underPoses);
        }

        // convert from absolute to relative
        return parentAbsPose.inverse() * defaultAbsPose;

    } else {

        // override the default rel pose
        AnimPose relPose = defaultRelPose;
        if (jointVar.type == JointVar::Type::RelativeRotation) {
            relPose.rot = animVars.lookupRigToGeometry(jointVar.var, defaultRelPose.rot);
        } else if (jointVar.type == JointVar::Type::RelativePosition) {
            relPose.trans = animVars.lookupRigToGeometry(jointVar.var, defaultRelPose.trans);
        }

        return relPose;
    }
}
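Several places in these hunks turn an absolute (model-space) joint pose into a pose relative to its parent by composing with the inverse of the parent's absolute pose. A small self-contained sketch of that composition, using a simplified pose type rather than the engine's AnimPose:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Simplified rotation+translation pose (a stand-in for the engine's AnimPose).
    struct Pose {
        glm::quat rot;
        glm::vec3 trans;

        // Compose two poses: apply rhs first, then this one.
        Pose operator*(const Pose& rhs) const {
            return Pose{ rot * rhs.rot, rot * rhs.trans + trans };
        }

        // Inverse pose: undoes this transform.
        Pose inverse() const {
            glm::quat invRot = glm::inverse(rot);
            return Pose{ invRot, invRot * -trans };
        }
    };

    // A joint pose relative to its parent is parentAbs^-1 * childAbs;
    // composing parentAbs * relative recovers the absolute pose again.
    Pose relativeToParent(const Pose& parentAbs, const Pose& childAbs) {
        return parentAbs.inverse() * childAbs;
    }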
@@ -30,19 +30,33 @@ public:
    virtual void setSkeletonInternal(AnimSkeleton::ConstPointer skeleton) override;

    struct JointVar {
        JointVar(const QString& varIn, const QString& jointNameIn) : var(varIn), jointName(jointNameIn), jointIndex(-1), hasPerformedJointLookup(false) {}
        enum class Type {
            AbsoluteRotation = 0,
            AbsolutePosition,
            RelativeRotation,
            RelativePosition,
            NumTypes
        };

        JointVar(const QString& varIn, const QString& jointNameIn, Type typeIn) : var(varIn), jointName(jointNameIn), type(typeIn), jointIndex(-1), hasPerformedJointLookup(false) {}
        QString var = "";
        QString jointName = "";
        Type type = Type::AbsoluteRotation;
        int jointIndex = -1;
        bool hasPerformedJointLookup = false;
        bool isRelative = false;
    };

    void addJointVar(const JointVar& jointVar);
    void removeAllJointVars();

protected:
    // for AnimDebugDraw rendering
    virtual const AnimPoseVec& getPosesInternal() const override;

    AnimPose computeRelativePoseFromJointVar(const AnimVariantMap& animVars, const JointVar& jointVar,
                                             const AnimPose& defaultRelPose, const AnimPoseVec& underPoses);

    AnimPoseVec _poses;
    float _alpha;
    QString _alphaVar;
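With the header above, each JointVar now carries an explicit Type describing whether the bound anim variable overrides an absolute or relative rotation or position. A hedged usage sketch of how a manipulator node might be configured with the new constructor; the variable and joint names here are made up for illustration and are not taken from this diff:

    // Assumes the AnimManipulator / JointVar declarations shown in the hunk above.
    void configureHeadOverride(AnimManipulator& manipulator) {
        // Drive the head joint's absolute rotation from a "headRotation" anim var...
        manipulator.addJointVar(AnimManipulator::JointVar("headRotation", "Head",
                                                          AnimManipulator::JointVar::Type::AbsoluteRotation));
        // ...and nudge the spine with a relative position override.
        manipulator.addJointVar(AnimManipulator::JointVar("spineOffset", "Spine",
                                                          AnimManipulator::JointVar::Type::RelativePosition));
    }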
@@ -8,12 +8,35 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QtGlobal>
#include "AnimNode.h"

void AnimNode::removeChild(AnimNode::Pointer child) {
AnimNode::Pointer AnimNode::getParent() {
    return _parent.lock();
}

void AnimNode::addChild(Pointer child) {
    _children.push_back(child);
    child->_parent = shared_from_this();
}

void AnimNode::removeChild(Pointer child) {
    auto iter = std::find(_children.begin(), _children.end(), child);
    if (iter != _children.end()) {
        _children.erase(iter);
        child->_parent.reset();
    }
}

void AnimNode::replaceChild(Pointer oldChild, Pointer newChild) {
    auto iter = std::find(_children.begin(), _children.end(), oldChild);
    if (iter != _children.end()) {
        oldChild->_parent.reset();
        newChild->_parent = shared_from_this();
        if (_skeleton) {
            newChild->setSkeleton(_skeleton);
        }
        *iter = newChild;
    }
}

@@ -22,7 +45,7 @@ AnimNode::Pointer AnimNode::getChild(int i) const {
    return _children[i];
}

void AnimNode::setSkeleton(const AnimSkeleton::Pointer skeleton) {
void AnimNode::setSkeleton(AnimSkeleton::ConstPointer skeleton) {
    setSkeletonInternal(skeleton);
    for (auto&& child : _children) {
        child->setSkeleton(skeleton);
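The AnimNode changes above track each child's parent through a weak pointer assigned from shared_from_this(), so a node can find its parent without parent and child keeping each other alive through a shared_ptr cycle. A minimal standalone sketch of that pattern (the Node class is illustrative, not the engine's AnimNode):

    #include <algorithm>
    #include <memory>
    #include <vector>

    class Node : public std::enable_shared_from_this<Node> {
    public:
        using Pointer = std::shared_ptr<Node>;

        // Children own their subtrees via shared_ptr; the back-pointer to the
        // parent is weak, so there is no ownership cycle.
        void addChild(Pointer child) {
            _children.push_back(child);
            child->_parent = shared_from_this();
        }

        void removeChild(Pointer child) {
            auto iter = std::find(_children.begin(), _children.end(), child);
            if (iter != _children.end()) {
                _children.erase(iter);
                child->_parent.reset();
            }
        }

        // lock() yields nullptr if the parent has already been destroyed.
        Pointer getParent() { return _parent.lock(); }

    private:
        std::vector<Pointer> _children;
        std::weak_ptr<Node> _parent;
    };

As with the engine code, addChild assumes the parent itself is already managed by a shared_ptr (for example, created with std::make_shared), since shared_from_this() is otherwise undefined behavior.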
Some files were not shown because too many files have changed in this diff.