Resolve merging conflict
Commit c62108c3c8
21 changed files with 528 additions and 54 deletions

Changed paths:
assignment-client/src
interface/resources/qml/hifi
interface/src
libraries/avatars/src
libraries/fbx/src
libraries/gl/src/gl
libraries/script-engine/src
script-archive/acScripts
scripts/system
@@ -138,7 +138,6 @@ void Agent::handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, Sh

void Agent::handleAudioPacket(QSharedPointer<ReceivedMessage> message) {
_receivedAudioStream.parseData(*message);

_lastReceivedAudioLoudness = _receivedAudioStream.getNextOutputFrameLoudness();
_receivedAudioStream.clearBuffer();
}

@@ -323,12 +322,14 @@ void Agent::scriptRequestFinished() {
request->deleteLater();
}


void Agent::executeScript() {
_scriptEngine = std::unique_ptr<ScriptEngine>(new ScriptEngine(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload));
_scriptEngine->setParent(this); // be the parent of the script engine so it gets moved when we do

// setup an Avatar for the script to use
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();

connect(_scriptEngine.get(), SIGNAL(update(float)), scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
scriptedAvatar->setForceFaceTrackerConnected(true);

@@ -338,11 +339,33 @@ void Agent::executeScript() {
// give this AvatarData object to the script engine
_scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());

auto player = DependencyManager::get<recording::Deck>();
connect(player.data(), &recording::Deck::playbackStateChanged, [=] {
if (player->isPlaying()) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
if (recordingInterface->getPlayFromCurrentLocation()) {
scriptedAvatar->setRecordingBasis();
}
} else {
scriptedAvatar->clearRecordingBasis();
}
});

using namespace recording;
static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
// FIXME how to deal with driving multiple avatars locally?
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this, scriptedAvatar](Frame::ConstPointer frame) {

auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();

// FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
if (!useFrameSkeleton) {
static std::once_flag warning;
std::call_once(warning, [] {
qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
});
}

AvatarData::fromFrame(frame->data, *scriptedAvatar);
});

@@ -352,8 +375,11 @@ void Agent::executeScript() {
const QByteArray& audio = frame->data;
static quint16 audioSequenceNumber{ 0 };
Transform audioTransform;

auto headOrientation = scriptedAvatar->getHeadOrientation();
audioTransform.setTranslation(scriptedAvatar->getPosition());
audioTransform.setRotation(scriptedAvatar->getOrientation());
audioTransform.setRotation(headOrientation);

QByteArray encodedBuffer;
if (_encoder) {
_encoder->encode(audio, encodedBuffer);

@@ -537,7 +563,10 @@ void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
}

void Agent::processAgentAvatarAudio() {
if (_isAvatar && (_isListeningToAudioStream || _avatarSound)) {
auto recordingInterface = DependencyManager::get<RecordingScriptingInterface>();
bool isPlayingRecording = recordingInterface->isPlaying();

if (_isAvatar && ((_isListeningToAudioStream && !isPlayingRecording) || _avatarSound)) {
// if we have an avatar audio stream then send it out to our audio-mixer
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
bool silentFrame = true;

@@ -16,6 +16,8 @@ import QtQuick.Controls 1.4
import "../styles-uit"
import "../controls-uit" as HifiControls

// references HMD, Users, UserActivityLogger from root context

Rectangle {
id: pal
// Size

@@ -26,7 +28,7 @@ Rectangle {
// Properties
property int myCardHeight: 90
property int rowHeight: 70
property int actionButtonWidth: 75
property int actionButtonWidth: 55
property int nameCardWidth: palContainer.width - actionButtonWidth*(iAmAdmin ? 4 : 2) - 4 - hifi.dimensions.scrollbarBackgroundWidth
property var myData: ({displayName: "", userName: "", audioLevel: 0.0, admin: true}) // valid dummy until set
property var ignored: ({}); // Keep a local list of ignored avatars & their data. Necessary because HashMap is slow to respond after ignoring.

@@ -35,7 +37,9 @@ Rectangle {
// Keep a local list of per-avatar gainSliderValueDBs. Far faster than fetching this data from the server.
// NOTE: if another script modifies the per-avatar gain, this value won't be accurate!
property var gainSliderValueDB: ({});


HifiConstants { id: hifi }

// The letterbox used for popup messages
LetterboxMessage {
id: letterboxMessage

@@ -54,8 +58,8 @@ Rectangle {
property bool punctuationMode: false
id: palContainer
// Size
width: pal.width - 50
height: pal.height - 50
width: pal.width - 10
height: pal.height - 10
// Style
color: pal.color
// Anchors

@@ -397,7 +401,7 @@ Rectangle {
width: 20
height: 28
anchors.right: adminTab.right
anchors.rightMargin: 31 + hifi.dimensions.scrollbarBackgroundWidth
anchors.rightMargin: 10 + hifi.dimensions.scrollbarBackgroundWidth
anchors.top: adminTab.top
anchors.topMargin: 2
RalewayRegular {

@@ -422,6 +426,8 @@ Rectangle {
onExited: adminHelpText.color = hifi.colors.redHighlight
}
}
}

HifiControls.Keyboard {
id: keyboard
raised: myCard.currentlyEditingDisplayName && HMD.active

@@ -432,7 +438,7 @@ Rectangle {
right: parent.right
}
}
}

// Timer used when selecting table rows that aren't yet present in the model
// (i.e. when selecting avatars using edit.js or sphere overlays)
Timer {

@@ -18,6 +18,16 @@ Item {
loader.item.scriptURL = injectedJavaScriptUrl;
}

// used to send a message from qml to interface script.
signal sendToScript(var message);

// used to receive messages from interface script
function fromScript(message) {
if (loader.item.hasOwnProperty("fromScript")) {
loader.item.fromScript(message);
}
}

SoundEffect {
id: buttonClickSound
volume: 0.1

@@ -55,6 +65,9 @@ Item {
}
});
}
if (loader.item.hasOwnProperty("sendToScript")) {
loader.item.sendToScript.connect(tabletRoot.sendToScript);
}
loader.item.forceActiveFocus();
}
}

@@ -1010,7 +1010,7 @@ void Avatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {

void Avatar::setModelURLFinished(bool success) {
if (!success && _skeletonModelURL != AvatarData::defaultFullAvatarModelUrl()) {
qDebug() << "Using default after failing to load Avatar model: " << _skeletonModelURL;
qCWarning(interfaceapp) << "Using default after failing to load Avatar model: " << _skeletonModelURL;
// call _skeletonModel.setURL, but leave our copy of _skeletonModelURL alone. This is so that
// we don't redo this every time we receive an identity packet from the avatar with the bad url.
QMetaObject::invokeMethod(_skeletonModel.get(), "setURL",

@@ -134,7 +134,7 @@ Q_LOGGING_CATEGORY(trace_simulation_avatar, "trace.simulation.avatar");

float AvatarManager::getAvatarDataRate(const QUuid& sessionID, const QString& rateName) {
auto avatar = getAvatarBySessionID(sessionID);
return avatar->getDataRate(rateName);
return avatar ? avatar->getDataRate(rateName) : 0.0f;
}

class AvatarPriority {

@@ -1165,7 +1165,6 @@ void MyAvatar::clearJointsData() {
}

void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {

Avatar::setSkeletonModelURL(skeletonModelURL);
render::ScenePointer scene = qApp->getMain3DScene();
_skeletonModel->setVisibleInScene(true, scene);

@@ -16,6 +16,8 @@

#include <SettingHandle.h>

#include <display-plugins/CompositorHelper.h>

#include "Application.h"
#include "DomainHandler.h"
#include "MainWindow.h"

@@ -147,6 +149,15 @@ void WindowScriptingInterface::setPreviousBrowseLocation(const QString& location
Setting::Handle<QVariant>(LAST_BROWSE_LOCATION_SETTING).set(location);
}

/// Makes sure that the reticle is visible, use this in blocking forms that require a reticle and
/// might be in same thread as a script that sets the reticle to invisible
void WindowScriptingInterface::ensureReticleVisible() const {
auto compositorHelper = DependencyManager::get<CompositorHelper>();
if (!compositorHelper->getReticleVisible()) {
compositorHelper->setReticleVisible(true);
}
}

/// Display an open file dialog. If `directory` is an invalid file or directory the browser will start at the current
/// working directory.
/// \param const QString& title title of the window

@@ -154,6 +165,7 @@ void WindowScriptingInterface::setPreviousBrowseLocation(const QString& location
/// \param const QString& nameFilter filter to filter filenames by - see `QFileDialog`
/// \return QScriptValue file path as a string if one was selected, otherwise `QScriptValue::NullValue`
QScriptValue WindowScriptingInterface::browse(const QString& title, const QString& directory, const QString& nameFilter) {
ensureReticleVisible();
QString path = directory;
if (path.isEmpty()) {
path = getPreviousBrowseLocation();

@@ -175,6 +187,7 @@ QScriptValue WindowScriptingInterface::browse(const QString& title, const QStrin
/// \param const QString& nameFilter filter to filter filenames by - see `QFileDialog`
/// \return QScriptValue file path as a string if one was selected, otherwise `QScriptValue::NullValue`
QScriptValue WindowScriptingInterface::save(const QString& title, const QString& directory, const QString& nameFilter) {
ensureReticleVisible();
QString path = directory;
if (path.isEmpty()) {
path = getPreviousBrowseLocation();

@@ -83,6 +83,8 @@ private:
QString getPreviousBrowseLocation() const;
void setPreviousBrowseLocation(const QString& location);

void ensureReticleVisible() const;

int createMessageBox(QString title, QString text, int buttons, int defaultButton);
QHash<int, QQuickItem*> _messageBoxes;
int _lastMessageBoxID{ -1 };

@@ -23,11 +23,14 @@
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <GeometryUtil.h>
#include <scripting/HMDScriptingInterface.h>
#include <gl/OffscreenQmlSurface.h>
#include <PathUtils.h>
#include <RegisteredMetaTypes.h>
#include <TabletScriptingInterface.h>
#include <TextureCache.h>
#include <UsersScriptingInterface.h>
#include <UserActivityLoggerScriptingInterface.h>
#include <AbstractViewStateInterface.h>
#include <gl/OffscreenQmlSurface.h>
#include <gl/OffscreenQmlSurfaceCache.h>

@@ -149,6 +152,10 @@ void Web3DOverlay::loadSourceURL() {
_webSurface->load(_url, [&](QQmlContext* context, QObject* obj) {});
_webSurface->resume();

_webSurface->getRootContext()->setContextProperty("Users", DependencyManager::get<UsersScriptingInterface>().data());
_webSurface->getRootContext()->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
_webSurface->getRootContext()->setContextProperty("UserActivityLogger", DependencyManager::get<UserActivityLoggerScriptingInterface>().data());

if (_webSurface->getRootItem() && _webSurface->getRootItem()->objectName() == "tabletRoot") {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
auto flags = tabletScriptingInterface->getFlags();

@@ -283,14 +283,20 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
destinationBuffer += sizeof(packetStateFlags);

if (hasAvatarGlobalPosition) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AvatarGlobalPosition*>(destinationBuffer);
data->globalPosition[0] = _globalPosition.x;
data->globalPosition[1] = _globalPosition.y;
data->globalPosition[2] = _globalPosition.z;
destinationBuffer += sizeof(AvatarDataPacket::AvatarGlobalPosition);

int numBytes = destinationBuffer - startSection;

_globalPositionRateOutbound.increment(numBytes);
}

if (hasAvatarBoundingBox) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AvatarBoundingBox*>(destinationBuffer);

data->avatarDimensions[0] = _globalBoundingBoxDimensions.x;

@@ -302,36 +308,56 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
data->boundOriginOffset[2] = _globalBoundingBoxOffset.z;

destinationBuffer += sizeof(AvatarDataPacket::AvatarBoundingBox);

int numBytes = destinationBuffer - startSection;
_avatarBoundingBoxRateOutbound.increment(numBytes);
}

if (hasAvatarOrientation) {
auto startSection = destinationBuffer;
auto localOrientation = getLocalOrientation();
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, localOrientation);

int numBytes = destinationBuffer - startSection;
_avatarOrientationRateOutbound.increment(numBytes);
}

if (hasAvatarScale) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AvatarScale*>(destinationBuffer);
auto scale = getDomainLimitedScale();
packFloatRatioToTwoByte((uint8_t*)(&data->scale), scale);
destinationBuffer += sizeof(AvatarDataPacket::AvatarScale);

int numBytes = destinationBuffer - startSection;
_avatarScaleRateOutbound.increment(numBytes);
}

if (hasLookAtPosition) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::LookAtPosition*>(destinationBuffer);
auto lookAt = _headData->getLookAtPosition();
data->lookAtPosition[0] = lookAt.x;
data->lookAtPosition[1] = lookAt.y;
data->lookAtPosition[2] = lookAt.z;
destinationBuffer += sizeof(AvatarDataPacket::LookAtPosition);

int numBytes = destinationBuffer - startSection;
_lookAtPositionRateOutbound.increment(numBytes);
}

if (hasAudioLoudness) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AudioLoudness*>(destinationBuffer);
data->audioLoudness = packFloatGainToByte(_headData->getAudioLoudness() / AUDIO_LOUDNESS_SCALE);
destinationBuffer += sizeof(AvatarDataPacket::AudioLoudness);

int numBytes = destinationBuffer - startSection;
_audioLoudnessRateOutbound.increment(numBytes);
}

if (hasSensorToWorldMatrix) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::SensorToWorldMatrix*>(destinationBuffer);
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
packOrientationQuatToSixBytes(data->sensorToWorldQuat, glmExtractRotation(sensorToWorldMatrix));

@@ -341,9 +367,13 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
data->sensorToWorldTrans[1] = sensorToWorldMatrix[3][1];
data->sensorToWorldTrans[2] = sensorToWorldMatrix[3][2];
destinationBuffer += sizeof(AvatarDataPacket::SensorToWorldMatrix);

int numBytes = destinationBuffer - startSection;
_sensorToWorldRateOutbound.increment(numBytes);
}

if (hasAdditionalFlags) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AdditionalFlags*>(destinationBuffer);

uint8_t flags { 0 };

@@ -370,27 +400,39 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
data->flags = flags;
destinationBuffer += sizeof(AvatarDataPacket::AdditionalFlags);

int numBytes = destinationBuffer - startSection;
_additionalFlagsRateOutbound.increment(numBytes);
}

if (hasAvatarLocalPosition) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AvatarLocalPosition*>(destinationBuffer);
auto localPosition = getLocalPosition();
data->localPosition[0] = localPosition.x;
data->localPosition[1] = localPosition.y;
data->localPosition[2] = localPosition.z;
destinationBuffer += sizeof(AvatarDataPacket::AvatarLocalPosition);

int numBytes = destinationBuffer - startSection;
_localPositionRateOutbound.increment(numBytes);
}

if (hasParentInfo) {
auto startSection = destinationBuffer;
auto parentInfo = reinterpret_cast<AvatarDataPacket::ParentInfo*>(destinationBuffer);
QByteArray referentialAsBytes = parentID.toRfc4122();
memcpy(parentInfo->parentUUID, referentialAsBytes.data(), referentialAsBytes.size());
parentInfo->parentJointIndex = _parentJointIndex;
destinationBuffer += sizeof(AvatarDataPacket::ParentInfo);

int numBytes = destinationBuffer - startSection;
_parentInfoRateOutbound.increment(numBytes);
}

// If it is connected, pack up the data
if (hasFaceTrackerInfo) {
auto startSection = destinationBuffer;
auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);

faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;

@@ -403,10 +445,14 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
// followed by a variable number of float coefficients
memcpy(destinationBuffer, _headData->_blendshapeCoefficients.data(), _headData->_blendshapeCoefficients.size() * sizeof(float));
destinationBuffer += _headData->_blendshapeCoefficients.size() * sizeof(float);

int numBytes = destinationBuffer - startSection;
_faceTrackerRateOutbound.increment(numBytes);
}

// If it is connected, pack up the data
if (hasJointData) {
auto startSection = destinationBuffer;
QReadLocker readLock(&_jointDataLock);

// joint rotation data

@@ -554,6 +600,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
<< (destinationBuffer - startPosition);
}
#endif

int numBytes = destinationBuffer - startSection;
_jointDataRateOutbound.increment(numBytes);
}

int avatarDataSize = destinationBuffer - startPosition;

@@ -1028,6 +1077,30 @@ float AvatarData::getDataRate(const QString& rateName) {
return _faceTrackerRate.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "jointData") {
return _jointDataRate.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "globalPositionOutbound") {
return _globalPositionRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "localPositionOutbound") {
return _localPositionRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "avatarBoundingBoxOutbound") {
return _avatarBoundingBoxRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "avatarOrientationOutbound") {
return _avatarOrientationRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "avatarScaleOutbound") {
return _avatarScaleRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "lookAtPositionOutbound") {
return _lookAtPositionRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "audioLoudnessOutbound") {
return _audioLoudnessRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "sensorToWorkMatrixOutbound") {
return _sensorToWorldRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "additionalFlagsOutbound") {
return _additionalFlagsRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "parentInfoOutbound") {
return _parentInfoRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "faceTrackerOutbound") {
return _faceTrackerRateOutbound.rate() / BYTES_PER_KILOBIT;
} else if (rateName == "jointDataOutbound") {
return _jointDataRateOutbound.rate() / BYTES_PER_KILOBIT;
}
return 0.0f;
}

@@ -1842,8 +1915,7 @@ QJsonObject AvatarData::toJson() const {
return root;
}

void AvatarData::fromJson(const QJsonObject& json) {
void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
int version;
if (json.contains(JSON_AVATAR_VERSION)) {
version = json[JSON_AVATAR_VERSION].toInt();

@@ -1865,7 +1937,7 @@ void AvatarData::fromJson(const QJsonObject& json) {

if (json.contains(JSON_AVATAR_BODY_MODEL)) {
auto bodyModelURL = json[JSON_AVATAR_BODY_MODEL].toString();
if (bodyModelURL != getSkeletonModelURL().toString()) {
if (useFrameSkeleton && bodyModelURL != getSkeletonModelURL().toString()) {
setSkeletonModelURL(bodyModelURL);
}
}

@@ -1958,8 +2030,9 @@ QByteArray AvatarData::toFrame(const AvatarData& avatar) {
}


void AvatarData::fromFrame(const QByteArray& frameData, AvatarData& result) {
void AvatarData::fromFrame(const QByteArray& frameData, AvatarData& result, bool useFrameSkeleton) {
QJsonDocument doc = QJsonDocument::fromBinaryData(frameData);

#ifdef WANT_JSON_DEBUG
{
QJsonObject obj = doc.object();

@@ -1967,7 +2040,7 @@ void AvatarData::fromFrame(const QByteArray& frameData, AvatarData& result) {
qCDebug(avatars).noquote() << QJsonDocument(obj).toJson(QJsonDocument::JsonFormat::Indented);
}
#endif
result.fromJson(doc.object());
result.fromJson(doc.object(), useFrameSkeleton);
}

float AvatarData::getBodyYaw() const {

@@ -329,7 +329,7 @@ public:

static const QString FRAME_NAME;

static void fromFrame(const QByteArray& frameData, AvatarData& avatar);
static void fromFrame(const QByteArray& frameData, AvatarData& avatar, bool useFrameSkeleton = true);
static QByteArray toFrame(const AvatarData& avatar);

AvatarData();

@@ -380,8 +380,27 @@ public:
void nextAttitude(glm::vec3 position, glm::quat orientation); // Can be safely called at any time.
virtual void updateAttitude() {} // Tell skeleton mesh about changes

glm::quat getHeadOrientation() const { return _headData->getOrientation(); }
void setHeadOrientation(const glm::quat& orientation) { _headData->setOrientation(orientation); }
glm::quat getHeadOrientation() {
lazyInitHeadData();
return _headData->getOrientation();
}
void setHeadOrientation(const glm::quat& orientation) {
if (_headData) {
_headData->setOrientation(orientation);
}
}

void setLookAtPosition(const glm::vec3& lookAtPosition) {
if (_headData) {
_headData->setLookAtPosition(lookAtPosition);
}
}

void setBlendshapeCoefficients(const QVector<float>& blendshapeCoefficients) {
if (_headData) {
_headData->setBlendshapeCoefficients(blendshapeCoefficients);
}
}

// access to Head().set/getMousePitch (degrees)
float getHeadPitch() const { return _headData->getBasePitch(); }

@@ -513,7 +532,7 @@ public:
TransformPointer getRecordingBasis() const;
void setRecordingBasis(TransformPointer recordingBasis = TransformPointer());
QJsonObject toJson() const;
void fromJson(const QJsonObject& json);
void fromJson(const QJsonObject& json, bool useFrameSkeleton = true);

glm::vec3 getClientGlobalPosition() { return _globalPosition; }
glm::vec3 getGlobalBoundingBoxCorner() { return _globalPosition + _globalBoundingBoxOffset - _globalBoundingBoxDimensions; }

@@ -528,7 +547,7 @@ public:
Q_INVOKABLE glm::mat4 getControllerLeftHandMatrix() const;
Q_INVOKABLE glm::mat4 getControllerRightHandMatrix() const;

float getDataRate(const QString& rateName = QString(""));
Q_INVOKABLE float getDataRate(const QString& rateName = QString(""));

int getJointCount() { return _jointData.size(); }

@@ -596,7 +615,7 @@ protected:
bool _forceFaceTrackerConnected;
bool _hasNewJointData; // set in AvatarData, cleared in Avatar

HeadData* _headData;
HeadData* _headData { nullptr };

QUrl _skeletonModelURL;
bool _firstSkeletonCheck { true };

@@ -659,6 +678,21 @@ protected:
RateCounter<> _faceTrackerRate;
RateCounter<> _jointDataRate;

// Some rate data for outgoing data
RateCounter<> _globalPositionRateOutbound;
RateCounter<> _localPositionRateOutbound;
RateCounter<> _avatarBoundingBoxRateOutbound;
RateCounter<> _avatarOrientationRateOutbound;
RateCounter<> _avatarScaleRateOutbound;
RateCounter<> _lookAtPositionRateOutbound;
RateCounter<> _audioLoudnessRateOutbound;
RateCounter<> _sensorToWorldRateOutbound;
RateCounter<> _additionalFlagsRateOutbound;
RateCounter<> _parentInfoRateOutbound;
RateCounter<> _faceTrackerRateOutbound;
RateCounter<> _jointDataRateOutbound;


glm::vec3 _globalBoundingBoxDimensions;
glm::vec3 _globalBoundingBoxOffset;

@@ -537,6 +537,8 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
FBXGeometry* geometryPtr = new FBXGeometry;
FBXGeometry& geometry = *geometryPtr;

geometry.originalURL = url;

float unitScaleFactor = 1.0f;
glm::vec3 ambientColor;
QString hifiGlobalNodeID;

@@ -288,6 +288,7 @@ class FBXGeometry {
public:
using Pointer = std::shared_ptr<FBXGeometry>;

QString originalURL;
QString author;
QString applicationName; ///< the name of the application that generated the model

@@ -62,7 +62,8 @@ template<class T> QVariant readBinaryArray(QDataStream& in, int& position) {
position += sizeof(T) * arrayLength;
in.readRawData(arrayData.data(), arrayData.size());
}
if (!arrayData.isEmpty()) {
if (arrayData.size() > 0) {
memcpy(&values[0], arrayData.constData(), arrayData.size());
}
} else {

@@ -604,6 +604,9 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
qFatal("Could not load object as root item");
return nullptr;
}

connect(newItem, SIGNAL(sendToScript(QVariant)), this, SIGNAL(fromQml(QVariant)));

// The root item is ready. Associate it with the window.
_rootItem = newItem;
_rootItem->setParentItem(_quickWindow->contentItem());

@@ -952,4 +955,13 @@ void OffscreenQmlSurface::emitWebEvent(const QVariant& message) {
}
}

void OffscreenQmlSurface::sendToQml(const QVariant& message) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "emitQmlEvent", Qt::QueuedConnection, Q_ARG(QVariant, message));
} else if (_rootItem) {
// call fromScript method on qml root
QMetaObject::invokeMethod(_rootItem, "fromScript", Qt::QueuedConnection, Q_ARG(QVariant, message));
}
}

#include "OffscreenQmlSurface.moc"

@@ -107,6 +107,11 @@ signals:
void scriptEventReceived(const QVariant& message);
void webEventReceived(const QVariant& message);

// qml event bridge
public slots:
void sendToQml(const QVariant& message);
signals:
void fromQml(QVariant message);

protected:
bool filterEnabled(QObject* originalDestination, QEvent* event) const;

@@ -183,6 +183,18 @@ void TabletProxy::setQmlTabletRoot(QQuickItem* qmlTabletRoot, QObject* qmlOffscr
_qmlTabletRoot = qmlTabletRoot;
if (_qmlTabletRoot && _qmlOffscreenSurface) {
QObject::connect(_qmlOffscreenSurface, SIGNAL(webEventReceived(QVariant)), this, SIGNAL(webEventReceived(QVariant)));

// forward qml surface events to interface js
connect(dynamic_cast<OffscreenQmlSurface*>(_qmlOffscreenSurface), &OffscreenQmlSurface::fromQml, [this](QVariant message) {
if (message.canConvert<QJSValue>()) {
emit fromQml(qvariant_cast<QJSValue>(message).toVariant());
} else if (message.canConvert<QString>()) {
emit fromQml(message.toString());
} else {
qWarning() << "fromQml: Unsupported message type " << message;
}
});

gotoHomeScreen();

QMetaObject::invokeMethod(_qmlTabletRoot, "setUsername", Q_ARG(const QVariant&, QVariant(getUsername())));

@@ -197,6 +209,7 @@ void TabletProxy::setQmlTabletRoot(QQuickItem* qmlTabletRoot, QObject* qmlOffscr
} else {
removeButtonsFromHomeScreen();
_state = State::Uninitialized;
emit screenChanged(QVariant("Closed"), QVariant(""));
}
}

@@ -208,10 +221,21 @@ void TabletProxy::gotoMenuScreen() {
QObject::connect(loader, SIGNAL(loaded()), this, SLOT(addButtonsToMenuScreen()), Qt::DirectConnection);
QMetaObject::invokeMethod(_qmlTabletRoot, "loadSource", Q_ARG(const QVariant&, QVariant(VRMENU_SOURCE_URL)));
_state = State::Menu;
emit screenChanged(QVariant("Menu"), QVariant(VRMENU_SOURCE_URL));
}
}
}

void TabletProxy::loadQMLSource(const QVariant& path) {
if (_qmlTabletRoot) {
if (_state != State::QML) {
removeButtonsFromHomeScreen();
QMetaObject::invokeMethod(_qmlTabletRoot, "loadSource", Q_ARG(const QVariant&, path));
_state = State::QML;
emit screenChanged(QVariant("QML"), path);
}
}
}
void TabletProxy::gotoHomeScreen() {
if (_qmlTabletRoot) {
if (_state != State::Home) {

@@ -220,6 +244,7 @@ void TabletProxy::gotoHomeScreen() {
QMetaObject::invokeMethod(_qmlTabletRoot, "loadSource", Q_ARG(const QVariant&, QVariant(TABLET_SOURCE_URL)));
QMetaObject::invokeMethod(_qmlTabletRoot, "playButtonClickSound");
_state = State::Home;
emit screenChanged(QVariant("Home"), QVariant(TABLET_SOURCE_URL));
}
}
}

@@ -236,6 +261,7 @@ void TabletProxy::gotoWebScreen(const QString& url, const QString& injectedJavaS
if (_state != State::Web) {
QMetaObject::invokeMethod(_qmlTabletRoot, "loadSource", Q_ARG(const QVariant&, QVariant(WEB_VIEW_SOURCE_URL)));
_state = State::Web;
emit screenChanged(QVariant("Web"), QVariant(url));
}
QMetaObject::invokeMethod(_qmlTabletRoot, "loadWebUrl", Q_ARG(const QVariant&, QVariant(url)),
Q_ARG(const QVariant&, QVariant(injectedJavaScriptUrl)));

@@ -298,6 +324,12 @@ void TabletProxy::emitScriptEvent(QVariant msg) {
}
}

void TabletProxy::sendToQml(QVariant msg) {
if (_qmlOffscreenSurface) {
QMetaObject::invokeMethod(_qmlOffscreenSurface, "sendToQml", Qt::AutoConnection, Q_ARG(QVariant, msg));
}
}

void TabletProxy::addButtonsToHomeScreen() {
auto tablet = getQmlTablet();
if (!tablet) {

@@ -89,6 +89,8 @@ public:
Q_INVOKABLE void gotoWebScreen(const QString& url);
Q_INVOKABLE void gotoWebScreen(const QString& url, const QString& injectedJavaScriptUrl);

Q_INVOKABLE void loadQMLSource(const QVariant& path);

/**jsdoc
* Creates a new button, adds it to this and returns it.
* @function TabletProxy#addButton

@@ -120,6 +122,13 @@ public:
*/
Q_INVOKABLE void emitScriptEvent(QVariant msg);

/**jsdoc
* Used to send an event to the qml embedded in the tablet
* @function TabletProxy#sendToQml
* @param msg {object|string}
*/
Q_INVOKABLE void sendToQml(QVariant msg);

Q_INVOKABLE bool onHomeScreen();

QObject* getTabletSurface();

@@ -137,6 +146,22 @@ signals:
*/
void webEventReceived(QVariant msg);

/**jsdoc
* Signaled when this tablet receives an event from the qml embedded in the tablet
* @function TabletProxy#fromQml
* @param msg {object|string}
* @returns {Signal}
*/
void fromQml(QVariant msg);

/**jsdoc
* Signaled when this tablet's screen changes.
* @function TabletProxy#screenChanged
* @param type {string} - "Home", "Web", "Menu", "QML", "Closed"
* @param url {string} - only valid for Web and QML.
*/
void screenChanged(QVariant type, QVariant url);

private slots:
void addButtonsToHomeScreen();
void addButtonsToMenuScreen();

@@ -149,7 +174,7 @@ protected:
QQuickItem* _qmlTabletRoot { nullptr };
QObject* _qmlOffscreenSurface { nullptr };

enum class State { Uninitialized, Home, Web, Menu };
enum class State { Uninitialized, Home, Web, Menu, QML };
State _state { State::Uninitialized };
};

@@ -0,0 +1,183 @@
//
// BetterClientSimulationBotFromRecording.js
// examples
//
// Created by Brad Hefta-Gaub on 2/6/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//


var WANT_DEBUGGING = false;

randFloat = function(low, high) {
return low + Math.random() * (high - low);
}

var AVATARS_ARRAY = [
"http://mpassets.highfidelity.com/0c2c264b-2fd2-46a4-bf80-de681881f66b-v1/F_MotRac.fst",
"http://mpassets.highfidelity.com/bd80a6d7-7173-489e-87c6-f7ee56e65530-v1/M_RetFut.fst",
"http://mpassets.highfidelity.com/47c8d706-d486-4c2d-afcc-70d4e1e25117-v1/M_RetSpaSuit.fst",
"http://mpassets.highfidelity.com/548d0792-0bac-4933-bbfc-57d71912d77e-v1/M_OutMer.fst",
"http://mpassets.highfidelity.com/13277c09-892f-4a5e-b9a5-8994a37d68bf-v1/F_WasWar.fst",
"http://mpassets.highfidelity.com/2d384111-0f0e-42e2-b800-66bfcab4aefb-v1/F_VooQue.fst",
"http://mpassets.highfidelity.com/57e4d1cd-9f52-4c95-9051-326f9bb114ea-v1/F_SteAvi.fst",
"http://mpassets.highfidelity.com/da2ad4cd-47d4-41da-b764-41f39ff77e30-v1/F_JerGir.fst",
"http://mpassets.highfidelity.com/96c747ab-f71b-44ee-8eb9-d19fc9593dda-v1/F_CatBur.fst",
"http://mpassets.highfidelity.com/ede82c38-c66e-4f67-9e0b-0bb0782db18f-v1/M_WesOut.fst",
"http://mpassets.highfidelity.com/8872ae86-a763-4db3-8373-d27514c1481e-v1/M_VinAvi.fst",
"http://mpassets.highfidelity.com/faf505f1-4fd1-4ed2-8909-816af246c48f-v1/M_VicGen.fst",
"http://mpassets.highfidelity.com/d807a7d2-5122-4436-a6f9-3173c94d1c49-v1/M_SuaGen.fst",
"http://mpassets.highfidelity.com/1dd41735-06f4-45a3-9ec0-d05215ace77b-v1/M_MarSen.fst",
"http://mpassets.highfidelity.com/2cad3894-8ab3-4ba5-a723-0234f93fbd6a-v1/M_BowBea.fst",
"http://mpassets.highfidelity.com/cf0eb1be-9ec7-4756-8eaf-ac8f3ec09eba-v1/F_ClaDef.fst",
"http://mpassets.highfidelity.com/0cedeca3-c1a4-4be9-9fd5-dad716afcc7e-v1/F_Cyria.fst",
"http://mpassets.highfidelity.com/dc55803b-9215-47dd-9408-eb835dac4082-v1/F_ParGir.fst",
"http://mpassets.highfidelity.com/775a8fb3-cfe7-494d-b603-a0a2d6910e55-v1/F_VinCov.fst",
"http://mpassets.highfidelity.com/eba0d8f8-aa72-4a6b-ab64-4d3fd4695b20-v1/F_VogHei.fst",
"http://mpassets.highfidelity.com/4f400c78-38f9-42af-b03b-11b5451d41b9-v1/M_MidRog.fst",
"http://mpassets.highfidelity.com/ad774d79-13f1-46e2-87c9-de49a261b264-v1/F_GunSli.fst",
"http://mpassets.highfidelity.com/5acbaefa-5455-49a2-8d40-89d12aa393ca-v1/M_KniWol.fst",
"http://mpassets.highfidelity.com/aaa1b0a8-3e1b-492a-9aee-600e5dc907db-v1/F_RetSciSuit.fst",
"http://mpassets.highfidelity.com/d8da10b6-25c1-40e2-9a66-369316c722d7-v1/F_AniSuit.fst",
"http://mpassets.highfidelity.com/f3fbb9f4-e159-49ed-ac32-03af9056b17e-v1/matthew.fst",
"http://mpassets.highfidelity.com/0c954ba0-4d87-4353-b65e-c45509f85658-v1/priscilla.fst",
"http://mpassets.highfidelity.com/e76946cc-c272-4adf-9bb6-02cde0a4b57d-v1/9e8c5c42a0cbd436962d6bd36f032ab3.fst",
"http://mpassets.highfidelity.com/72e083ee-194d-4113-9c61-0591d8257493-v1/skeleton_Rigged.fst",
"http://mpassets.highfidelity.com/f14bf7c9-49a1-4249-988a-0a577ed78957-v1/beingOfLight.fst",
"http://mpassets.highfidelity.com/1b7e1e7c-6c0b-4f20-9cd0-1d5ccedae620-v1/bb64e937acf86447f6829767e958073c.fst",
"http://mpassets.highfidelity.com/67d7c7aa-c300-4d03-85f4-86480130eaa5-v1/F_StarCrew.fst",
"http://mpassets.highfidelity.com/d293ef06-c659-467a-9288-c3cbaff0372a-v1/arya_avatar.fst",
"http://mpassets.highfidelity.com/faf249d5-12a8-48e2-a08e-fb0c33087011-v1/F_Ranger.fst",
"http://mpassets.highfidelity.com/b4502145-15eb-4023-b7d6-a81c5cbf6abf-v1/F_FitTra.fst",
"http://mpassets.highfidelity.com/548d0792-0bac-4933-bbfc-57d71912d77e-v1/M_OutMer.fst",
"http://mpassets.highfidelity.com/caa61e5d-5629-4165-81d8-6a7eb55e942d-v1/F_DeaSur.fst",
"http://mpassets.highfidelity.com/2cad3894-8ab3-4ba5-a723-0234f93fbd6a-v1/M_BowBea.fst",
"http://mpassets.highfidelity.com/fd4fa45a-9d2a-463e-a484-f9d1b3bba724-v1/M_BeaWar.fst",
"http://mpassets.highfidelity.com/367a5b60-8a92-4d56-a152-a00f3086f02b-v1/M_Espio.fst",
"http://mpassets.highfidelity.com/ab466729-31da-4b4c-a33c-366f7c1d38e5-v1/M_MMAFig.fst",
"http://mpassets.highfidelity.com/b0795a0c-493d-4abd-b4cc-5f32e6d6df46-v1/M_SalMer.fst",
"http://mpassets.highfidelity.com/0a1d44bf-a988-4199-b29e-a532ab85a2e8-v1/M_StaShi.fst",
"http://mpassets.highfidelity.com/d807a7d2-5122-4436-a6f9-3173c94d1c49-v1/M_SuaGen.fst",
"http://mpassets.highfidelity.com/cb20212c-36f2-4d41-bdad-132361ca6ff4-v1/M_TreTee.fst",
"http://mpassets.highfidelity.com/830988dc-619a-4e88-96e1-a19fa0aaa30f-v1/M_UrbEnf.fst",
"http://mpassets.highfidelity.com/faf505f1-4fd1-4ed2-8909-816af246c48f-v1/M_VicGen.fst",
"http://mpassets.highfidelity.com/883ac86f-dd29-4676-8bda-7dd52fb6465f-v1/M_WasWan.fst",
"http://mpassets.highfidelity.com/ede82c38-c66e-4f67-9e0b-0bb0782db18f-v1/M_WesOut.fst",
"http://mpassets.highfidelity.com/04c9a1e9-0390-4a7f-b6c6-5f135c19e3fb-v1/F_ArmTro.fst",
"http://mpassets.highfidelity.com/e863348f-a777-4f36-86e6-af6e65ffa161-v1/F_BloSam.fst",
"http://mpassets.highfidelity.com/cf0eb1be-9ec7-4756-8eaf-ac8f3ec09eba-v1/F_ClaDef.fst",
"http://mpassets.highfidelity.com/0cedeca3-c1a4-4be9-9fd5-dad716afcc7e-v1/F_Cyria.fst",
"http://mpassets.highfidelity.com/da2ad4cd-47d4-41da-b764-41f39ff77e30-v1/F_JerGir.fst",
"http://mpassets.highfidelity.com/534d42f8-ec13-4145-929f-5c8facac2fb7-v1/F_LegFig.fst",
"http://mpassets.highfidelity.com/dc55803b-9215-47dd-9408-eb835dac4082-v1/F_ParGir.fst",
"http://mpassets.highfidelity.com/f823e831-d8c4-4191-a3bd-427e406e69f9-v1/F_Shinjuku.fst",
"http://mpassets.highfidelity.com/eba0d8f8-aa72-4a6b-ab64-4d3fd4695b20-v1/F_VogHei.fst",
"http://mpassets.highfidelity.com/13277c09-892f-4a5e-b9a5-8994a37d68bf-v1/F_WasWar.fst",
"http://mpassets.highfidelity.com/9b589fbb-59e4-47a9-8b3f-bf8d3a0bd1d8-v1/M_LawSur.fst",
"http://mpassets.highfidelity.com/4f400c78-38f9-42af-b03b-11b5451d41b9-v1/M_MidRog.fst",
"http://mpassets.highfidelity.com/c90d755d-0456-48fd-b98c-09c4d85cd481-v1/M_MouOff.fst",
"http://mpassets.highfidelity.com/c2ed3b9a-b3a9-4424-9fd2-8a798209f32b-v1/M_PerTra.fst",
"http://mpassets.highfidelity.com/c48928ac-7657-41f4-bbdc-9b47385736ab-v1/M_SpaMar.fst",
"http://mpassets.highfidelity.com/d029ae8d-2905-4eb7-ba46-4bd1b8cb9d73-v1/4618d52e711fbb34df442b414da767bb.fst",
"http://mpassets.highfidelity.com/c85c497d-c87b-42b1-9bbf-5405e05a0ad3-v1/M_ArmSol.fst",
"http://mpassets.highfidelity.com/1dd41735-06f4-45a3-9ec0-d05215ace77b-v1/M_MarSen.fst",
"http://mpassets.highfidelity.com/bd80a6d7-7173-489e-87c6-f7ee56e65530-v1/M_RetFut.fst",
"http://mpassets.highfidelity.com/8872ae86-a763-4db3-8373-d27514c1481e-v1/M_VinAvi.fst",
"http://mpassets.highfidelity.com/f798d926-9a9e-481a-b298-af0e45451252-v1/F_Assassin.fst",
"http://mpassets.highfidelity.com/ad774d79-13f1-46e2-87c9-de49a261b264-v1/F_GunSli.fst",
"http://mpassets.highfidelity.com/aaa1b0a8-3e1b-492a-9aee-600e5dc907db-v1/F_RetSciSuit.fst"
];

var AVATAR_URL = AVATARS_ARRAY[Math.floor(Math.random() * AVATARS_ARRAY.length)];
print("RANDOM AVATAR SELECTED:" + AVATAR_URL);

var RECORDINGS_ARRAY = [
"http://hifi-content.s3.amazonaws.com/DomainContent/Event%20/NPC%27s/waiting6.hfr",
"http://hifi-content.s3.amazonaws.com/DomainContent/Event%20/NPC%27s/waiting7.hfr",
"http://hifi-content.s3.amazonaws.com/DomainContent/Event%20/NPC%27s/waiting10.hfr",
"http://hifi-content.s3.amazonaws.com/DomainContent/Event%20/NPC%27s/bot1.hfr",
"http://hifi-content.s3.amazonaws.com/DomainContent/Event%20/NPC%27s/bot2.hfr",
"http://hifi-content.s3.amazonaws.com/DomainContent/Event%20/NPC%27s/bot3.hfr",
"http://hifi-content.s3.amazonaws.com/DomainContent/Event%20/NPC%27s/bot4.hfr"
];

var RECORDING_URL = RECORDINGS_ARRAY[Math.floor(Math.random() * RECORDINGS_ARRAY.length)];
print("RANDOM RECORDING SELECTED:" + RECORDING_URL);

// not quite what I want...
var LOCATIONS_ARRAY = [
{ min_x: 97.0, max_x: 103.0, y:-0.6, min_z: 30.8, max_z: 40 },
{ min_x: 92.7, max_x: 106.6, y:-0.3, min_z: 43 , max_z: 43 },
{ min_x: 92.7, max_x: 106.6, y: 0.3, min_z: 45 , max_z: 45 },
{ min_x: 92.7, max_x: 106.6, y: 1 , min_z: 47 , max_z: 47 },
{ min_x: 92.7, max_x: 106.6, y: 1.7, min_z: 51.9, max_z: 51.9 },
];

var LOCATION_PARAMS = LOCATIONS_ARRAY[Math.floor(Math.random() * LOCATIONS_ARRAY.length)];

var LOCATION = { x: randFloat(LOCATION_PARAMS.min_x, LOCATION_PARAMS.max_x), y: LOCATION_PARAMS.y, z: randFloat(LOCATION_PARAMS.min_z, LOCATION_PARAMS.max_z) };

Vec3.print("RANDOM LOCATION SELECTED:", LOCATION);

var playFromCurrentLocation = true;
var loop = true;


// Set position here if playFromCurrentLocation is true
Avatar.position = LOCATION;
Avatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
Avatar.scale = 1.0;
Agent.isAvatar = true;

// make the agent "listen" to the audio stream to cause additional audio-mixer load, technically this isn't needed when you're playing a recording
// but if you switch to a non-recording bot, you will need this, so we can leave this.
Agent.isListeningToAudioStream = true;
Avatar.skeletonModelURL = AVATAR_URL; // FIXME - currently setting an avatar while playing a recording doesn't work it will be ignored

Recording.loadRecording(RECORDING_URL);

count = 300; // This is necessary to wait for the audio mixer to connect
function update(event) {
if (count > 0) {
count--;
return;
}
if (count == 0) {
Recording.setPlayFromCurrentLocation(playFromCurrentLocation);
Recording.setPlayerLoop(loop);
Recording.setPlayerUseDisplayName(true);
Recording.setPlayerUseAttachments(true);
Recording.setPlayerUseHeadModel(false);
Recording.setPlayerUseSkeletonModel(false); // FIXME - this would allow you to override the recording avatar, but that's not currently working
Recording.startPlaying();
Vec3.print("Playing from ", Avatar.position);
count--;
} else if (WANT_DEBUGGING) {
count = 100;
Vec3.print("Avatar at: ", Avatar.position);
Quat.print("Avatar head orientation: ", Avatar.headOrientation);
print("outbound:"
+" GP: " + Avatar.getDataRate("globalPositionOutbound").toFixed(2) + "\n"
+" LP: " + Avatar.getDataRate("localPositionOutbound").toFixed(2) + "\n"
+" BB: " + Avatar.getDataRate("avatarBoundingBoxOutbound").toFixed(2) + "\n"
+" AO: " + Avatar.getDataRate("avatarOrientationOutbound").toFixed(2) + "\n"
+" AS: " + Avatar.getDataRate("avatarScaleOutbound").toFixed(2) + "\n"
+" LA: " + Avatar.getDataRate("lookAtPositionOutbound").toFixed(2) + "\n"
+" AL: " + Avatar.getDataRate("audioLoudnessOutbound").toFixed(2) + "\n"
+" SW: " + Avatar.getDataRate("sensorToWorkMatrixOutbound").toFixed(2) + "\n"
+" AF: " + Avatar.getDataRate("additionalFlagsOutbound").toFixed(2) + "\n"
+" PI: " + Avatar.getDataRate("parentInfoOutbound").toFixed(2) + "\n"
+" FT: " + Avatar.getDataRate("faceTrackerOutbound").toFixed(2) + "\n"
+" JD: " + Avatar.getDataRate("jointDataOutbound").toFixed(2));
}

if (!Recording.isPlaying()) {
Script.update.disconnect(update);
}
}

Script.update.connect(update);

@@ -26,6 +26,7 @@ Script.include("/~/system/libraries/controllers.js");
//
// add lines where the hand ray picking is happening
//

var WANT_DEBUG = false;
var WANT_DEBUG_STATE = false;
var WANT_DEBUG_SEARCH_NAME = null;

@@ -752,6 +753,7 @@ function MyController(hand) {
this.previouslyUnhooked = {};

this.shouldScale = false;
this.isScalingAvatar = false;

// handPosition is where the avatar's hand appears to be, in-world.
this.getHandPosition = function () {

@@ -824,11 +826,7 @@ function MyController(hand) {

this.update = function(deltaTime, timestamp) {
this.updateSmoothedTrigger();
// If both trigger and grip buttons squeezed and nothing is held, rescale my avatar!
if (this.hand === RIGHT_HAND && this.state === STATE_SEARCHING &&
this.getOtherHandController().state === STATE_SEARCHING) {
this.maybeScaleMyAvatar();
}
this.maybeScaleMyAvatar();

if (this.ignoreInput()) {

@@ -2595,22 +2593,29 @@ function MyController(hand) {
};

this.maybeScaleMyAvatar = function() {
if (!myAvatarScalingEnabled) {
if (!myAvatarScalingEnabled || this.shouldScale || this.hand === LEFT_HAND) {
// If scaling disabled, or if we are currently scaling an entity, don't scale avatar
// and only rescale avatar for one hand (so we're not doing it twice)
return;
}

if (!this.shouldScale) {
// Only scale avatar if both triggers and grips are squeezed
var tryingToScale = this.secondarySqueezed() && this.getOtherHandController().secondarySqueezed() &&
this.triggerSmoothedSqueezed() && this.getOtherHandController().triggerSmoothedSqueezed();

if (!this.isScalingAvatar) {
// If both secondary triggers squeezed, start scaling
if (this.secondarySqueezed() && this.getOtherHandController().secondarySqueezed()) {
if (tryingToScale) {
this.scalingStartDistance = Vec3.length(Vec3.subtract(this.getHandPosition(),
this.getOtherHandController().getHandPosition()));
this.scalingStartAvatarScale = MyAvatar.scale;
this.shouldScale = true;
this.isScalingAvatar = true;
}
} else if (!this.secondarySqueezed() || !this.getOtherHandController().secondarySqueezed()) {
this.shouldScale = false;
} else if (!tryingToScale) {
this.isScalingAvatar = false;
}
if (this.shouldScale) {
if (this.isScalingAvatar) {
var scalingCurrentDistance = Vec3.length(Vec3.subtract(this.getHandPosition(),
this.getOtherHandController().getHandPosition()));
var newAvatarScale = (scalingCurrentDistance / this.scalingStartDistance) * this.scalingStartAvatarScale;

@@ -203,8 +203,7 @@ var pal = new OverlayWindow({
height: 640,
visible: false
});
pal.fromQml.connect(function (message) { // messages are {method, params}, like json-rpc. See also sendToQml.
print('From PAL QML:', JSON.stringify(message));
function fromQml(message) { // messages are {method, params}, like json-rpc. See also sendToQml.
switch (message.method) {
case 'selected':
selectedIds = message.params;

@@ -259,7 +258,15 @@ pal.fromQml.connect(function (message) { // messages are {method, params}, like
default:
print('Unrecognized message from Pal.qml:', JSON.stringify(message));
}
});
}

function sendToQml(message) {
if (Settings.getValue("HUDUIEnabled")) {
pal.sendToQml(message);
} else {
tablet.sendToQml(message);
}
}

//
// Main operations.

@@ -298,10 +305,10 @@ function populateUserList(selectData) {
data.push(avatarPalDatum);
print('PAL data:', JSON.stringify(avatarPalDatum));
});
pal.sendToQml({ method: 'users', params: data });
sendToQml({ method: 'users', params: data });
if (selectData) {
selectData[2] = true;
pal.sendToQml({ method: 'select', params: selectData });
sendToQml({ method: 'select', params: selectData });
}
}

@@ -322,7 +329,7 @@ function usernameFromIDReply(id, username, machineFingerprint, isAdmin) {
}
print('Username Data:', JSON.stringify(data));
// Ship the data off to QML
pal.sendToQml({ method: 'updateUsername', params: data });
sendToQml({ method: 'updateUsername', params: data });
}

var pingPong = true;

@@ -396,7 +403,7 @@ function handleClick(pickRay) {
ExtendedOverlay.applyPickRay(pickRay, function (overlay) {
// Don't select directly. Tell qml, who will give us back a list of ids.
var message = {method: 'select', params: [[overlay.key], !overlay.selected, false]};
pal.sendToQml(message);
sendToQml(message);
return true;
});
}

@@ -492,6 +499,7 @@ if (Settings.getValue("HUDUIEnabled")) {
visible: true,
alpha: 0.9
});
pal.fromQml.connect(fromQml);
} else {
tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
button = tablet.addButton({

@@ -499,7 +507,9 @@ if (Settings.getValue("HUDUIEnabled")) {
icon: "icons/tablet-icons/people-i.svg",
sortOrder: 7
});
tablet.fromQml.connect(fromQml);
}

var isWired = false;
var audioTimer;
var AUDIO_LEVEL_UPDATE_INTERVAL_MS = 100; // 10hz for now (change this and change the AVERAGING_RATIO too)
|
|||
Users.requestsDomainListData = false;
|
||||
}
|
||||
function onClicked() {
|
||||
if (!pal.visible) {
|
||||
if (Settings.getValue("HUDUIEnabled")) {
|
||||
if (!pal.visible) {
|
||||
Users.requestsDomainListData = true;
|
||||
populateUserList();
|
||||
pal.raise();
|
||||
isWired = true;
|
||||
Script.update.connect(updateOverlays);
|
||||
Controller.mousePressEvent.connect(handleMouseEvent);
|
||||
Controller.mouseMoveEvent.connect(handleMouseMoveEvent);
|
||||
triggerMapping.enable();
|
||||
triggerPressMapping.enable();
|
||||
audioTimer = createAudioInterval(conserveResources ? AUDIO_LEVEL_CONSERVED_UPDATE_INTERVAL_MS : AUDIO_LEVEL_UPDATE_INTERVAL_MS);
|
||||
} else {
|
||||
off();
|
||||
}
|
||||
pal.setVisible(!pal.visible);
|
||||
} else {
|
||||
tablet.loadQMLSource("../Pal.qml");
|
||||
Users.requestsDomainListData = true;
|
||||
populateUserList();
|
||||
pal.raise();
|
||||
isWired = true;
|
||||
Script.update.connect(updateOverlays);
|
||||
Controller.mousePressEvent.connect(handleMouseEvent);
|
||||
|
@ -529,10 +555,7 @@ function onClicked() {
|
|||
triggerMapping.enable();
|
||||
triggerPressMapping.enable();
|
||||
audioTimer = createAudioInterval(conserveResources ? AUDIO_LEVEL_CONSERVED_UPDATE_INTERVAL_MS : AUDIO_LEVEL_UPDATE_INTERVAL_MS);
|
||||
} else {
|
||||
off();
|
||||
}
|
||||
pal.setVisible(!pal.visible);
|
||||
}
|
||||
|
||||
//
|
||||
|
@ -550,7 +573,7 @@ function receiveMessage(channel, messageString, senderID) {
|
|||
if (!pal.visible) {
|
||||
onClicked();
|
||||
}
|
||||
pal.sendToQml(message); // Accepts objects, not just strings.
|
||||
sendToQml(message); // Accepts objects, not just strings.
|
||||
break;
|
||||
default:
|
||||
print('Unrecognized PAL message', messageString);
|
||||
|
@ -607,13 +630,13 @@ function createAudioInterval(interval) {
|
|||
var userId = id || 0;
|
||||
param[userId] = level;
|
||||
});
|
||||
pal.sendToQml({method: 'updateAudioLevel', params: param});
|
||||
sendToQml({method: 'updateAudioLevel', params: param});
|
||||
}, interval);
|
||||
}
|
||||
|
||||
function avatarDisconnected(nodeID) {
|
||||
// remove from the pal list
|
||||
pal.sendToQml({method: 'avatarDisconnected', params: [nodeID]});
|
||||
sendToQml({method: 'avatarDisconnected', params: [nodeID]});
|
||||
}
|
||||
//
|
||||
// Button state.
|
||||
|
@ -624,11 +647,20 @@ function onVisibleChanged() {
|
|||
button.clicked.connect(onClicked);
|
||||
pal.visibleChanged.connect(onVisibleChanged);
|
||||
pal.closed.connect(off);
|
||||
|
||||
if (!Settings.getValue("HUDUIEnabled")) {
|
||||
tablet.screenChanged.connect(function (type, url) {
|
||||
if (type !== "QML" || url !== "../Pal.qml") {
|
||||
off();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Users.usernameFromIDReply.connect(usernameFromIDReply);
|
||||
Users.avatarDisconnected.connect(avatarDisconnected);
|
||||
|
||||
function clearLocalQMLDataAndClosePAL() {
|
||||
pal.sendToQml({ method: 'clearLocalQMLData' });
|
||||
sendToQml({ method: 'clearLocalQMLData' });
|
||||
if (pal.visible) {
|
||||
onClicked(); // Close the PAL
|
||||
}
|
||||
|
|