Merge branch 'master' into fix_toolbar_apps_switch

Mirror of https://github.com/lubosz/overte.git
Commit: 2d4396ac8e
25 changed files with 249 additions and 195 deletions
Test Plan 2.docx (new binary file, not shown)
@@ -22,6 +22,9 @@ macro(GENERATE_INSTALLERS)
   set(CPACK_PACKAGE_FILE_NAME "HighFidelity-Beta-${BUILD_VERSION}")
   set(CPACK_NSIS_DISPLAY_NAME ${_DISPLAY_NAME})
   set(CPACK_NSIS_PACKAGE_NAME ${_DISPLAY_NAME})
+  if (PR_BUILD)
+    set(CPACK_NSIS_COMPRESSOR "/SOLID bzip2")
+  endif ()
   set(CPACK_PACKAGE_INSTALL_DIRECTORY ${_DISPLAY_NAME})

   if (WIN32)
@@ -2323,7 +2323,7 @@ void Application::paintGL() {
         }
     } else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
         if (isHMDMode()) {
-            auto mirrorBodyOrientation = myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f));
+            auto mirrorBodyOrientation = myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f));

             glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
             // Mirror HMD yaw and roll
@@ -2345,7 +2345,7 @@ void Application::paintGL() {
                 + mirrorBodyOrientation * glm::vec3(0.0f, 0.0f, 1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
                 + mirrorBodyOrientation * hmdOffset);
         } else {
-            _myCamera.setOrientation(myAvatar->getWorldAlignedOrientation()
+            _myCamera.setOrientation(myAvatar->getOrientation()
                 * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
             _myCamera.setPosition(myAvatar->getDefaultEyePosition()
                 + glm::vec3(0, _raiseMirror * myAvatar->getUniformScale(), 0)
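Note: the hunks above build the fullscreen-mirror camera by composing the avatar's orientation with a yaw of PI plus the user's mirror twist, then pushing the camera out in front of the eyes. A minimal, self-contained glm sketch of that composition, assuming illustrative names (`eyePosition`, `mirrorDistance`) rather than the engine's members:

```cpp
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch only: derive a fullscreen-mirror camera pose from an avatar pose.
struct CameraPose { glm::vec3 position; glm::quat orientation; };

CameraPose computeMirrorPose(const glm::vec3& eyePosition, const glm::quat& avatarOrientation,
                             float rotateMirror, float mirrorDistance) {
    const float PI = 3.14159265358979f;
    // Yaw around the avatar's local Y so the camera faces back at the avatar.
    glm::quat mirrorYaw = glm::quat(glm::vec3(0.0f, PI + rotateMirror, 0.0f));
    glm::quat orientation = avatarOrientation * mirrorYaw;
    // Offset along the mirrored view's local +Z so the avatar stays in frame.
    glm::vec3 position = eyePosition + orientation * glm::vec3(0.0f, 0.0f, 1.0f) * mirrorDistance;
    return { position, orientation };
}
```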
@@ -4483,11 +4483,9 @@ void Application::cameraModeChanged() {
 void Application::cameraMenuChanged() {
     auto menu = Menu::getInstance();
     if (menu->isOptionChecked(MenuOption::FullscreenMirror)) {
-        if (isHMDMode()) {
-            menu->setIsOptionChecked(MenuOption::FullscreenMirror, false);
-            menu->setIsOptionChecked(MenuOption::FirstPerson, true);
-        } else if (_myCamera.getMode() != CAMERA_MODE_MIRROR) {
+        if (_myCamera.getMode() != CAMERA_MODE_MIRROR) {
             _myCamera.setMode(CAMERA_MODE_MIRROR);
+            getMyAvatar()->reset(false, false, false); // to reset any active MyAvatar::FollowHelpers
         }
     } else if (menu->isOptionChecked(MenuOption::FirstPerson)) {
         if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON) {
@@ -1299,7 +1299,7 @@ eyeContactTarget MyAvatar::getEyeContactTarget() {
 }

 glm::vec3 MyAvatar::getDefaultEyePosition() const {
-    return getPosition() + getWorldAlignedOrientation() * Quaternions::Y_180 * _skeletonModel->getDefaultEyeModelPosition();
+    return getPosition() + getOrientation() * Quaternions::Y_180 * _skeletonModel->getDefaultEyeModelPosition();
 }

 const float SCRIPT_PRIORITY = 1.0f + 1.0f;
@@ -1594,9 +1594,14 @@ void MyAvatar::updateMotors() {
             motorRotation = getMyHead()->getHeadOrientation();
         } else {
             // non-hovering = walking: follow camera twist about vertical but not lift
-            // so we decompose camera's rotation and store the twist part in motorRotation
+            // we decompose camera's rotation and store the twist part in motorRotation
+            // however, we need to perform the decomposition in the avatar-frame
+            // using the local UP axis and then transform back into world-frame
+            glm::quat orientation = getOrientation();
+            glm::quat headOrientation = glm::inverse(orientation) * getMyHead()->getHeadOrientation(); // avatar-frame
             glm::quat liftRotation;
-            swingTwistDecomposition(getMyHead()->getHeadOrientation(), _worldUpDirection, liftRotation, motorRotation);
+            swingTwistDecomposition(headOrientation, Vectors::UNIT_Y, liftRotation, motorRotation);
+            motorRotation = orientation * motorRotation;
         }
         const float DEFAULT_MOTOR_TIMESCALE = 0.2f;
         const float INVALID_MOTOR_TIMESCALE = 1.0e6f;
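The comments in this hunk describe splitting the head orientation into a "lift" (swing) part and a twist about the avatar's local up axis, keeping only the twist for the walking motor. A self-contained sketch of such a swing-twist decomposition with plain glm; the engine's own swingTwistDecomposition() helper may differ in detail, and all names here are illustrative:

```cpp
#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Decompose `rotation` into swing * twist, where `twist` is about the (normalized) `axis`.
void swingTwistSketch(const glm::quat& rotation, const glm::vec3& axis,
                      glm::quat& swing, glm::quat& twist) {
    // Project the rotation's vector part onto the axis to isolate the twist component.
    glm::vec3 r(rotation.x, rotation.y, rotation.z);
    glm::vec3 p = glm::dot(r, axis) * axis;
    if (glm::dot(p, p) < 1.0e-6f && fabsf(rotation.w) < 1.0e-6f) {
        twist = glm::quat(1.0f, 0.0f, 0.0f, 0.0f); // near-180-degree swing; twist is degenerate
    } else {
        twist = glm::normalize(glm::quat(rotation.w, p.x, p.y, p.z));
    }
    // Whatever remains after removing the twist is the swing ("lift") part.
    swing = rotation * glm::inverse(twist);
}

// Usage mirroring the hunk: move the head orientation into the avatar frame, decompose
// about local up, keep the twist, then carry it back into the world frame.
//   glm::quat headInAvatarFrame = glm::inverse(orientation) * headOrientation;
//   glm::quat lift, motorRotation;
//   swingTwistSketch(headInAvatarFrame, glm::vec3(0.0f, 1.0f, 0.0f), lift, motorRotation);
//   motorRotation = orientation * motorRotation;
```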
@@ -1650,11 +1655,31 @@ void MyAvatar::prepareForPhysicsSimulation() {
     _prePhysicsRoomPose = AnimPose(_sensorToWorldMatrix);
 }

+// There are a number of possible strategies for this set of tools through endRender, below.
+void MyAvatar::nextAttitude(glm::vec3 position, glm::quat orientation) {
+    bool success;
+    Transform trans = getTransform(success);
+    if (!success) {
+        qCWarning(interfaceapp) << "Warning -- MyAvatar::nextAttitude failed";
+        return;
+    }
+    trans.setTranslation(position);
+    trans.setRotation(orientation);
+    SpatiallyNestable::setTransform(trans, success);
+    if (!success) {
+        qCWarning(interfaceapp) << "Warning -- MyAvatar::nextAttitude failed";
+    }
+    updateAttitude(orientation);
+}
+
 void MyAvatar::harvestResultsFromPhysicsSimulation(float deltaTime) {
-    glm::vec3 position = getPosition();
-    glm::quat orientation = getOrientation();
+    glm::vec3 position;
+    glm::quat orientation;
     if (_characterController.isEnabledAndReady()) {
         _characterController.getPositionAndOrientation(position, orientation);
+    } else {
+        position = getPosition();
+        orientation = getOrientation();
+    }
     nextAttitude(position, orientation);
     _bodySensorMatrix = _follow.postPhysicsUpdate(*this, _bodySensorMatrix);
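The harvest path above now reads the simulated pose only when the character controller is actually driving the avatar, falls back to the current transform otherwise, and applies translation and rotation through a single nextAttitude() call so dependents (the skeleton model, the world-up vector) never see a half-updated pose. A condensed sketch of that pattern with illustrative stand-in types, not the engine's own classes:

```cpp
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Illustrative stand-ins; the real code uses CharacterController and SpatiallyNestable::setTransform().
struct Pose {
    glm::vec3 position { 0.0f };
    glm::quat orientation { 1.0f, 0.0f, 0.0f, 0.0f };
};

struct CharacterControllerStub {
    bool enabledAndReady = false;
    Pose simulated;
};

// Prefer the simulated pose when physics is active, otherwise keep the current one,
// and hand back one Pose so position and rotation are applied together.
Pose harvestPose(const CharacterControllerStub& controller, const Pose& current) {
    return controller.enabledAndReady ? controller.simulated : current;
}
```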
@@ -2849,7 +2874,8 @@ bool MyAvatar::FollowHelper::shouldActivateVertical(const MyAvatar& myAvatar, co
 void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix,
                                               const glm::mat4& currentBodyMatrix, bool hasDriveInput) {

-    if (myAvatar.getHMDLeanRecenterEnabled()) {
+    if (myAvatar.getHMDLeanRecenterEnabled() &&
+        qApp->getCamera().getMode() != CAMERA_MODE_MIRROR) {
         if (!isActive(Rotation) && (shouldActivateRotation(myAvatar, desiredBodyMatrix, currentBodyMatrix) || hasDriveInput)) {
             activate(Rotation);
         }
@@ -438,6 +438,7 @@ public:

     void updateMotors();
     void prepareForPhysicsSimulation();
+    void nextAttitude(glm::vec3 position, glm::quat orientation); // Can be safely called at any time.
     void harvestResultsFromPhysicsSimulation(float deltaTime);

     const QString& getCollisionSoundURL() { return _collisionSoundURL; }
@@ -557,7 +558,6 @@ public:
     Q_INVOKABLE bool isUp(const glm::vec3& direction) { return glm::dot(direction, _worldUpDirection) > 0.0f; }; // true iff direction points up wrt avatar's definition of up.
     Q_INVOKABLE bool isDown(const glm::vec3& direction) { return glm::dot(direction, _worldUpDirection) < 0.0f; };
-

 public slots:
     void increaseSize();
     void decreaseSize();
@@ -29,7 +29,7 @@ MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
 glm::quat MyHead::getHeadOrientation() const {
     // NOTE: Head::getHeadOrientation() is not used for orienting the camera "view" while in Oculus mode, so
     // you may wonder why this code is here. This method will be called while in Oculus mode to determine how
-    // to change the driving direction while in Oculus mode. It is used to support driving toward where you're
+    // to change the driving direction while in Oculus mode. It is used to support driving toward where your
     // head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
     // always the same.

@@ -39,7 +39,7 @@ glm::quat MyHead::getHeadOrientation() const {
         return headPose.rotation * Quaternions::Y_180;
     }

-    return myAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
+    return myAvatar->getOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
 }

 void MyHead::simulate(float deltaTime) {
@@ -192,6 +192,7 @@ void Web3DOverlay::loadSourceURL() {
             _webSurface->getSurfaceContext()->setContextProperty("SoundCache", DependencyManager::get<SoundCache>().data());

+            _webSurface->getSurfaceContext()->setContextProperty("pathToFonts", "../../");

             tabletScriptingInterface->setQmlTabletRoot("com.highfidelity.interface.tablet.system", _webSurface.data());

             // mark the TabletProxy object as cpp ownership.
@@ -151,11 +151,6 @@ glm::vec3 Avatar::getNeckPosition() const {
     return _skeletonModel->getNeckPosition(neckPosition) ? neckPosition : getPosition();
 }

-
-glm::quat Avatar::getWorldAlignedOrientation () const {
-    return computeRotationFromBodyToWorldUp() * getOrientation();
-}
-
 AABox Avatar::getBounds() const {
     if (!_skeletonModel->isRenderable() || _skeletonModel->needsFixupInScene()) {
         // approximately 2m tall, scaled to user request.
@@ -436,6 +431,11 @@ void Avatar::slamPosition(const glm::vec3& newPosition) {
     _lastVelocity = glm::vec3(0.0f);
 }

+void Avatar::updateAttitude(const glm::quat& orientation) {
+    _skeletonModel->updateAttitude(orientation);
+    _worldUpDirection = orientation * Vectors::UNIT_Y;
+}
+
 void Avatar::applyPositionDelta(const glm::vec3& delta) {
     setPosition(getPosition() + delta);
     _positionDeltaAccumulator += delta;
@@ -628,22 +628,6 @@ void Avatar::render(RenderArgs* renderArgs) {
         }
     }

-glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
-    glm::quat orientation = getOrientation();
-    glm::vec3 currentUp = orientation * IDENTITY_UP;
-    float angle = acosf(glm::clamp(glm::dot(currentUp, _worldUpDirection), -1.0f, 1.0f));
-    if (angle < EPSILON) {
-        return glm::quat();
-    }
-    glm::vec3 axis;
-    if (angle > 179.99f * RADIANS_PER_DEGREE) { // 180 degree rotation; must use another axis
-        axis = orientation * IDENTITY_RIGHT;
-    } else {
-        axis = glm::normalize(glm::cross(currentUp, _worldUpDirection));
-    }
-    return glm::angleAxis(angle * proportion, axis);
-}
-
 void Avatar::fixupModelsInScene(const render::ScenePointer& scene) {
     _attachmentsToDelete.clear();

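For reference, the helper removed above computed the shortest-arc rotation that re-aligns the avatar's current up vector with world up: the angle comes from the dot product, the axis from the cross product, with a fallback axis for the near-180-degree case. A self-contained glm sketch of that construction, kept only to document what the deleted code did; the EPSILON value and parameter names are illustrative:

```cpp
#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Shortest-arc rotation taking currentUp onto worldUp, optionally scaled by `proportion`.
glm::quat rotationFromBodyToWorldUp(const glm::vec3& currentUp, const glm::vec3& worldUp,
                                    const glm::vec3& fallbackAxis, float proportion = 1.0f) {
    const float EPSILON = 1.0e-4f;
    float angle = acosf(glm::clamp(glm::dot(currentUp, worldUp), -1.0f, 1.0f));
    if (angle < EPSILON) {
        return glm::quat(); // already aligned
    }
    glm::vec3 axis;
    if (angle > glm::radians(179.99f)) {
        axis = fallbackAxis; // vectors are opposite; any perpendicular axis works
    } else {
        axis = glm::normalize(glm::cross(currentUp, worldUp));
    }
    return glm::angleAxis(angle * proportion, axis);
}
```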
@@ -1401,14 +1385,14 @@ glm::quat Avatar::getUncachedRightPalmRotation() const {
     return rightPalmRotation;
 }

-void Avatar::setPosition(const glm::vec3& position) {
-    AvatarData::setPosition(position);
-    updateAttitude();
+void Avatar::setPositionViaScript(const glm::vec3& position) {
+    setPosition(position);
+    updateAttitude(getOrientation());
 }

-void Avatar::setOrientation(const glm::quat& orientation) {
-    AvatarData::setOrientation(orientation);
-    updateAttitude();
+void Avatar::setOrientationViaScript(const glm::quat& orientation) {
+    setOrientation(orientation);
+    updateAttitude(orientation);
 }

 void Avatar::updatePalms() {
@@ -112,8 +112,6 @@ public:
     const Head* getHead() const { return static_cast<const Head*>(_headData); }
     Head* getHead() { return static_cast<Head*>(_headData); }

-    glm::quat getWorldAlignedOrientation() const;
-
     AABox getBounds() const;

     /// Returns the distance to use as a LOD parameter.
@@ -184,7 +182,7 @@ public:
     void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;

     void slamPosition(const glm::vec3& position);
-    virtual void updateAttitude() override { _skeletonModel->updateAttitude(); }
+    virtual void updateAttitude(const glm::quat& orientation) override;

     // Call this when updating Avatar position with a delta. This will allow us to
     // _accurately_ measure position changes and compute the resulting velocity
@@ -197,10 +195,8 @@ public:
     void getCapsule(glm::vec3& start, glm::vec3& end, float& radius);
     float computeMass();

-    using SpatiallyNestable::setPosition;
-    virtual void setPosition(const glm::vec3& position) override;
-    using SpatiallyNestable::setOrientation;
-    virtual void setOrientation(const glm::quat& orientation) override;
+    void setPositionViaScript(const glm::vec3& position) override;
+    void setOrientationViaScript(const glm::quat& orientation) override;

     // these call through to the SpatiallyNestable versions, but they are here to expose these to javascript.
     Q_INVOKABLE virtual const QUuid getParentID() const override { return SpatiallyNestable::getParentID(); }
@@ -240,7 +236,7 @@ public:
     bool hasNewJointData() const { return _hasNewJointData; }

     float getBoundingRadius() const;

     void addToScene(AvatarSharedPointer self, const render::ScenePointer& scene);
     void ensureInScene(AvatarSharedPointer self, const render::ScenePointer& scene);
     bool isInScene() const { return render::Item::isValidID(_renderItemID); }
@@ -303,7 +299,6 @@ protected:

     glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
     glm::vec3 getBodyUpDirection() const { return getOrientation() * IDENTITY_UP; }
-    glm::quat computeRotationFromBodyToWorldUp(float proportion = 1.0f) const;
     void measureMotionDerivatives(float deltaTime);

     float getSkeletonHeight() const;
@@ -118,16 +118,16 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
         _rig.updateFromEyeParameters(eyeParams);
     }
 }

-void SkeletonModel::updateAttitude() {
+void SkeletonModel::updateAttitude(const glm::quat& orientation) {
     setTranslation(_owningAvatar->getSkeletonPosition());
-    setRotation(_owningAvatar->getOrientation() * Quaternions::Y_180);
+    setRotation(orientation * Quaternions::Y_180);
     setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale());
 }

 // Called by Avatar::simulate after it has set the joint states (fullUpdate true if changed),
 // but just before head has been simulated.
 void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
-    updateAttitude();
+    updateAttitude(_owningAvatar->getOrientation());
     if (fullUpdate) {
         setBlendshapeCoefficients(_owningAvatar->getHead()->getSummedBlendshapeCoefficients());

@@ -35,7 +35,7 @@ public:

     void simulate(float deltaTime, bool fullUpdate = true) override;
     void updateRig(float deltaTime, glm::mat4 parentTransform) override;
-    void updateAttitude();
+    void updateAttitude(const glm::quat& orientation);

     /// Returns the index of the left hand joint, or -1 if not found.
     int getLeftHandJointIndex() const { return isActive() ? getFBXGeometry().leftHandJointIndex : -1; }
@@ -91,9 +91,6 @@ AvatarData::AvatarData() :
     _targetVelocity(0.0f),
     _density(DEFAULT_AVATAR_DENSITY)
 {
-    setBodyPitch(0.0f);
-    setBodyYaw(-90.0f);
-    setBodyRoll(0.0f);
 }

 AvatarData::~AvatarData() {
@@ -110,23 +107,6 @@ const QUrl& AvatarData::defaultFullAvatarModelUrl() {
     return _defaultFullAvatarModelUrl;
 }

-// There are a number of possible strategies for this set of tools through endRender, below.
-void AvatarData::nextAttitude(glm::vec3 position, glm::quat orientation) {
-    bool success;
-    Transform trans = getTransform(success);
-    if (!success) {
-        qCWarning(avatars) << "Warning -- AvatarData::nextAttitude failed";
-        return;
-    }
-    trans.setTranslation(position);
-    trans.setRotation(orientation);
-    SpatiallyNestable::setTransform(trans, success);
-    if (!success) {
-        qCWarning(avatars) << "Warning -- AvatarData::nextAttitude failed";
-    }
-    updateAttitude();
-}
-
 void AvatarData::setTargetScale(float targetScale) {
     auto newValue = glm::clamp(targetScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE);
     if (_targetScale != newValue) {
@@ -2100,6 +2080,7 @@ void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
         currentBasis = std::make_shared<Transform>(Transform::fromJson(json[JSON_AVATAR_BASIS]));
     }

+    glm::quat orientation;
     if (json.contains(JSON_AVATAR_RELATIVE)) {
         // During playback you can either have the recording basis set to the avatar current state
         // meaning that all playback is relative to this avatars starting position, or
@@ -2111,12 +2092,14 @@ void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
         auto relativeTransform = Transform::fromJson(json[JSON_AVATAR_RELATIVE]);
         auto worldTransform = currentBasis->worldTransform(relativeTransform);
         setPosition(worldTransform.getTranslation());
-        setOrientation(worldTransform.getRotation());
+        orientation = worldTransform.getRotation();
     } else {
         // We still set the position in the case that there is no movement.
         setPosition(currentBasis->getTranslation());
-        setOrientation(currentBasis->getRotation());
+        orientation = currentBasis->getRotation();
     }
+    setOrientation(orientation);
+    updateAttitude(orientation);

     // Do after avatar orientation because head look-at needs avatar orientation.
     if (json.contains(JSON_AVATAR_HEAD)) {
@@ -2234,11 +2217,11 @@ void AvatarData::setBodyRoll(float bodyRoll) {
     setOrientation(glm::quat(glm::radians(eulerAngles)));
 }

-void AvatarData::setPosition(const glm::vec3& position) {
+void AvatarData::setPositionViaScript(const glm::vec3& position) {
     SpatiallyNestable::setPosition(position);
 }

-void AvatarData::setOrientation(const glm::quat& orientation) {
+void AvatarData::setOrientationViaScript(const glm::quat& orientation) {
     SpatiallyNestable::setOrientation(orientation);
 }

@@ -351,14 +351,14 @@ public:
 class AvatarData : public QObject, public SpatiallyNestable {
     Q_OBJECT

-    Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPosition)
+    Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPositionViaScript)
     Q_PROPERTY(float scale READ getTargetScale WRITE setTargetScale)
     Q_PROPERTY(glm::vec3 handPosition READ getHandPosition WRITE setHandPosition)
     Q_PROPERTY(float bodyYaw READ getBodyYaw WRITE setBodyYaw)
     Q_PROPERTY(float bodyPitch READ getBodyPitch WRITE setBodyPitch)
     Q_PROPERTY(float bodyRoll READ getBodyRoll WRITE setBodyRoll)

-    Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientation)
+    Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientationViaScript)
     Q_PROPERTY(glm::quat headOrientation READ getHeadOrientation WRITE setHeadOrientation)
     Q_PROPERTY(float headPitch READ getHeadPitch WRITE setHeadPitch)
     Q_PROPERTY(float headYaw READ getHeadYaw WRITE setHeadYaw)
@@ -440,13 +440,10 @@ public:
     float getBodyRoll() const;
     void setBodyRoll(float bodyRoll);

-    using SpatiallyNestable::setPosition;
-    virtual void setPosition(const glm::vec3& position) override;
-    using SpatiallyNestable::setOrientation;
-    virtual void setOrientation(const glm::quat& orientation) override;
+    virtual void setPositionViaScript(const glm::vec3& position);
+    virtual void setOrientationViaScript(const glm::quat& orientation);

-    void nextAttitude(glm::vec3 position, glm::quat orientation); // Can be safely called at any time.
-    virtual void updateAttitude() {} // Tell skeleton mesh about changes
+    virtual void updateAttitude(const glm::quat& orientation) {}

     glm::quat getHeadOrientation() const {
         lazyInitHeadData();
@@ -54,6 +54,7 @@ void EntityEditPacketSender::queueEditAvatarEntityMessage(PacketType type,
         EntityItemProperties entityProperties = entity->getProperties();
         entityProperties.merge(properties);

+        std::lock_guard<std::mutex> lock(_mutex);
         QScriptValue scriptProperties = EntityItemNonDefaultPropertiesToScriptValue(&_scriptEngine, entityProperties);
         QVariant variantProperties = scriptProperties.toVariant();
         QJsonDocument jsonProperties = QJsonDocument::fromVariant(variantProperties);
@@ -14,6 +14,8 @@

 #include <OctreeEditPacketSender.h>

+#include <mutex>
+
 #include "EntityItem.h"
 #include "AvatarData.h"

@@ -49,6 +51,7 @@ private:
                                       EntityItemID entityItemID, const EntityItemProperties& properties);

 private:
+    std::mutex _mutex;
     AvatarData* _myAvatar { nullptr };
     QScriptEngine _scriptEngine;
 };
|
@ -445,7 +445,7 @@ void CharacterController::handleChangedCollisionGroup() {
|
|||
|
||||
void CharacterController::updateUpAxis(const glm::quat& rotation) {
|
||||
_currentUp = quatRotate(glmToBullet(rotation), LOCAL_UP_AXIS);
|
||||
if (_state != State::Hover && _rigidBody) {
|
||||
if (_rigidBody) {
|
||||
_rigidBody->setGravity(_gravity * _currentUp);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -127,7 +127,7 @@ private:
     glm::mat4 _projection;

     // derived
-    glm::vec3 _position;
+    glm::vec3 _position { 0.0f, 0.0f, 0.0f };
     glm::quat _orientation;
     bool _isKeepLookingAt{ false };
     glm::vec3 _lookingAt;
@@ -482,6 +482,11 @@ bool TabletProxy::pushOntoStack(const QVariant& path) {
         return result;
     }

+    //set landscape off when pushing menu items while in Create mode
+    if (_landscape) {
+        setLandscape(false);
+    }
+
     QObject* root = nullptr;
     if (!_toolbarMode && _qmlTabletRoot) {
         root = _qmlTabletRoot;
@@ -14,7 +14,7 @@
 Settings, Entities, Tablet, Toolbars, Messages, Menu, Camera, progressDialog, tooltip, MyAvatar, Quat, Controller, Clipboard, HMD, UndoStack, ParticleExplorerTool */

 (function() { // BEGIN LOCAL_SCOPE

 "use strict";

 var HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
@@ -26,11 +26,8 @@ Script.include([
     "libraries/stringHelpers.js",
     "libraries/dataViewHelpers.js",
     "libraries/progressDialog.js",
-
     "libraries/entitySelectionTool.js",
-
     "libraries/ToolTip.js",
-
     "libraries/entityCameraTool.js",
     "libraries/gridTool.js",
     "libraries/entityList.js",
@@ -275,7 +272,8 @@ var toolBar = (function () {
             properties.userData = JSON.stringify({ grabbableKey: { grabbable: true } });
         }
         entityID = Entities.addEntity(properties);
-        if (properties.type == "ParticleEffect") {
+
+        if (properties.type === "ParticleEffect") {
             selectParticleEntity(entityID);
         }

@@ -2229,10 +2227,9 @@ var particleExplorerTool = new ParticleExplorerTool();
 var selectedParticleEntity = 0;
 var selectedParticleEntityID = null;

-
 function selectParticleEntity(entityID) {
     var properties = Entities.getEntityProperties(entityID);

     selectedParticleEntityID = entityID;
     if (properties.emitOrientation) {
         properties.emitOrientation = Quat.safeEulerAngles(properties.emitOrientation);
     }
@@ -2274,7 +2271,6 @@ entityListTool.webView.webEventReceived.connect(function (data) {
                 return;
             }
-            // Destroy the old particles web view first
             selectParticleEntity(ids[0]);
         } else {
             selectedParticleEntity = 0;
             particleExplorerTool.destroyWebView();
@@ -40,9 +40,8 @@ function updateControllerDisplay() {
     var button;
     var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");

-// Independent and Entity mode make people sick. Third Person and Mirror have traps that we need to work through.
-// Disable them in hmd.
-var desktopOnlyViews = ['Mirror', 'Independent Mode', 'Entity Mode'];
+// Independent and Entity mode make people sick; disable them in hmd.
+var desktopOnlyViews = ['Independent Mode', 'Entity Mode'];

 function onHmdChanged(isHmd) {
     HMD.closeTablet();
@@ -475,6 +475,15 @@ function unbindAllInputs() {
     }
 }

+function clearSelection() {
+    if(document.selection && document.selection.empty) {
+        document.selection.empty();
+    } else if(window.getSelection) {
+        var sel = window.getSelection();
+        sel.removeAllRanges();
+    }
+}
+
 function loaded() {
     openEventBridge(function() {

@@ -1051,6 +1060,7 @@ function loaded() {
                     activeElement.select();
                 }
             }
+            clearSelection();
         }
     });
 }
@@ -1,34 +0,0 @@
-//
-//  pUtils.js
-//
-//  Created by Patrick Gosch on 03/28/2017
-//  Copyright 2017 High Fidelity, Inc.
-//
-//  Distributed under the Apache License, Version 2.0.
-//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-//
-
-getEntityTextures = function(id) {
-    var results = null;
-    var properties = Entities.getEntityProperties(id, "textures");
-    if (properties.textures) {
-        try {
-            results = JSON.parse(properties.textures);
-        } catch (err) {
-            logDebug(err);
-            logDebug(properties.textures);
-        }
-    }
-    return results ? results : {};
-};
-
-setEntityTextures = function(id, textureList) {
-    var json = JSON.stringify(textureList);
-    Entities.editEntity(id, {textures: json});
-};
-
-editEntityTextures = function(id, textureName, textureURL) {
-    var textureList = getEntityTextures(id);
-    textureList[textureName] = textureURL;
-    setEntityTextures(id, textureList);
-};
@@ -9,12 +9,12 @@
 //

 (function() {
-    Script.include(Script.resolvePath("pUtils.js"));
-    var TIMEOUT = 150;
-    var TEXGRAY = Script.resolvePath("xylotex_bar_gray.png");
-    var TEXBLACK = Script.resolvePath("xylotex_bar_black.png");
+    var TIMEOUT = 50; // at 30 ms, the key's color sometimes fails to switch when hit
+    var TEXTURE_GRAY = Script.resolvePath("xylotex_bar_gray.png");
+    var TEXTURE_BLACK = Script.resolvePath("xylotex_bar_black.png");
+    var IS_DEBUG = false;
     var _this;


     function XylophoneKey() {
         _this = this;
     }
@@ -22,7 +22,7 @@
     XylophoneKey.prototype = {
         sound: null,
         isWaiting: false,
-        homePos: null,
+        homePosition: null,
         injector: null,

         preload: function(entityID) {
@@ -34,31 +34,66 @@

         collisionWithEntity: function(thisEntity, otherEntity, collision) {
             if (collision.type === 0) {
-                _this.hit();
+                _this.hit(otherEntity);
             }
         },

-        clickDownOnEntity: function() {
-            _this.hit();
+        clickDownOnEntity: function(otherEntity) {
+            _this.hit(otherEntity);
         },

-        hit: function() {
+        hit: function(otherEntity) {
             if (!_this.isWaiting) {
                 _this.isWaiting = true;
-                _this.homePos = Entities.getEntityProperties(_this.entityID, ["position"]).position;
-                _this.injector = Audio.playSound(_this.sound, {position: _this.homePos, volume: 1});
-                editEntityTextures(_this.entityID, "file5", TEXGRAY);
+                _this.homePosition = Entities.getEntityProperties(_this.entityID, ["position"]).position;
+                _this.injector = Audio.playSound(_this.sound, {position: _this.homePosition, volume: 1});
+                _this.editEntityTextures(_this.entityID, "file5", TEXTURE_GRAY);
+
+                var HAPTIC_STRENGTH = 1;
+                var HAPTIC_DURATION = 20;
+                var userData = JSON.parse(Entities.getEntityProperties(otherEntity, 'userData').userData);
+                if (userData.hasOwnProperty('hand')){
+                    Controller.triggerHapticPulse(HAPTIC_STRENGTH, HAPTIC_DURATION, userData.hand);
+                }
+
                 _this.timeout();
             }
         },

         timeout: function() {
             Script.setTimeout(function() {
-                editEntityTextures(_this.entityID, "file5", TEXBLACK);
+                _this.editEntityTextures(_this.entityID, "file5", TEXTURE_BLACK);
                 _this.isWaiting = false;
             }, TIMEOUT);
         },
+
+        getEntityTextures: function(id) {
+            var results = null;
+            var properties = Entities.getEntityProperties(id, "textures");
+            if (properties.textures) {
+                try {
+                    results = JSON.parse(properties.textures);
+                } catch (err) {
+                    if (IS_DEBUG) {
+                        print(err);
+                        print(properties.textures);
+                    }
+                }
+            }
+            return results ? results : {};
+        },
+
+        setEntityTextures: function(id, textureList) {
+            var json = JSON.stringify(textureList);
+            Entities.editEntity(id, {textures: json});
+        },
+
+        editEntityTextures: function(id, textureName, textureURL) {
+            var textureList = _this.getEntityTextures(id);
+            textureList[textureName] = textureURL;
+            _this.setEntityTextures(id, textureList);
+        }
     };

     return new XylophoneKey();

 });
unpublishedScripts/marketplace/xylophone/xylophoneMallet.js (new file, 25 lines)
@@ -0,0 +1,25 @@
+//
+//  xylophoneMallet.js
+//
+//  Created by Johnathan Franck on 07/30/2017
+//  Copyright 2017 High Fidelity, Inc.
+//
+//  Distributed under the Apache License, Version 2.0.
+//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+
+(function() {
+    function XylophoneMallet() {
+    }
+
+    XylophoneMallet.prototype = {
+        startEquip: function(entityID, args) {
+            var LEFT_HAND = 0;
+            var RIGHT_HAND = 1;
+            var userData = JSON.parse(Entities.getEntityProperties(entityID, 'userData').userData);
+            userData.hand = args[0] === "left" ? LEFT_HAND : RIGHT_HAND;
+            Entities.editEntity(entityID, {userData: JSON.stringify(userData)});
+        }
+    };
+
+    return new XylophoneMallet();
+});
@@ -8,65 +8,70 @@
 //  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 //

-var soundFiles = ["C4.wav", "D4.wav", "E4.wav", "F4.wav", "G4.wav", "A4.wav", "B4.wav", "C5.wav"];
-var keyModelURL = Script.resolvePath("xyloKey_2_a_e.fbx");
-var keyScriptURL = Script.resolvePath("xylophoneKey.js");
-var TEXBLACK = Script.resolvePath("xylotex_bar_black.png");
-var malletModelURL = Script.resolvePath("Mallet3-2pc.fbx");
-var malletModelColliderURL = Script.resolvePath("Mallet3-2bpc_phys.obj");
+var SOUND_FILES = ["C4.wav", "D4.wav", "E4.wav", "F4.wav", "G4.wav", "A4.wav", "B4.wav", "C5.wav"];
+var KEY_MODEL_URL = Script.resolvePath("xyloKey_2_a_e.fbx");
+var KEY_SCRIPT_URL = Script.resolvePath("xylophoneKey.js");
+var MALLET_SCRIPT_URL = Script.resolvePath("xylophoneMallet.js");
+var TEXTURE_BLACK = Script.resolvePath("xylotex_bar_black.png");
+var MALLET_MODEL_URL = Script.resolvePath("Mallet3-2pc.fbx");
+var MALLET_MODEL_COLLIDER_URL = Script.resolvePath("Mallet3-2bpc_phys.obj");
+var FORWARD = { x: 0, y: 0, z: -1 };
 var center = MyAvatar.position;
-var fwd = {x:0, y:0, z:-1};

-var xyloFramePos = Vec3.sum(center, Vec3.multiply(fwd, 0.8));
-var xyloFrameID = Entities.addEntity( {
+var XYLOPHONE_FORWARD_OFFSET = 0.8;
+var xylophoneFramePosition = Vec3.sum(center, Vec3.multiply(FORWARD, XYLOPHONE_FORWARD_OFFSET));
+var xylophoneFrameID = Entities.addEntity({
     name: "Xylophone",
     type: "Model",
     modelURL: Script.resolvePath("xylophoneFrameWithWave.fbx"),
-    position: xyloFramePos,
-    rotation: Quat.fromVec3Radians({x:0, y:Math.PI, z:0}),
+    position: xylophoneFramePosition,
+    rotation: Quat.fromVec3Radians({ x: 0, y: Math.PI, z: 0 }),
     shapeType: "static-mesh"
 });

-center.y += (0.45); // key Y offset from frame
-var keyPos, keyRot, ud, td, keyID;
-for (var i = 1; i <= soundFiles.length; i++) {
+var KEY_Y_OFFSET = 0.45;
+center.y += KEY_Y_OFFSET;
+var keyPosition, keyRotation, userData, textureData, keyID;
+var ROTATION_START = 0.9;
+var ROTATION_DELTA = 0.2;
+for (var i = 1; i <= SOUND_FILES.length; i++) {

+    keyRotation = Quat.fromVec3Radians({ x: 0, y: ROTATION_START - (i*ROTATION_DELTA), z: 0 });
+    keyPosition = Vec3.sum(center, Vec3.multiplyQbyV(keyRotation, FORWARD));

-    keyRot = Quat.fromVec3Radians({x:0, y:(0.9 - (i*0.2)), z:0});
-    keyPos = Vec3.sum(center, Vec3.multiplyQbyV(keyRot, fwd));

-    ud = {
-        soundFile: soundFiles[i-1]
+    userData = {
+        soundFile: SOUND_FILES[i-1]
     };

-    td = {
+    textureData = {
         "file4": Script.resolvePath("xylotex_bar" + i + ".png"),
-        "file5": TEXBLACK
+        "file5": TEXTURE_BLACK
     };

-    keyID = Entities.addEntity( {
+    keyID = Entities.addEntity({
         name: ("XyloKey" + i),
         type: "Model",
-        modelURL: keyModelURL,
-        position: keyPos,
-        rotation: keyRot,
+        modelURL: KEY_MODEL_URL,
+        position: keyPosition,
+        rotation: keyRotation,
         shapeType: "static-mesh",
-        script: keyScriptURL,
-        textures: JSON.stringify(td),
-        userData: JSON.stringify(ud),
-        parentID: xyloFrameID
-    } );
+        script: KEY_SCRIPT_URL,
+        textures: JSON.stringify(textureData),
+        userData: JSON.stringify(userData),
+        parentID: xylophoneFrameID
+    });
 }

 // if rezzed on/above something, wait until after model has loaded so you can read its dimensions then move object on to that surface.
-var pickRay = {origin: center, direction: {x:0, y:-1, z:0}};
+var pickRay = {origin: center, direction: {x: 0, y: -1, z: 0}};
 var intersection = Entities.findRayIntersection(pickRay, true);
 if (intersection.intersects && (intersection.distance < 10)) {
     var surfaceY = intersection.intersection.y;
     Script.setTimeout( function() {
         // should add loop to check for fbx loaded instead of delay
-        var xyloDimensions = Entities.getEntityProperties(xyloFrameID, ["dimensions"]).dimensions;
-        xyloFramePos.y = surfaceY + (xyloDimensions.y/2);
-        Entities.editEntity(xyloFrameID, {position: xyloFramePos});
+        var xylophoneDimensions = Entities.getEntityProperties(xylophoneFrameID, ["dimensions"]).dimensions;
+        xylophoneFramePosition.y = surfaceY + (xylophoneDimensions.y/2);
+        Entities.editEntity(xylophoneFrameID, {position: xylophoneFramePosition});
         rezMallets();
     }, 2000);
 } else {
@@ -75,28 +80,50 @@ if (intersection.intersects && (intersection.distance < 10)) {
 }

 function rezMallets() {
-    var malletProps = {
+    var malletProperties = {
         name: "Xylophone Mallet",
         type: "Model",
-        modelURL: malletModelURL,
-        compoundShapeURL: malletModelColliderURL,
-        collidesWith: "static,dynamic,kinematic,",
+        modelURL: MALLET_MODEL_URL,
+        compoundShapeURL: MALLET_MODEL_COLLIDER_URL,
+        collidesWith: "static,dynamic,kinematic",
         collisionMask: 7,
         collisionsWillMove: 1,
         dynamic: 1,
         damping: 1,
         angularDamping: 1,
         shapeType: "compound",
-        userData: "{\"grabbableKey\":{\"grabbable\":true}}",
-        dimensions: {"x": 0.057845603674650192, "y": 0.057845607399940491, "z": 0.30429631471633911} // not being set from fbx for some reason.
+        script: MALLET_SCRIPT_URL,
+        userData: JSON.stringify({
+            grabbableKey: {
+                invertSolidWhileHeld: true
+            },
+            wearable: {
+                joints: {
+                    LeftHand: [
+                        { x: 0, y: 0.2, z: 0.04 },
+                        Quat.fromVec3Degrees({ x: 0, y: 90, z: 90 })
+                    ],
+                    RightHand: [
+                        { x: 0, y: 0.2, z: 0.04 },
+                        Quat.fromVec3Degrees({ x: 0, y: 90, z: 90 })
+                    ]
+                }
+            }
+        }),
+        dimensions: { "x": 0.057845603674650192, "y": 0.057845607399940491, "z": 0.30429631471633911 } // not being set from fbx for some reason.
     };

-    malletProps.position = Vec3.sum(xyloFramePos, {x: 0.1, y: 0.55, z: 0});
-    malletProps.rotation = Quat.fromVec3Radians({x:0, y:Math.PI - 0.1, z:0});
-    Entities.addEntity(malletProps);
+    var LEFT_MALLET_POSITION = { x: 0.1, y: 0.55, z: 0 };
+    var LEFT_MALLET_ROTATION = { x: 0, y: Math.PI - 0.1, z: 0 };
+    var RIGHT_MALLET_POSITION = { x: -0.1, y: 0.55, z: 0 };
+    var RIGHT_MALLET_ROTATION = { x: 0, y: Math.PI + 0.1, z: 0 };

-    malletProps.position = Vec3.sum(xyloFramePos, {x: -0.1, y: 0.55, z: 0});
-    malletProps.rotation = Quat.fromVec3Radians({x:0, y:Math.PI + 0.1, z:0});
-    Entities.addEntity(malletProps);
+    malletProperties.position = Vec3.sum(xylophoneFramePosition, LEFT_MALLET_POSITION);
+    malletProperties.rotation = Quat.fromVec3Radians(LEFT_MALLET_ROTATION);
+    Entities.addEntity(malletProperties);
+
+    malletProperties.position = Vec3.sum(xylophoneFramePosition, RIGHT_MALLET_POSITION);
+    malletProperties.rotation = Quat.fromVec3Radians(RIGHT_MALLET_ROTATION);
+    Entities.addEntity(malletProperties);
     Script.stop();
 }