Merge branch 'master' of https://github.com/highfidelity/hifi into blocks

This commit is contained in:
Elisa Lupin-Jimenez 2017-08-02 08:28:55 -07:00
commit eb1fb1a1e1
25 changed files with 249 additions and 195 deletions

BIN
Test Plan 2.docx Normal file

Binary file not shown.

View file

@ -22,6 +22,9 @@ macro(GENERATE_INSTALLERS)
set(CPACK_PACKAGE_FILE_NAME "HighFidelity-Beta-${BUILD_VERSION}") set(CPACK_PACKAGE_FILE_NAME "HighFidelity-Beta-${BUILD_VERSION}")
set(CPACK_NSIS_DISPLAY_NAME ${_DISPLAY_NAME}) set(CPACK_NSIS_DISPLAY_NAME ${_DISPLAY_NAME})
set(CPACK_NSIS_PACKAGE_NAME ${_DISPLAY_NAME}) set(CPACK_NSIS_PACKAGE_NAME ${_DISPLAY_NAME})
if (PR_BUILD)
set(CPACK_NSIS_COMPRESSOR "/SOLID bzip2")
endif ()
set(CPACK_PACKAGE_INSTALL_DIRECTORY ${_DISPLAY_NAME}) set(CPACK_PACKAGE_INSTALL_DIRECTORY ${_DISPLAY_NAME})
if (WIN32) if (WIN32)

View file

@ -2329,7 +2329,7 @@ void Application::paintGL() {
} }
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) { } else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
if (isHMDMode()) { if (isHMDMode()) {
auto mirrorBodyOrientation = myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)); auto mirrorBodyOrientation = myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f));
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix()); glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
// Mirror HMD yaw and roll // Mirror HMD yaw and roll
@ -2351,7 +2351,7 @@ void Application::paintGL() {
+ mirrorBodyOrientation * glm::vec3(0.0f, 0.0f, 1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror + mirrorBodyOrientation * glm::vec3(0.0f, 0.0f, 1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
+ mirrorBodyOrientation * hmdOffset); + mirrorBodyOrientation * hmdOffset);
} else { } else {
_myCamera.setOrientation(myAvatar->getWorldAlignedOrientation() _myCamera.setOrientation(myAvatar->getOrientation()
* glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f))); * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setPosition(myAvatar->getDefaultEyePosition() _myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ glm::vec3(0, _raiseMirror * myAvatar->getUniformScale(), 0) + glm::vec3(0, _raiseMirror * myAvatar->getUniformScale(), 0)
@ -4497,11 +4497,9 @@ void Application::cameraModeChanged() {
void Application::cameraMenuChanged() { void Application::cameraMenuChanged() {
auto menu = Menu::getInstance(); auto menu = Menu::getInstance();
if (menu->isOptionChecked(MenuOption::FullscreenMirror)) { if (menu->isOptionChecked(MenuOption::FullscreenMirror)) {
if (isHMDMode()) { if (_myCamera.getMode() != CAMERA_MODE_MIRROR) {
menu->setIsOptionChecked(MenuOption::FullscreenMirror, false);
menu->setIsOptionChecked(MenuOption::FirstPerson, true);
} else if (_myCamera.getMode() != CAMERA_MODE_MIRROR) {
_myCamera.setMode(CAMERA_MODE_MIRROR); _myCamera.setMode(CAMERA_MODE_MIRROR);
getMyAvatar()->reset(false, false, false); // to reset any active MyAvatar::FollowHelpers
} }
} else if (menu->isOptionChecked(MenuOption::FirstPerson)) { } else if (menu->isOptionChecked(MenuOption::FirstPerson)) {
if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON) { if (_myCamera.getMode() != CAMERA_MODE_FIRST_PERSON) {

View file

@ -1299,7 +1299,7 @@ eyeContactTarget MyAvatar::getEyeContactTarget() {
} }
glm::vec3 MyAvatar::getDefaultEyePosition() const { glm::vec3 MyAvatar::getDefaultEyePosition() const {
return getPosition() + getWorldAlignedOrientation() * Quaternions::Y_180 * _skeletonModel->getDefaultEyeModelPosition(); return getPosition() + getOrientation() * Quaternions::Y_180 * _skeletonModel->getDefaultEyeModelPosition();
} }
const float SCRIPT_PRIORITY = 1.0f + 1.0f; const float SCRIPT_PRIORITY = 1.0f + 1.0f;
@ -1594,9 +1594,14 @@ void MyAvatar::updateMotors() {
motorRotation = getMyHead()->getHeadOrientation(); motorRotation = getMyHead()->getHeadOrientation();
} else { } else {
// non-hovering = walking: follow camera twist about vertical but not lift // non-hovering = walking: follow camera twist about vertical but not lift
// so we decompose camera's rotation and store the twist part in motorRotation // we decompose camera's rotation and store the twist part in motorRotation
// however, we need to perform the decomposition in the avatar-frame
// using the local UP axis and then transform back into world-frame
glm::quat orientation = getOrientation();
glm::quat headOrientation = glm::inverse(orientation) * getMyHead()->getHeadOrientation(); // avatar-frame
glm::quat liftRotation; glm::quat liftRotation;
swingTwistDecomposition(getMyHead()->getHeadOrientation(), _worldUpDirection, liftRotation, motorRotation); swingTwistDecomposition(headOrientation, Vectors::UNIT_Y, liftRotation, motorRotation);
motorRotation = orientation * motorRotation;
} }
const float DEFAULT_MOTOR_TIMESCALE = 0.2f; const float DEFAULT_MOTOR_TIMESCALE = 0.2f;
const float INVALID_MOTOR_TIMESCALE = 1.0e6f; const float INVALID_MOTOR_TIMESCALE = 1.0e6f;
@ -1650,11 +1655,31 @@ void MyAvatar::prepareForPhysicsSimulation() {
_prePhysicsRoomPose = AnimPose(_sensorToWorldMatrix); _prePhysicsRoomPose = AnimPose(_sensorToWorldMatrix);
} }
// There are a number of possible strategies for this set of tools through endRender, below.
void MyAvatar::nextAttitude(glm::vec3 position, glm::quat orientation) {
bool success;
Transform trans = getTransform(success);
if (!success) {
qCWarning(interfaceapp) << "Warning -- MyAvatar::nextAttitude failed";
return;
}
trans.setTranslation(position);
trans.setRotation(orientation);
SpatiallyNestable::setTransform(trans, success);
if (!success) {
qCWarning(interfaceapp) << "Warning -- MyAvatar::nextAttitude failed";
}
updateAttitude(orientation);
}
void MyAvatar::harvestResultsFromPhysicsSimulation(float deltaTime) { void MyAvatar::harvestResultsFromPhysicsSimulation(float deltaTime) {
glm::vec3 position = getPosition(); glm::vec3 position;
glm::quat orientation = getOrientation(); glm::quat orientation;
if (_characterController.isEnabledAndReady()) { if (_characterController.isEnabledAndReady()) {
_characterController.getPositionAndOrientation(position, orientation); _characterController.getPositionAndOrientation(position, orientation);
} else {
position = getPosition();
orientation = getOrientation();
} }
nextAttitude(position, orientation); nextAttitude(position, orientation);
_bodySensorMatrix = _follow.postPhysicsUpdate(*this, _bodySensorMatrix); _bodySensorMatrix = _follow.postPhysicsUpdate(*this, _bodySensorMatrix);
@ -2849,7 +2874,8 @@ bool MyAvatar::FollowHelper::shouldActivateVertical(const MyAvatar& myAvatar, co
void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix,
const glm::mat4& currentBodyMatrix, bool hasDriveInput) { const glm::mat4& currentBodyMatrix, bool hasDriveInput) {
if (myAvatar.getHMDLeanRecenterEnabled()) { if (myAvatar.getHMDLeanRecenterEnabled() &&
qApp->getCamera().getMode() != CAMERA_MODE_MIRROR) {
if (!isActive(Rotation) && (shouldActivateRotation(myAvatar, desiredBodyMatrix, currentBodyMatrix) || hasDriveInput)) { if (!isActive(Rotation) && (shouldActivateRotation(myAvatar, desiredBodyMatrix, currentBodyMatrix) || hasDriveInput)) {
activate(Rotation); activate(Rotation);
} }

View file

@ -438,6 +438,7 @@ public:
void updateMotors(); void updateMotors();
void prepareForPhysicsSimulation(); void prepareForPhysicsSimulation();
void nextAttitude(glm::vec3 position, glm::quat orientation); // Can be safely called at any time.
void harvestResultsFromPhysicsSimulation(float deltaTime); void harvestResultsFromPhysicsSimulation(float deltaTime);
const QString& getCollisionSoundURL() { return _collisionSoundURL; } const QString& getCollisionSoundURL() { return _collisionSoundURL; }
@ -557,7 +558,6 @@ public:
Q_INVOKABLE bool isUp(const glm::vec3& direction) { return glm::dot(direction, _worldUpDirection) > 0.0f; }; // true iff direction points up wrt avatar's definition of up. Q_INVOKABLE bool isUp(const glm::vec3& direction) { return glm::dot(direction, _worldUpDirection) > 0.0f; }; // true iff direction points up wrt avatar's definition of up.
Q_INVOKABLE bool isDown(const glm::vec3& direction) { return glm::dot(direction, _worldUpDirection) < 0.0f; }; Q_INVOKABLE bool isDown(const glm::vec3& direction) { return glm::dot(direction, _worldUpDirection) < 0.0f; };
public slots: public slots:
void increaseSize(); void increaseSize();
void decreaseSize(); void decreaseSize();

View file

@ -29,7 +29,7 @@ MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
glm::quat MyHead::getHeadOrientation() const { glm::quat MyHead::getHeadOrientation() const {
// NOTE: Head::getHeadOrientation() is not used for orienting the camera "view" while in Oculus mode, so // NOTE: Head::getHeadOrientation() is not used for orienting the camera "view" while in Oculus mode, so
// you may wonder why this code is here. This method will be called while in Oculus mode to determine how // you may wonder why this code is here. This method will be called while in Oculus mode to determine how
// to change the driving direction while in Oculus mode. It is used to support driving toward where you're // to change the driving direction while in Oculus mode. It is used to support driving toward where your
// head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not // head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
// always the same. // always the same.
@ -39,7 +39,7 @@ glm::quat MyHead::getHeadOrientation() const {
return headPose.rotation * Quaternions::Y_180; return headPose.rotation * Quaternions::Y_180;
} }
return myAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f))); return myAvatar->getOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
} }
void MyHead::simulate(float deltaTime) { void MyHead::simulate(float deltaTime) {

View file

@ -192,6 +192,7 @@ void Web3DOverlay::loadSourceURL() {
_webSurface->getSurfaceContext()->setContextProperty("SoundCache", DependencyManager::get<SoundCache>().data()); _webSurface->getSurfaceContext()->setContextProperty("SoundCache", DependencyManager::get<SoundCache>().data());
_webSurface->getSurfaceContext()->setContextProperty("pathToFonts", "../../"); _webSurface->getSurfaceContext()->setContextProperty("pathToFonts", "../../");
tabletScriptingInterface->setQmlTabletRoot("com.highfidelity.interface.tablet.system", _webSurface.data()); tabletScriptingInterface->setQmlTabletRoot("com.highfidelity.interface.tablet.system", _webSurface.data());
// mark the TabletProxy object as cpp ownership. // mark the TabletProxy object as cpp ownership.

View file

@ -151,11 +151,6 @@ glm::vec3 Avatar::getNeckPosition() const {
return _skeletonModel->getNeckPosition(neckPosition) ? neckPosition : getPosition(); return _skeletonModel->getNeckPosition(neckPosition) ? neckPosition : getPosition();
} }
glm::quat Avatar::getWorldAlignedOrientation () const {
return computeRotationFromBodyToWorldUp() * getOrientation();
}
AABox Avatar::getBounds() const { AABox Avatar::getBounds() const {
if (!_skeletonModel->isRenderable() || _skeletonModel->needsFixupInScene()) { if (!_skeletonModel->isRenderable() || _skeletonModel->needsFixupInScene()) {
// approximately 2m tall, scaled to user request. // approximately 2m tall, scaled to user request.
@ -436,6 +431,11 @@ void Avatar::slamPosition(const glm::vec3& newPosition) {
_lastVelocity = glm::vec3(0.0f); _lastVelocity = glm::vec3(0.0f);
} }
void Avatar::updateAttitude(const glm::quat& orientation) {
_skeletonModel->updateAttitude(orientation);
_worldUpDirection = orientation * Vectors::UNIT_Y;
}
void Avatar::applyPositionDelta(const glm::vec3& delta) { void Avatar::applyPositionDelta(const glm::vec3& delta) {
setPosition(getPosition() + delta); setPosition(getPosition() + delta);
_positionDeltaAccumulator += delta; _positionDeltaAccumulator += delta;
@ -628,22 +628,6 @@ void Avatar::render(RenderArgs* renderArgs) {
} }
} }
glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
glm::quat orientation = getOrientation();
glm::vec3 currentUp = orientation * IDENTITY_UP;
float angle = acosf(glm::clamp(glm::dot(currentUp, _worldUpDirection), -1.0f, 1.0f));
if (angle < EPSILON) {
return glm::quat();
}
glm::vec3 axis;
if (angle > 179.99f * RADIANS_PER_DEGREE) { // 180 degree rotation; must use another axis
axis = orientation * IDENTITY_RIGHT;
} else {
axis = glm::normalize(glm::cross(currentUp, _worldUpDirection));
}
return glm::angleAxis(angle * proportion, axis);
}
void Avatar::fixupModelsInScene(const render::ScenePointer& scene) { void Avatar::fixupModelsInScene(const render::ScenePointer& scene) {
_attachmentsToDelete.clear(); _attachmentsToDelete.clear();
@ -1401,14 +1385,14 @@ glm::quat Avatar::getUncachedRightPalmRotation() const {
return rightPalmRotation; return rightPalmRotation;
} }
void Avatar::setPosition(const glm::vec3& position) { void Avatar::setPositionViaScript(const glm::vec3& position) {
AvatarData::setPosition(position); setPosition(position);
updateAttitude(); updateAttitude(getOrientation());
} }
void Avatar::setOrientation(const glm::quat& orientation) { void Avatar::setOrientationViaScript(const glm::quat& orientation) {
AvatarData::setOrientation(orientation); setOrientation(orientation);
updateAttitude(); updateAttitude(orientation);
} }
void Avatar::updatePalms() { void Avatar::updatePalms() {

View file

@ -112,8 +112,6 @@ public:
const Head* getHead() const { return static_cast<const Head*>(_headData); } const Head* getHead() const { return static_cast<const Head*>(_headData); }
Head* getHead() { return static_cast<Head*>(_headData); } Head* getHead() { return static_cast<Head*>(_headData); }
glm::quat getWorldAlignedOrientation() const;
AABox getBounds() const; AABox getBounds() const;
/// Returns the distance to use as a LOD parameter. /// Returns the distance to use as a LOD parameter.
@ -184,7 +182,7 @@ public:
void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const; void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;
void slamPosition(const glm::vec3& position); void slamPosition(const glm::vec3& position);
virtual void updateAttitude() override { _skeletonModel->updateAttitude(); } virtual void updateAttitude(const glm::quat& orientation) override;
// Call this when updating Avatar position with a delta. This will allow us to // Call this when updating Avatar position with a delta. This will allow us to
// _accurately_ measure position changes and compute the resulting velocity // _accurately_ measure position changes and compute the resulting velocity
@ -197,10 +195,8 @@ public:
void getCapsule(glm::vec3& start, glm::vec3& end, float& radius); void getCapsule(glm::vec3& start, glm::vec3& end, float& radius);
float computeMass(); float computeMass();
using SpatiallyNestable::setPosition; void setPositionViaScript(const glm::vec3& position) override;
virtual void setPosition(const glm::vec3& position) override; void setOrientationViaScript(const glm::quat& orientation) override;
using SpatiallyNestable::setOrientation;
virtual void setOrientation(const glm::quat& orientation) override;
// these call through to the SpatiallyNestable versions, but they are here to expose these to javascript. // these call through to the SpatiallyNestable versions, but they are here to expose these to javascript.
Q_INVOKABLE virtual const QUuid getParentID() const override { return SpatiallyNestable::getParentID(); } Q_INVOKABLE virtual const QUuid getParentID() const override { return SpatiallyNestable::getParentID(); }
@ -303,7 +299,6 @@ protected:
glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; } glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
glm::vec3 getBodyUpDirection() const { return getOrientation() * IDENTITY_UP; } glm::vec3 getBodyUpDirection() const { return getOrientation() * IDENTITY_UP; }
glm::quat computeRotationFromBodyToWorldUp(float proportion = 1.0f) const;
void measureMotionDerivatives(float deltaTime); void measureMotionDerivatives(float deltaTime);
float getSkeletonHeight() const; float getSkeletonHeight() const;

View file

@ -118,16 +118,16 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
_rig.updateFromEyeParameters(eyeParams); _rig.updateFromEyeParameters(eyeParams);
} }
void SkeletonModel::updateAttitude() { void SkeletonModel::updateAttitude(const glm::quat& orientation) {
setTranslation(_owningAvatar->getSkeletonPosition()); setTranslation(_owningAvatar->getSkeletonPosition());
setRotation(_owningAvatar->getOrientation() * Quaternions::Y_180); setRotation(orientation * Quaternions::Y_180);
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale()); setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale());
} }
// Called by Avatar::simulate after it has set the joint states (fullUpdate true if changed), // Called by Avatar::simulate after it has set the joint states (fullUpdate true if changed),
// but just before head has been simulated. // but just before head has been simulated.
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) { void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
updateAttitude(); updateAttitude(_owningAvatar->getOrientation());
if (fullUpdate) { if (fullUpdate) {
setBlendshapeCoefficients(_owningAvatar->getHead()->getSummedBlendshapeCoefficients()); setBlendshapeCoefficients(_owningAvatar->getHead()->getSummedBlendshapeCoefficients());

View file

@ -35,7 +35,7 @@ public:
void simulate(float deltaTime, bool fullUpdate = true) override; void simulate(float deltaTime, bool fullUpdate = true) override;
void updateRig(float deltaTime, glm::mat4 parentTransform) override; void updateRig(float deltaTime, glm::mat4 parentTransform) override;
void updateAttitude(); void updateAttitude(const glm::quat& orientation);
/// Returns the index of the left hand joint, or -1 if not found. /// Returns the index of the left hand joint, or -1 if not found.
int getLeftHandJointIndex() const { return isActive() ? getFBXGeometry().leftHandJointIndex : -1; } int getLeftHandJointIndex() const { return isActive() ? getFBXGeometry().leftHandJointIndex : -1; }

View file

@ -91,9 +91,6 @@ AvatarData::AvatarData() :
_targetVelocity(0.0f), _targetVelocity(0.0f),
_density(DEFAULT_AVATAR_DENSITY) _density(DEFAULT_AVATAR_DENSITY)
{ {
setBodyPitch(0.0f);
setBodyYaw(-90.0f);
setBodyRoll(0.0f);
} }
AvatarData::~AvatarData() { AvatarData::~AvatarData() {
@ -110,23 +107,6 @@ const QUrl& AvatarData::defaultFullAvatarModelUrl() {
return _defaultFullAvatarModelUrl; return _defaultFullAvatarModelUrl;
} }
// There are a number of possible strategies for this set of tools through endRender, below.
void AvatarData::nextAttitude(glm::vec3 position, glm::quat orientation) {
bool success;
Transform trans = getTransform(success);
if (!success) {
qCWarning(avatars) << "Warning -- AvatarData::nextAttitude failed";
return;
}
trans.setTranslation(position);
trans.setRotation(orientation);
SpatiallyNestable::setTransform(trans, success);
if (!success) {
qCWarning(avatars) << "Warning -- AvatarData::nextAttitude failed";
}
updateAttitude();
}
void AvatarData::setTargetScale(float targetScale) { void AvatarData::setTargetScale(float targetScale) {
auto newValue = glm::clamp(targetScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE); auto newValue = glm::clamp(targetScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE);
if (_targetScale != newValue) { if (_targetScale != newValue) {
@ -2100,6 +2080,7 @@ void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
currentBasis = std::make_shared<Transform>(Transform::fromJson(json[JSON_AVATAR_BASIS])); currentBasis = std::make_shared<Transform>(Transform::fromJson(json[JSON_AVATAR_BASIS]));
} }
glm::quat orientation;
if (json.contains(JSON_AVATAR_RELATIVE)) { if (json.contains(JSON_AVATAR_RELATIVE)) {
// During playback you can either have the recording basis set to the avatar current state // During playback you can either have the recording basis set to the avatar current state
// meaning that all playback is relative to this avatars starting position, or // meaning that all playback is relative to this avatars starting position, or
@ -2111,12 +2092,14 @@ void AvatarData::fromJson(const QJsonObject& json, bool useFrameSkeleton) {
auto relativeTransform = Transform::fromJson(json[JSON_AVATAR_RELATIVE]); auto relativeTransform = Transform::fromJson(json[JSON_AVATAR_RELATIVE]);
auto worldTransform = currentBasis->worldTransform(relativeTransform); auto worldTransform = currentBasis->worldTransform(relativeTransform);
setPosition(worldTransform.getTranslation()); setPosition(worldTransform.getTranslation());
setOrientation(worldTransform.getRotation()); orientation = worldTransform.getRotation();
} else { } else {
// We still set the position in the case that there is no movement. // We still set the position in the case that there is no movement.
setPosition(currentBasis->getTranslation()); setPosition(currentBasis->getTranslation());
setOrientation(currentBasis->getRotation()); orientation = currentBasis->getRotation();
} }
setOrientation(orientation);
updateAttitude(orientation);
// Do after avatar orientation because head look-at needs avatar orientation. // Do after avatar orientation because head look-at needs avatar orientation.
if (json.contains(JSON_AVATAR_HEAD)) { if (json.contains(JSON_AVATAR_HEAD)) {
@ -2234,11 +2217,11 @@ void AvatarData::setBodyRoll(float bodyRoll) {
setOrientation(glm::quat(glm::radians(eulerAngles))); setOrientation(glm::quat(glm::radians(eulerAngles)));
} }
void AvatarData::setPosition(const glm::vec3& position) { void AvatarData::setPositionViaScript(const glm::vec3& position) {
SpatiallyNestable::setPosition(position); SpatiallyNestable::setPosition(position);
} }
void AvatarData::setOrientation(const glm::quat& orientation) { void AvatarData::setOrientationViaScript(const glm::quat& orientation) {
SpatiallyNestable::setOrientation(orientation); SpatiallyNestable::setOrientation(orientation);
} }

View file

@ -351,14 +351,14 @@ public:
class AvatarData : public QObject, public SpatiallyNestable { class AvatarData : public QObject, public SpatiallyNestable {
Q_OBJECT Q_OBJECT
Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPosition) Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPositionViaScript)
Q_PROPERTY(float scale READ getTargetScale WRITE setTargetScale) Q_PROPERTY(float scale READ getTargetScale WRITE setTargetScale)
Q_PROPERTY(glm::vec3 handPosition READ getHandPosition WRITE setHandPosition) Q_PROPERTY(glm::vec3 handPosition READ getHandPosition WRITE setHandPosition)
Q_PROPERTY(float bodyYaw READ getBodyYaw WRITE setBodyYaw) Q_PROPERTY(float bodyYaw READ getBodyYaw WRITE setBodyYaw)
Q_PROPERTY(float bodyPitch READ getBodyPitch WRITE setBodyPitch) Q_PROPERTY(float bodyPitch READ getBodyPitch WRITE setBodyPitch)
Q_PROPERTY(float bodyRoll READ getBodyRoll WRITE setBodyRoll) Q_PROPERTY(float bodyRoll READ getBodyRoll WRITE setBodyRoll)
Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientation) Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientationViaScript)
Q_PROPERTY(glm::quat headOrientation READ getHeadOrientation WRITE setHeadOrientation) Q_PROPERTY(glm::quat headOrientation READ getHeadOrientation WRITE setHeadOrientation)
Q_PROPERTY(float headPitch READ getHeadPitch WRITE setHeadPitch) Q_PROPERTY(float headPitch READ getHeadPitch WRITE setHeadPitch)
Q_PROPERTY(float headYaw READ getHeadYaw WRITE setHeadYaw) Q_PROPERTY(float headYaw READ getHeadYaw WRITE setHeadYaw)
@ -440,13 +440,10 @@ public:
float getBodyRoll() const; float getBodyRoll() const;
void setBodyRoll(float bodyRoll); void setBodyRoll(float bodyRoll);
using SpatiallyNestable::setPosition; virtual void setPositionViaScript(const glm::vec3& position);
virtual void setPosition(const glm::vec3& position) override; virtual void setOrientationViaScript(const glm::quat& orientation);
using SpatiallyNestable::setOrientation;
virtual void setOrientation(const glm::quat& orientation) override;
void nextAttitude(glm::vec3 position, glm::quat orientation); // Can be safely called at any time. virtual void updateAttitude(const glm::quat& orientation) {}
virtual void updateAttitude() {} // Tell skeleton mesh about changes
glm::quat getHeadOrientation() const { glm::quat getHeadOrientation() const {
lazyInitHeadData(); lazyInitHeadData();

View file

@ -54,6 +54,7 @@ void EntityEditPacketSender::queueEditAvatarEntityMessage(PacketType type,
EntityItemProperties entityProperties = entity->getProperties(); EntityItemProperties entityProperties = entity->getProperties();
entityProperties.merge(properties); entityProperties.merge(properties);
std::lock_guard<std::mutex> lock(_mutex);
QScriptValue scriptProperties = EntityItemNonDefaultPropertiesToScriptValue(&_scriptEngine, entityProperties); QScriptValue scriptProperties = EntityItemNonDefaultPropertiesToScriptValue(&_scriptEngine, entityProperties);
QVariant variantProperties = scriptProperties.toVariant(); QVariant variantProperties = scriptProperties.toVariant();
QJsonDocument jsonProperties = QJsonDocument::fromVariant(variantProperties); QJsonDocument jsonProperties = QJsonDocument::fromVariant(variantProperties);

View file

@ -14,6 +14,8 @@
#include <OctreeEditPacketSender.h> #include <OctreeEditPacketSender.h>
#include <mutex>
#include "EntityItem.h" #include "EntityItem.h"
#include "AvatarData.h" #include "AvatarData.h"
@ -49,6 +51,7 @@ private:
EntityItemID entityItemID, const EntityItemProperties& properties); EntityItemID entityItemID, const EntityItemProperties& properties);
private: private:
std::mutex _mutex;
AvatarData* _myAvatar { nullptr }; AvatarData* _myAvatar { nullptr };
QScriptEngine _scriptEngine; QScriptEngine _scriptEngine;
}; };

View file

@ -445,7 +445,7 @@ void CharacterController::handleChangedCollisionGroup() {
void CharacterController::updateUpAxis(const glm::quat& rotation) { void CharacterController::updateUpAxis(const glm::quat& rotation) {
_currentUp = quatRotate(glmToBullet(rotation), LOCAL_UP_AXIS); _currentUp = quatRotate(glmToBullet(rotation), LOCAL_UP_AXIS);
if (_state != State::Hover && _rigidBody) { if (_rigidBody) {
_rigidBody->setGravity(_gravity * _currentUp); _rigidBody->setGravity(_gravity * _currentUp);
} }
} }

View file

@ -127,7 +127,7 @@ private:
glm::mat4 _projection; glm::mat4 _projection;
// derived // derived
glm::vec3 _position; glm::vec3 _position { 0.0f, 0.0f, 0.0f };
glm::quat _orientation; glm::quat _orientation;
bool _isKeepLookingAt{ false }; bool _isKeepLookingAt{ false };
glm::vec3 _lookingAt; glm::vec3 _lookingAt;

View file

@ -483,6 +483,11 @@ bool TabletProxy::pushOntoStack(const QVariant& path) {
return result; return result;
} }
//set landscape off when pushing menu items while in Create mode
if (_landscape) {
setLandscape(false);
}
QObject* root = nullptr; QObject* root = nullptr;
if (!_toolbarMode && _qmlTabletRoot) { if (!_toolbarMode && _qmlTabletRoot) {
root = _qmlTabletRoot; root = _qmlTabletRoot;

View file

@ -26,11 +26,8 @@ Script.include([
"libraries/stringHelpers.js", "libraries/stringHelpers.js",
"libraries/dataViewHelpers.js", "libraries/dataViewHelpers.js",
"libraries/progressDialog.js", "libraries/progressDialog.js",
"libraries/entitySelectionTool.js", "libraries/entitySelectionTool.js",
"libraries/ToolTip.js", "libraries/ToolTip.js",
"libraries/entityCameraTool.js", "libraries/entityCameraTool.js",
"libraries/gridTool.js", "libraries/gridTool.js",
"libraries/entityList.js", "libraries/entityList.js",
@ -275,7 +272,8 @@ var toolBar = (function () {
properties.userData = JSON.stringify({ grabbableKey: { grabbable: true } }); properties.userData = JSON.stringify({ grabbableKey: { grabbable: true } });
} }
entityID = Entities.addEntity(properties); entityID = Entities.addEntity(properties);
if (properties.type == "ParticleEffect") {
if (properties.type === "ParticleEffect") {
selectParticleEntity(entityID); selectParticleEntity(entityID);
} }
@ -2229,10 +2227,9 @@ var particleExplorerTool = new ParticleExplorerTool();
var selectedParticleEntity = 0; var selectedParticleEntity = 0;
var selectedParticleEntityID = null; var selectedParticleEntityID = null;
function selectParticleEntity(entityID) { function selectParticleEntity(entityID) {
var properties = Entities.getEntityProperties(entityID); var properties = Entities.getEntityProperties(entityID);
selectedParticleEntityID = entityID;
if (properties.emitOrientation) { if (properties.emitOrientation) {
properties.emitOrientation = Quat.safeEulerAngles(properties.emitOrientation); properties.emitOrientation = Quat.safeEulerAngles(properties.emitOrientation);
} }
@ -2274,7 +2271,6 @@ entityListTool.webView.webEventReceived.connect(function (data) {
return; return;
} }
// Destroy the old particles web view first // Destroy the old particles web view first
selectParticleEntity(ids[0]);
} else { } else {
selectedParticleEntity = 0; selectedParticleEntity = 0;
particleExplorerTool.destroyWebView(); particleExplorerTool.destroyWebView();

View file

@ -40,9 +40,8 @@ function updateControllerDisplay() {
var button; var button;
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system"); var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
// Independent and Entity mode make people sick. Third Person and Mirror have traps that we need to work through. // Independent and Entity mode make people sick; disable them in hmd.
// Disable them in hmd. var desktopOnlyViews = ['Independent Mode', 'Entity Mode'];
var desktopOnlyViews = ['Mirror', 'Independent Mode', 'Entity Mode'];
function onHmdChanged(isHmd) { function onHmdChanged(isHmd) {
HMD.closeTablet(); HMD.closeTablet();

View file

@ -475,6 +475,15 @@ function unbindAllInputs() {
} }
} }
function clearSelection() {
if(document.selection && document.selection.empty) {
document.selection.empty();
} else if(window.getSelection) {
var sel = window.getSelection();
sel.removeAllRanges();
}
}
function loaded() { function loaded() {
openEventBridge(function() { openEventBridge(function() {
@ -1051,6 +1060,7 @@ function loaded() {
activeElement.select(); activeElement.select();
} }
} }
clearSelection();
} }
}); });
} }

View file

@ -1,34 +0,0 @@
//
// pUtils.js
//
// Created by Patrick Gosch on 03/28/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
getEntityTextures = function(id) {
var results = null;
var properties = Entities.getEntityProperties(id, "textures");
if (properties.textures) {
try {
results = JSON.parse(properties.textures);
} catch (err) {
logDebug(err);
logDebug(properties.textures);
}
}
return results ? results : {};
};
setEntityTextures = function(id, textureList) {
var json = JSON.stringify(textureList);
Entities.editEntity(id, {textures: json});
};
editEntityTextures = function(id, textureName, textureURL) {
var textureList = getEntityTextures(id);
textureList[textureName] = textureURL;
setEntityTextures(id, textureList);
};

View file

@ -9,10 +9,10 @@
// //
(function() { (function() {
Script.include(Script.resolvePath("pUtils.js")); var TIMEOUT = 50; // at 30 ms, the key's color sometimes fails to switch when hit
var TIMEOUT = 150; var TEXTURE_GRAY = Script.resolvePath("xylotex_bar_gray.png");
var TEXGRAY = Script.resolvePath("xylotex_bar_gray.png"); var TEXTURE_BLACK = Script.resolvePath("xylotex_bar_black.png");
var TEXBLACK = Script.resolvePath("xylotex_bar_black.png"); var IS_DEBUG = false;
var _this; var _this;
function XylophoneKey() { function XylophoneKey() {
@ -22,7 +22,7 @@
XylophoneKey.prototype = { XylophoneKey.prototype = {
sound: null, sound: null,
isWaiting: false, isWaiting: false,
homePos: null, homePosition: null,
injector: null, injector: null,
preload: function(entityID) { preload: function(entityID) {
@ -34,31 +34,66 @@
collisionWithEntity: function(thisEntity, otherEntity, collision) { collisionWithEntity: function(thisEntity, otherEntity, collision) {
if (collision.type === 0) { if (collision.type === 0) {
_this.hit(); _this.hit(otherEntity);
} }
}, },
clickDownOnEntity: function() { clickDownOnEntity: function(otherEntity) {
_this.hit(); _this.hit(otherEntity);
}, },
hit: function() { hit: function(otherEntity) {
if (!_this.isWaiting) { if (!_this.isWaiting) {
_this.isWaiting = true; _this.isWaiting = true;
_this.homePos = Entities.getEntityProperties(_this.entityID, ["position"]).position; _this.homePosition = Entities.getEntityProperties(_this.entityID, ["position"]).position;
_this.injector = Audio.playSound(_this.sound, {position: _this.homePos, volume: 1}); _this.injector = Audio.playSound(_this.sound, {position: _this.homePosition, volume: 1});
editEntityTextures(_this.entityID, "file5", TEXGRAY); _this.editEntityTextures(_this.entityID, "file5", TEXTURE_GRAY);
var HAPTIC_STRENGTH = 1;
var HAPTIC_DURATION = 20;
var userData = JSON.parse(Entities.getEntityProperties(otherEntity, 'userData').userData);
if (userData.hasOwnProperty('hand')){
Controller.triggerHapticPulse(HAPTIC_STRENGTH, HAPTIC_DURATION, userData.hand);
}
_this.timeout(); _this.timeout();
} }
}, },
timeout: function() { timeout: function() {
Script.setTimeout(function() { Script.setTimeout(function() {
editEntityTextures(_this.entityID, "file5", TEXBLACK); _this.editEntityTextures(_this.entityID, "file5", TEXTURE_BLACK);
_this.isWaiting = false; _this.isWaiting = false;
}, TIMEOUT); }, TIMEOUT);
},
getEntityTextures: function(id) {
var results = null;
var properties = Entities.getEntityProperties(id, "textures");
if (properties.textures) {
try {
results = JSON.parse(properties.textures);
} catch (err) {
if (IS_DEBUG) {
print(err);
print(properties.textures);
}
}
}
return results ? results : {};
},
setEntityTextures: function(id, textureList) {
var json = JSON.stringify(textureList);
Entities.editEntity(id, {textures: json});
},
editEntityTextures: function(id, textureName, textureURL) {
var textureList = _this.getEntityTextures(id);
textureList[textureName] = textureURL;
_this.setEntityTextures(id, textureList);
}
}; };
return new XylophoneKey(); return new XylophoneKey();
}); });

View file

@ -0,0 +1,25 @@
//
// xylophoneMallet.js
//
// Created by Johnathan Franck on 07/30/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
(function() {
    // Hand identifiers stored in the entity's userData so the key
    // script can target the correct controller.
    var LEFT_HAND = 0;
    var RIGHT_HAND = 1;

    function XylophoneMallet() {
    }

    XylophoneMallet.prototype = {
        // Called when the mallet is equipped; records which hand holds it
        // by merging a "hand" field into the entity's userData JSON.
        startEquip: function(entityID, args) {
            var properties = Entities.getEntityProperties(entityID, 'userData');
            var userData = JSON.parse(properties.userData);
            if (args[0] === "left") {
                userData.hand = LEFT_HAND;
            } else {
                userData.hand = RIGHT_HAND;
            }
            Entities.editEntity(entityID, {userData: JSON.stringify(userData)});
        }
    };

    return new XylophoneMallet();
});

View file

@ -8,65 +8,70 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// //
var soundFiles = ["C4.wav", "D4.wav", "E4.wav", "F4.wav", "G4.wav", "A4.wav", "B4.wav", "C5.wav"]; var SOUND_FILES = ["C4.wav", "D4.wav", "E4.wav", "F4.wav", "G4.wav", "A4.wav", "B4.wav", "C5.wav"];
var keyModelURL = Script.resolvePath("xyloKey_2_a_e.fbx"); var KEY_MODEL_URL = Script.resolvePath("xyloKey_2_a_e.fbx");
var keyScriptURL = Script.resolvePath("xylophoneKey.js"); var KEY_SCRIPT_URL = Script.resolvePath("xylophoneKey.js");
var TEXBLACK = Script.resolvePath("xylotex_bar_black.png"); var MALLET_SCRIPT_URL = Script.resolvePath("xylophoneMallet.js");
var malletModelURL = Script.resolvePath("Mallet3-2pc.fbx"); var TEXTURE_BLACK = Script.resolvePath("xylotex_bar_black.png");
var malletModelColliderURL = Script.resolvePath("Mallet3-2bpc_phys.obj"); var MALLET_MODEL_URL = Script.resolvePath("Mallet3-2pc.fbx");
var MALLET_MODEL_COLLIDER_URL = Script.resolvePath("Mallet3-2bpc_phys.obj");
var FORWARD = { x: 0, y: 0, z: -1 };
var center = MyAvatar.position; var center = MyAvatar.position;
var fwd = {x:0, y:0, z:-1};
var xyloFramePos = Vec3.sum(center, Vec3.multiply(fwd, 0.8)); var XYLOPHONE_FORWARD_OFFSET = 0.8;
var xyloFrameID = Entities.addEntity( { var xylophoneFramePosition = Vec3.sum(center, Vec3.multiply(FORWARD, XYLOPHONE_FORWARD_OFFSET));
var xylophoneFrameID = Entities.addEntity({
name: "Xylophone", name: "Xylophone",
type: "Model", type: "Model",
modelURL: Script.resolvePath("xylophoneFrameWithWave.fbx"), modelURL: Script.resolvePath("xylophoneFrameWithWave.fbx"),
position: xyloFramePos, position: xylophoneFramePosition,
rotation: Quat.fromVec3Radians({x:0, y:Math.PI, z:0}), rotation: Quat.fromVec3Radians({ x: 0, y: Math.PI, z: 0 }),
shapeType: "static-mesh" shapeType: "static-mesh"
}); });
center.y += (0.45); // key Y offset from frame var KEY_Y_OFFSET = 0.45;
var keyPos, keyRot, ud, td, keyID; center.y += KEY_Y_OFFSET;
for (var i = 1; i <= soundFiles.length; i++) { var keyPosition, keyRotation, userData, textureData, keyID;
var ROTATION_START = 0.9;
var ROTATION_DELTA = 0.2;
for (var i = 1; i <= SOUND_FILES.length; i++) {
keyRot = Quat.fromVec3Radians({x:0, y:(0.9 - (i*0.2)), z:0}); keyRotation = Quat.fromVec3Radians({ x: 0, y: ROTATION_START - (i*ROTATION_DELTA), z: 0 });
keyPos = Vec3.sum(center, Vec3.multiplyQbyV(keyRot, fwd)); keyPosition = Vec3.sum(center, Vec3.multiplyQbyV(keyRotation, FORWARD));
ud = { userData = {
soundFile: soundFiles[i-1] soundFile: SOUND_FILES[i-1]
}; };
td = { textureData = {
"file4": Script.resolvePath("xylotex_bar" + i + ".png"), "file4": Script.resolvePath("xylotex_bar" + i + ".png"),
"file5": TEXBLACK "file5": TEXTURE_BLACK
}; };
keyID = Entities.addEntity( { keyID = Entities.addEntity({
name: ("XyloKey" + i), name: ("XyloKey" + i),
type: "Model", type: "Model",
modelURL: keyModelURL, modelURL: KEY_MODEL_URL,
position: keyPos, position: keyPosition,
rotation: keyRot, rotation: keyRotation,
shapeType: "static-mesh", shapeType: "static-mesh",
script: keyScriptURL, script: KEY_SCRIPT_URL,
textures: JSON.stringify(td), textures: JSON.stringify(textureData),
userData: JSON.stringify(ud), userData: JSON.stringify(userData),
parentID: xyloFrameID parentID: xylophoneFrameID
} ); });
} }
// if rezzed on/above something, wait until after model has loaded so you can read its dimensions then move object on to that surface. // if rezzed on/above something, wait until after model has loaded so you can read its dimensions then move object on to that surface.
var pickRay = {origin: center, direction: {x:0, y:-1, z:0}}; var pickRay = {origin: center, direction: {x: 0, y: -1, z: 0}};
var intersection = Entities.findRayIntersection(pickRay, true); var intersection = Entities.findRayIntersection(pickRay, true);
if (intersection.intersects && (intersection.distance < 10)) { if (intersection.intersects && (intersection.distance < 10)) {
var surfaceY = intersection.intersection.y; var surfaceY = intersection.intersection.y;
Script.setTimeout( function() { Script.setTimeout( function() {
// should add loop to check for fbx loaded instead of delay // should add loop to check for fbx loaded instead of delay
var xyloDimensions = Entities.getEntityProperties(xyloFrameID, ["dimensions"]).dimensions; var xylophoneDimensions = Entities.getEntityProperties(xylophoneFrameID, ["dimensions"]).dimensions;
xyloFramePos.y = surfaceY + (xyloDimensions.y/2); xylophoneFramePosition.y = surfaceY + (xylophoneDimensions.y/2);
Entities.editEntity(xyloFrameID, {position: xyloFramePos}); Entities.editEntity(xylophoneFrameID, {position: xylophoneFramePosition});
rezMallets(); rezMallets();
}, 2000); }, 2000);
} else { } else {
@ -75,28 +80,50 @@ if (intersection.intersects && (intersection.distance < 10)) {
} }
function rezMallets() { function rezMallets() {
var malletProps = { var malletProperties = {
name: "Xylophone Mallet", name: "Xylophone Mallet",
type: "Model", type: "Model",
modelURL: malletModelURL, modelURL: MALLET_MODEL_URL,
compoundShapeURL: malletModelColliderURL, compoundShapeURL: MALLET_MODEL_COLLIDER_URL,
collidesWith: "static,dynamic,kinematic,", collidesWith: "static,dynamic,kinematic",
collisionMask: 7, collisionMask: 7,
collisionsWillMove: 1, collisionsWillMove: 1,
dynamic: 1, dynamic: 1,
damping: 1, damping: 1,
angularDamping: 1, angularDamping: 1,
shapeType: "compound", shapeType: "compound",
userData: "{\"grabbableKey\":{\"grabbable\":true}}", script: MALLET_SCRIPT_URL,
dimensions: {"x": 0.057845603674650192, "y": 0.057845607399940491, "z": 0.30429631471633911} // not being set from fbx for some reason. userData: JSON.stringify({
grabbableKey: {
invertSolidWhileHeld: true
},
wearable: {
joints: {
LeftHand: [
{ x: 0, y: 0.2, z: 0.04 },
Quat.fromVec3Degrees({ x: 0, y: 90, z: 90 })
],
RightHand: [
{ x: 0, y: 0.2, z: 0.04 },
Quat.fromVec3Degrees({ x: 0, y: 90, z: 90 })
]
}
}
}),
dimensions: { "x": 0.057845603674650192, "y": 0.057845607399940491, "z": 0.30429631471633911 } // not being set from fbx for some reason.
}; };
malletProps.position = Vec3.sum(xyloFramePos, {x: 0.1, y: 0.55, z: 0}); var LEFT_MALLET_POSITION = { x: 0.1, y: 0.55, z: 0 };
malletProps.rotation = Quat.fromVec3Radians({x:0, y:Math.PI - 0.1, z:0}); var LEFT_MALLET_ROTATION = { x: 0, y: Math.PI - 0.1, z: 0 };
Entities.addEntity(malletProps); var RIGHT_MALLET_POSITION = { x: -0.1, y: 0.55, z: 0 };
var RIGHT_MALLET_ROTATION = { x: 0, y: Math.PI + 0.1, z: 0 };
malletProps.position = Vec3.sum(xyloFramePos, {x: -0.1, y: 0.55, z: 0}); malletProperties.position = Vec3.sum(xylophoneFramePosition, LEFT_MALLET_POSITION);
malletProps.rotation = Quat.fromVec3Radians({x:0, y:Math.PI + 0.1, z:0}); malletProperties.rotation = Quat.fromVec3Radians(LEFT_MALLET_ROTATION);
Entities.addEntity(malletProps); Entities.addEntity(malletProperties);
malletProperties.position = Vec3.sum(xylophoneFramePosition, RIGHT_MALLET_POSITION);
malletProperties.rotation = Quat.fromVec3Radians(RIGHT_MALLET_ROTATION);
Entities.addEntity(malletProperties);
Script.stop(); Script.stop();
} }