Merge branch 'master' into sit_variants_and_fidgets

commit 37afabe866
10 changed files with 144 additions and 70 deletions

@@ -3610,10 +3610,9 @@ void Application::updateCamera(RenderArgs& renderArgs, float deltaTime) {
             mat4 camMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
             _myCamera.setPosition(extractTranslation(camMat));
             _myCamera.setOrientation(glmExtractRotation(camMat));
-        }
-        else {
-            _myCamera.setPosition(myAvatar->getDefaultEyePosition());
-            _myCamera.setOrientation(myAvatar->getMyHead()->getHeadOrientation());
+        } else {
+            _myCamera.setPosition(myAvatar->getLookAtPivotPoint());
+            _myCamera.setOrientation(myAvatar->getLookAtRotation());
         }
     } else if (mode == CAMERA_MODE_THIRD_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {
         if (isHMDMode()) {
@@ -3647,9 +3646,9 @@ void Application::updateCamera(RenderArgs& renderArgs, float deltaTime) {
             if (mode == CAMERA_MODE_SELFIE) {
                 lookAtRotation = lookAtRotation * glm::angleAxis(PI, myAvatar->getWorldOrientation() * Vectors::UP);
             }
-            _myCamera.setPosition(myAvatar->getDefaultEyePosition()
+            _myCamera.setPosition(myAvatar->getLookAtPivotPoint()
                 + lookAtRotation * boomOffset);
-            _myCamera.lookAt(myAvatar->getDefaultEyePosition());
+            _myCamera.lookAt(myAvatar->getLookAtPivotPoint());
            }
        }
    } else if (mode == CAMERA_MODE_MIRROR) {
@@ -3677,8 +3676,7 @@ void Application::updateCamera(RenderArgs& renderArgs, float deltaTime) {
                 + glm::vec3(0, _raiseMirror * myAvatar->getModelScale(), 0)
                 + mirrorBodyOrientation * glm::vec3(0.0f, 0.0f, 1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror
                 + mirrorBodyOrientation * hmdOffset);
-        }
-        else {
+        } else {
             auto userInputMapper = DependencyManager::get<UserInputMapper>();
             const float YAW_SPEED = TWO_PI / 5.0f;
             float deltaYaw = userInputMapper->getActionState(controller::Action::YAW) * YAW_SPEED * deltaTime;
@@ -3699,8 +3697,7 @@ void Application::updateCamera(RenderArgs& renderArgs, float deltaTime) {
             _myCamera.setOrientation(cameraEntity->getWorldOrientation() * hmdRotation);
             glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
             _myCamera.setPosition(cameraEntity->getWorldPosition() + (hmdRotation * hmdOffset));
-        }
-        else {
+        } else {
             _myCamera.setOrientation(cameraEntity->getWorldOrientation());
             _myCamera.setPosition(cameraEntity->getWorldPosition());
         }

@@ -958,7 +958,7 @@ void MyAvatar::simulate(float deltaTime, bool inView) {
         head->setScale(getModelScale());
         head->simulate(deltaTime);
         CameraMode mode = qApp->getCamera().getMode();
-        if (_scriptControlsHeadLookAt || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {
+        if (_scriptControlsHeadLookAt || mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {
             if (!_pointAtActive || !_isPointTargetValid) {
                 updateHeadLookAt(deltaTime);
             } else {
@@ -2718,7 +2718,8 @@ void MyAvatar::updateMotors() {
     if (_motionBehaviors & AVATAR_MOTION_ACTION_MOTOR_ENABLED) {
         if (_characterController.getState() == CharacterController::State::Hover ||
                 _characterController.computeCollisionMask() == BULLET_COLLISION_MASK_COLLISIONLESS) {
-            if (qApp->getCamera().getMode() == CAMERA_MODE_LOOK_AT || qApp->getCamera().getMode() == CAMERA_MODE_SELFIE) {
+            CameraMode mode = qApp->getCamera().getMode();
+            if (mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {
                 motorRotation = getLookAtRotation();
             } else {
                 motorRotation = getMyHead()->getHeadOrientation();
@@ -3442,8 +3443,10 @@ void MyAvatar::updateOrientation(float deltaTime) {
     // Smoothly rotate body with arrow keys
     float targetSpeed = getDriveKey(YAW) * _yawSpeed;
     CameraMode mode = qApp->getCamera().getMode();
-    bool computeLookAt = (mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) && isReadyForPhysics() && !qApp->isHMDMode();
-    if (computeLookAt) {
+    bool computeLookAt = isReadyForPhysics() && !qApp->isHMDMode() &&
+        (mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE);
+    bool smoothCameraYaw = computeLookAt && mode != CAMERA_MODE_FIRST_PERSON;
+    if (smoothCameraYaw) {
         // For "Look At" and "Selfie" camera modes we also smooth the yaw rotation from right-click mouse movement.
         float speedFromDeltaYaw = deltaTime > FLT_EPSILON ? getDriveKey(DELTA_YAW) / deltaTime : 0.0f;
         speedFromDeltaYaw *= _yawSpeed / YAW_SPEED_DEFAULT;
@@ -3472,7 +3475,7 @@ void MyAvatar::updateOrientation(float deltaTime) {
         }
     }
     float totalBodyYaw = _bodyYawDelta * deltaTime;
-    if (!computeLookAt) {
+    if (!smoothCameraYaw) {
         // Rotate directly proportional to delta yaw and delta pitch from right-click mouse movement.
         totalBodyYaw += getDriveKey(DELTA_YAW) * _yawSpeed / YAW_SPEED_DEFAULT;
     }
@@ -3563,12 +3566,16 @@ void MyAvatar::updateOrientation(float deltaTime) {
                 blend = 1.0f;
             }
             glm::quat faceRotation = _lookAtYaw;
-            if (isMovingFwdBwd && isMovingSideways) {
-                // Reorient avatar to face camera diagonal
-                blend = DIAGONAL_TURN_BLEND;
-                float turnSign = getDriveKey(TRANSLATE_Z) < 0.0f ? -1.0f : 1.0f;
-                turnSign = getDriveKey(TRANSLATE_X) > 0.0f ? -turnSign : turnSign;
-                faceRotation = _lookAtYaw * glm::angleAxis(turnSign * 0.25f * PI, Vectors::UP);
+            if (isMovingFwdBwd) {
+                if (isMovingSideways) {
+                    // Reorient avatar to face camera diagonal
+                    blend = mode == CAMERA_MODE_FIRST_PERSON ? 1.0f : DIAGONAL_TURN_BLEND;
+                    float turnSign = getDriveKey(TRANSLATE_Z) < 0.0f ? -1.0f : 1.0f;
+                    turnSign = getDriveKey(TRANSLATE_X) > 0.0f ? -turnSign : turnSign;
+                    faceRotation = _lookAtYaw * glm::angleAxis(turnSign * 0.25f * PI, Vectors::UP);
+                } else if (mode == CAMERA_MODE_FIRST_PERSON) {
+                    blend = 1.0f;
+                }
             }
             setWorldOrientation(glm::slerp(getWorldOrientation(), faceRotation, blend));
         } else if (isRotatingWhileSeated) {
@@ -3630,20 +3637,32 @@ void MyAvatar::updateOrientation(float deltaTime) {
         glm::vec3 avatarVectorRight = getWorldOrientation() * Vectors::RIGHT;
         float leftRightDot = glm::dot(cameraYawVector, avatarVectorRight);

-        const float REORIENT_ANGLE = 65.0f;
+        const float DEFAULT_REORIENT_ANGLE = 65.0f;
+        const float FIRST_PERSON_REORIENT_ANGLE = 95.0f;
         const float TRIGGER_REORIENT_ANGLE = 45.0f;
+        const float FIRST_PERSON_TRIGGER_REORIENT_ANGLE = 65.0f;
         glm::vec3 ajustedYawVector = cameraYawVector;
-        if (frontBackDot < 0.0f) {
-            ajustedYawVector = (leftRightDot < 0.0f ? -avatarVectorRight : avatarVectorRight);
-            cameraVector = (ajustedYawVector * _lookAtPitch) * Vectors::FRONT;
+        float limitAngle = 0.0f;
+        float triggerAngle = -glm::sin(glm::radians(TRIGGER_REORIENT_ANGLE));
+        if (mode == CAMERA_MODE_FIRST_PERSON) {
+            limitAngle = glm::sin(glm::radians(90.0f - FIRST_PERSON_TRIGGER_REORIENT_ANGLE));
+            triggerAngle = limitAngle;
+        }
+        float reorientAngle = mode == CAMERA_MODE_FIRST_PERSON ? FIRST_PERSON_REORIENT_ANGLE : DEFAULT_REORIENT_ANGLE;
+        if (frontBackDot < limitAngle) {
+            if (frontBackDot < 0.0f) {
+                ajustedYawVector = (leftRightDot < 0.0f ? -avatarVectorRight : avatarVectorRight);
+                cameraVector = (ajustedYawVector * _lookAtPitch) * Vectors::FRONT;
+            }
             if (!isRotatingWhileSeated) {
-                if (frontBackDot < -glm::sin(glm::radians(TRIGGER_REORIENT_ANGLE))) {
+                if (frontBackDot < triggerAngle) {
                     _shouldTurnToFaceCamera = true;
+                    _firstPersonSteadyHeadTimer = 0.0f;
                 }
             } else {
                 setWorldOrientation(previousOrientation);
             }
-        } else if (frontBackDot > glm::sin(glm::radians(REORIENT_ANGLE))) {
+        } else if (frontBackDot > glm::sin(glm::radians(reorientAngle))) {
             _shouldTurnToFaceCamera = false;
         }

@@ -3664,6 +3683,22 @@ void MyAvatar::updateOrientation(float deltaTime) {
                 _lookAtCameraTarget = targetPoint;
             }
             _headLookAtActive = true;
+            const float FIRST_PERSON_RECENTER_SECONDS = 15.0f;
+            if (mode == CAMERA_MODE_FIRST_PERSON) {
+                if (getDriveKey(YAW) + getDriveKey(STEP_YAW) + getDriveKey(DELTA_YAW) == 0.0f) {
+                    if (_firstPersonSteadyHeadTimer < FIRST_PERSON_RECENTER_SECONDS) {
+                        if (_firstPersonSteadyHeadTimer > 0.0f) {
+                            _firstPersonSteadyHeadTimer += deltaTime;
+                        }
+                    } else {
+                        _shouldTurnToFaceCamera = true;
+                        _firstPersonSteadyHeadTimer = 0.0f;
+                    }
+                } else {
+                    _firstPersonSteadyHeadTimer = deltaTime;
+                }
+            }
+
         } else {
             head->setBaseYaw(0.0f);
             head->setBasePitch(getHead()->getBasePitch() + getDriveKey(PITCH) * _pitchSpeed * deltaTime
@@ -3736,7 +3771,8 @@ glm::vec3 MyAvatar::scaleMotorSpeed(const glm::vec3 forward, const glm::vec3 rig
     } else {
         // Desktop mode.
         direction = (zSpeed * forward) + (xSpeed * right);
-        if (qApp->getCamera().getMode() == CAMERA_MODE_LOOK_AT && zSpeed != 0.0f && xSpeed != 0.0f){
+        CameraMode mode = qApp->getCamera().getMode();
+        if ((mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_SELFIE) && zSpeed != 0.0f && xSpeed != 0.0f){
             direction = (zSpeed * forward);
         }

@@ -5399,7 +5435,7 @@ glm::quat MyAvatar::getOrientationForAudio() {
         case AudioListenerMode::FROM_HEAD: {
             // Using the camera's orientation instead, when the current mode is controlling the avatar's head.
             CameraMode mode = qApp->getCamera().getMode();
-            bool headFollowsCamera = mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE;
+            bool headFollowsCamera = mode == CAMERA_MODE_FIRST_PERSON || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE;
             result = headFollowsCamera ? qApp->getCamera().getOrientation() : getHead()->getFinalOrientationInWorldFrame();
             break;
         }
@@ -6768,6 +6804,12 @@ void MyAvatar::setHeadLookAt(const glm::vec3& lookAtTarget) {
     _lookAtScriptTarget = lookAtTarget;
 }

+glm::vec3 MyAvatar::getLookAtPivotPoint() {
+    glm::vec3 avatarUp = getWorldOrientation() * Vectors::UP;
+    glm::vec3 yAxisEyePosition = getWorldPosition() + avatarUp * glm::dot(avatarUp, _skeletonModel->getDefaultEyeModelPosition());
+    return yAxisEyePosition;
+}
+
 bool MyAvatar::setPointAt(const glm::vec3& pointAtTarget) {
     if (QThread::currentThread() != thread()) {
         bool result = false;
@@ -6794,3 +6836,4 @@ void MyAvatar::resetPointAt() {
                                                         POINT_BLEND_LINEAR_ALPHA_NAME, POINT_ALPHA_BLENDING);
     }
 }
+

@@ -1906,6 +1906,7 @@ public:
     void debugDrawPose(controller::Action action, const char* channelName, float size);

     bool getIsJointOverridden(int jointIndex) const;
+    glm::vec3 getLookAtPivotPoint();

 public slots:

@@ -2663,6 +2664,7 @@ private:
     bool _shouldTurnToFaceCamera { false };
     bool _scriptControlsHeadLookAt { false };
     float _scriptHeadControlTimer { 0.0f };
+    float _firstPersonSteadyHeadTimer { 0.0f };
     bool _pointAtActive { false };
     bool _isPointTargetValid { true };

@@ -282,9 +282,9 @@ void AudioDeviceList::onDevicesChanged(const QList<HifiAudioDeviceInfo>& devices

         if (deviceInfo.isDefault()) {
             if (deviceInfo.getMode() == QAudio::AudioInput) {
-                device.display = "Default microphone (recommended)";
+                device.display = "Computer's default microphone (recommended)";
             } else {
-                device.display = "Default audio (recommended)";
+                device.display = "Computer's default audio (recommended)";
             }
         } else {
             device.display = device.info.deviceName()

@@ -494,7 +494,9 @@ HifiAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
             waveInGetDevCaps(WAVE_MAPPER, &wic, sizeof(wic));
             //Use the received manufacturer id to get the device's real name
             waveInGetDevCaps(wic.wMid, &wic, sizeof(wic));
+#if !defined(NDEBUG)
             qCDebug(audioclient) << "input device:" << wic.szPname;
+#endif
             deviceName = wic.szPname;
         } else {
             WAVEOUTCAPS woc;
@@ -502,7 +504,9 @@ HifiAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
             waveOutGetDevCaps(WAVE_MAPPER, &woc, sizeof(woc));
             //Use the received manufacturer id to get the device's real name
             waveOutGetDevCaps(woc.wMid, &woc, sizeof(woc));
+#if !defined(NDEBUG)
             qCDebug(audioclient) << "output device:" << woc.szPname;
+#endif
             deviceName = woc.szPname;
         }
     } else {
@@ -532,10 +536,10 @@ HifiAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
             break;
         }
     }

+#if !defined(NDEBUG)
     qCDebug(audioclient) << "defaultAudioDeviceForMode mode: " << (mode == QAudio::AudioOutput ? "Output" : "Input")
         << " [" << deviceName << "] [" << foundDevice.deviceName() << "]";
-
+#endif
     return foundDevice;
 #endif
-

@@ -92,22 +92,23 @@ void Head::simulate(float deltaTime) {
     } else if (_timeWithoutTalking - deltaTime < BLINK_AFTER_TALKING && _timeWithoutTalking >= BLINK_AFTER_TALKING) {
         forceBlink = true;
     }

     if (_leftEyeBlinkVelocity == 0.0f && _rightEyeBlinkVelocity == 0.0f) {
         // no blinking when brows are raised; blink less with increasing loudness
         const float BASE_BLINK_RATE = 15.0f / 60.0f;
         const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
         if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
             ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
-            _leftEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
-            _rightEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
+            float randSpeedVariability = randFloat();
+            float eyeBlinkVelocity = BLINK_SPEED + randSpeedVariability * BLINK_SPEED_VARIABILITY;
+            _leftEyeBlinkVelocity = eyeBlinkVelocity;
+            _rightEyeBlinkVelocity = eyeBlinkVelocity;
             if (randFloat() < 0.5f) {
                 _leftEyeBlink = BLINK_START_VARIABILITY;
             } else {
                 _rightEyeBlink = BLINK_START_VARIABILITY;
             }
         }
     } else {

         _leftEyeBlink = glm::clamp(_leftEyeBlink + _leftEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
         _rightEyeBlink = glm::clamp(_rightEyeBlink + _rightEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);

@@ -234,14 +234,38 @@ const JSONCallbackParameters& AddressManager::apiCallbackParameters() {
     return callbackParams;
 }

-bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
+bool AddressManager::handleUrl(const QUrl& lookupUrlIn, LookupTrigger trigger) {
     static QString URL_TYPE_USER = "user";
     static QString URL_TYPE_DOMAIN_ID = "domain_id";
     static QString URL_TYPE_PLACE = "place";
     static QString URL_TYPE_NETWORK_ADDRESS = "network_address";
-    if (lookupUrl.scheme() == URL_SCHEME_HIFI) {
-
-        qCDebug(networking) << "Trying to go to URL" << lookupUrl.toString();
+    QUrl lookupUrl = lookupUrlIn;
+
+    qCDebug(networking) << "Trying to go to URL" << lookupUrl.toString();
+
+    if (lookupUrl.scheme().isEmpty() && !lookupUrl.path().startsWith("/")) {
+        // 'urls' without schemes are taken as domain names, as opposed to
+        // simply a path portion of a url, so we need to set the scheme
+        lookupUrl.setScheme(URL_SCHEME_HIFI);
+    }
+
+    static const QRegExp PORT_REGEX = QRegExp("\\d{1,5}(\\/.*)?");
+    if(!lookupUrl.scheme().isEmpty() && lookupUrl.host().isEmpty() && PORT_REGEX.exactMatch(lookupUrl.path())) {
+        // this is in the form somewhere:<port>, convert it to hifi://somewhere:<port>
+        lookupUrl = QUrl(URL_SCHEME_HIFI + "://" + lookupUrl.toString());
+    }
+    // it should be noted that url's in the form
+    // somewhere:<port> are not valid, as that
+    // would indicate that the scheme is 'somewhere'
+    // use hifi://somewhere:<port> instead
+
+    if (lookupUrl.scheme() == URL_SCHEME_HIFI) {
+        if (lookupUrl.host().isEmpty()) {
+            // this was in the form hifi:/somewhere or hifi:somewhere. Fix it by making it hifi://somewhere
+            static const QRegExp HIFI_SCHEME_REGEX = QRegExp(URL_SCHEME_HIFI + ":\\/?", Qt::CaseInsensitive);
+            lookupUrl = QUrl(lookupUrl.toString().replace(HIFI_SCHEME_REGEX, URL_SCHEME_HIFI + "://"));
+        }
+
         DependencyManager::get<NodeList>()->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::LookupAddress);

@@ -379,25 +403,11 @@ bool isPossiblePlaceName(QString possiblePlaceName) {
 }

 void AddressManager::handleLookupString(const QString& lookupString, bool fromSuggestions) {
     if (!lookupString.isEmpty()) {
-
-        QString sanitizedString = lookupString.trimmed();
-        if (!sanitizedString.isEmpty()) {
+        // make this a valid hifi URL and handle it off to handleUrl
+        QString sanitizedString = lookupString.trimmed();
-            QUrl lookupURL;
-
-            if (!lookupString.startsWith('/')) {
-                // sometimes we need to handle lookupStrings like hifi:/somewhere
-                const QRegExp HIFI_SCHEME_REGEX = QRegExp(URL_SCHEME_HIFI + ":\\/{1,2}", Qt::CaseInsensitive);
-                sanitizedString = sanitizedString.remove(HIFI_SCHEME_REGEX);
-
-                lookupURL = QUrl(sanitizedString);
-                if (lookupURL.scheme().isEmpty() || lookupURL.scheme().toLower() == LOCALHOST) {
-                    lookupURL = QUrl("hifi://" + sanitizedString);
-                }
-            } else {
-                lookupURL = QUrl(sanitizedString);
-            }
+
-            handleUrl(lookupURL, fromSuggestions ? Suggestions : UserInput);
+        handleUrl(sanitizedString, fromSuggestions ? Suggestions : UserInput);
-        }
     }
 }

@@ -29,6 +29,8 @@
 #include <shared/QtHelpers.h>
 #include <Gzip.h>
+
+#include <future>

 using Promise = MiniPromise::Promise;

 AssetScriptingInterface::AssetScriptingInterface(QObject* parent) : BaseAssetScriptingInterface(parent) {
@@ -38,6 +40,25 @@ AssetScriptingInterface::AssetScriptingInterface(QObject* parent) : BaseAssetScr

 #define JS_VERIFY(cond, error) { if (!this->jsVerify(cond, error)) { return; } }

+bool AssetScriptingInterface::initializeCache() {
+    if (!Parent::initializeCache()) {
+        if (assetClient()) {
+            std::promise<bool> cacheStatusResult;
+            Promise assetClientPromise(makePromise(__func__));
+            assetClientPromise->moveToThread(qApp->thread()); // To ensure the finally() is processed.
+
+            assetClient()->cacheInfoRequestAsync(assetClientPromise);
+            assetClientPromise->finally([&](QString, QVariantMap result)
+                { cacheStatusResult.set_value(!result.isEmpty()); });
+            return cacheStatusResult.get_future().get();
+        } else {
+            return false;
+        }
+    } else {
+        return true;
+    }
+}
+
 void AssetScriptingInterface::uploadData(QString data, QScriptValue callback) {
     auto handler = jsBindCallback(thisObject(), callback);
     QByteArray dataByteArray = data.toUtf8();

@@ -356,7 +356,7 @@ public:
      * @function Assets.initializeCache
      * @returns {boolean} <code>true</code> if the cache is initialized, <code>false</code> if it isn't.
      */
-    Q_INVOKABLE bool initializeCache() { return Parent::initializeCache(); }
+    Q_INVOKABLE bool initializeCache();

     /**jsdoc
      * Checks whether the script can write to the cache.

@@ -296,7 +296,7 @@ function updateOutputDeviceMutedOverlay(isMuted) {
     props.y = Window.innerHeight / 2 - overlayDims / 2;

     var outputDeviceMutedOverlayBottomY = props.y + overlayDims;
-    var inputDeviceMutedOverlayTopY = getInputDeviceMutedOverlayTopY();
+    var inputDeviceMutedOverlayTopY = INPUT_DEVICE_MUTED_MARGIN_TOP_PX;
     if (outputDeviceMutedOverlayBottomY + OUTPUT_DEVICE_MUTED_MARGIN_BOTTOM_PX > inputDeviceMutedOverlayTopY) {
         overlayDims = 2 * (inputDeviceMutedOverlayTopY - Window.innerHeight / 2 - OUTPUT_DEVICE_MUTED_MARGIN_BOTTOM_PX);
     }
@@ -473,15 +473,11 @@ function maybeDeleteInputDeviceMutedOverlay() {
 }


-function getInputDeviceMutedOverlayTopY() {
-    return (Window.innerHeight - INPUT_DEVICE_MUTED_OVERLAY_DEFAULT_Y_PX - INPUT_DEVICE_MUTED_MARGIN_BOTTOM_PX);
-}
-
-
 var inputDeviceMutedOverlay = false;
-var INPUT_DEVICE_MUTED_OVERLAY_DEFAULT_X_PX = 353;
-var INPUT_DEVICE_MUTED_OVERLAY_DEFAULT_Y_PX = 95;
-var INPUT_DEVICE_MUTED_MARGIN_BOTTOM_PX = 20 + TOP_BAR_HEIGHT_PX;
+var INPUT_DEVICE_MUTED_OVERLAY_DEFAULT_X_PX = 237;
+var INPUT_DEVICE_MUTED_OVERLAY_DEFAULT_Y_PX = 64;
+var INPUT_DEVICE_MUTED_MARGIN_LEFT_PX = 20;
+var INPUT_DEVICE_MUTED_MARGIN_TOP_PX = 20;
 function updateInputDeviceMutedOverlay(isMuted) {
     if (isMuted) {
         var props = {
@@ -490,8 +486,8 @@ function updateInputDeviceMutedOverlay(isMuted) {
         };
         props.width = INPUT_DEVICE_MUTED_OVERLAY_DEFAULT_X_PX;
         props.height = INPUT_DEVICE_MUTED_OVERLAY_DEFAULT_Y_PX;
-        props.x = Window.innerWidth / 2 - INPUT_DEVICE_MUTED_OVERLAY_DEFAULT_X_PX / 2;
-        props.y = getInputDeviceMutedOverlayTopY();
+        props.x = INPUT_DEVICE_MUTED_MARGIN_LEFT_PX;
+        props.y = INPUT_DEVICE_MUTED_MARGIN_TOP_PX;
         if (inputDeviceMutedOverlay) {
             Overlays.editOverlay(inputDeviceMutedOverlay, props);
         } else {