Merge branch 'master' into tablet_home_paginate

This commit is contained in:
vladest 2017-12-08 19:59:41 +01:00
commit 3aaf666295
55 changed files with 1709 additions and 795 deletions

View file

@ -870,8 +870,8 @@ AvatarMixerClientData* AvatarMixer::getOrCreateClientData(SharedNodePointer node
node->setLinkedData(std::unique_ptr<NodeData> { new AvatarMixerClientData(node->getUUID()) });
clientData = dynamic_cast<AvatarMixerClientData*>(node->getLinkedData());
auto& avatar = clientData->getAvatar();
avatar.setDomainMinimumScale(_domainMinimumScale);
avatar.setDomainMaximumScale(_domainMaximumScale);
avatar.setDomainMinimumHeight(_domainMinimumHeight);
avatar.setDomainMaximumHeight(_domainMaximumHeight);
}
return clientData;
@ -939,21 +939,21 @@ void AvatarMixer::parseDomainServerSettings(const QJsonObject& domainSettings) {
const QString AVATARS_SETTINGS_KEY = "avatars";
static const QString MIN_SCALE_OPTION = "min_avatar_scale";
float settingMinScale = domainSettings[AVATARS_SETTINGS_KEY].toObject()[MIN_SCALE_OPTION].toDouble(MIN_AVATAR_SCALE);
_domainMinimumScale = glm::clamp(settingMinScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE);
static const QString MIN_HEIGHT_OPTION = "min_avatar_height";
float settingMinHeight = domainSettings[AVATARS_SETTINGS_KEY].toObject()[MIN_HEIGHT_OPTION].toDouble(MIN_AVATAR_HEIGHT);
_domainMinimumHeight = glm::clamp(settingMinHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
static const QString MAX_SCALE_OPTION = "max_avatar_scale";
float settingMaxScale = domainSettings[AVATARS_SETTINGS_KEY].toObject()[MAX_SCALE_OPTION].toDouble(MAX_AVATAR_SCALE);
_domainMaximumScale = glm::clamp(settingMaxScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE);
static const QString MAX_HEIGHT_OPTION = "max_avatar_height";
float settingMaxHeight = domainSettings[AVATARS_SETTINGS_KEY].toObject()[MAX_HEIGHT_OPTION].toDouble(MAX_AVATAR_HEIGHT);
_domainMaximumHeight = glm::clamp(settingMaxHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
// make sure that the domain owner didn't flip min and max
if (_domainMinimumScale > _domainMaximumScale) {
std::swap(_domainMinimumScale, _domainMaximumScale);
if (_domainMinimumHeight > _domainMaximumHeight) {
std::swap(_domainMinimumHeight, _domainMaximumHeight);
}
qCDebug(avatars) << "This domain requires a minimum avatar scale of" << _domainMinimumScale
<< "and a maximum avatar scale of" << _domainMaximumScale;
qCDebug(avatars) << "This domain requires a minimum avatar height of" << _domainMinimumHeight
<< "and a maximum avatar height of" << _domainMaximumHeight;
const QString AVATAR_WHITELIST_DEFAULT{ "" };
static const QString AVATAR_WHITELIST_OPTION = "avatar_whitelist";

View file

@ -90,8 +90,8 @@ private:
float _maxKbpsPerNode = 0.0f;
float _domainMinimumScale { MIN_AVATAR_SCALE };
float _domainMaximumScale { MAX_AVATAR_SCALE };
float _domainMinimumHeight { MIN_AVATAR_HEIGHT };
float _domainMaximumHeight { MAX_AVATAR_HEIGHT };
RateCounter<> _broadcastRate;
p_high_resolution_clock::time_point _lastDebugMessage;

View file

@ -1,5 +1,5 @@
{
"version": 2.0,
"version": 2.1,
"settings": [
{
"name": "label",
@ -1015,20 +1015,20 @@
"assignment-types": [ 1, 2 ],
"settings": [
{
"name": "min_avatar_scale",
"name": "min_avatar_height",
"type": "double",
"label": "Minimum Avatar Scale",
"help": "Limits the scale of avatars in your domain. Must be at least 0.005.",
"placeholder": 0.25,
"default": 0.25
"label": "Minimum Avatar Height (meters)",
"help": "Limits the height of avatars in your domain. Must be at least 0.009.",
"placeholder": 0.4,
"default": 0.4
},
{
"name": "max_avatar_scale",
"name": "max_avatar_height",
"type": "double",
"label": "Maximum Avatar Scale",
"help": "Limits the scale of avatars in your domain. Cannot be greater than 1000.",
"placeholder": 3.0,
"default": 3.0
"label": "Maximum Avatar Height (meters)",
"help": "Limits the scale of avatars in your domain. Cannot be greater than 1755.",
"placeholder": 5.2,
"default": 5.2
},
{
"name": "avatar_whitelist",

View file

@ -304,6 +304,26 @@ void DomainServerSettingsManager::setupConfigMap(const QStringList& argumentList
*wizardCompletedOnce = QVariant(true);
}
if (oldVersion < 2.1) {
// convert old avatar scale settings into avatar height.
const QString AVATAR_MIN_SCALE_KEYPATH = "avatars.min_avatar_scale";
const QString AVATAR_MAX_SCALE_KEYPATH = "avatars.max_avatar_scale";
const QString AVATAR_MIN_HEIGHT_KEYPATH = "avatars.min_avatar_height";
const QString AVATAR_MAX_HEIGHT_KEYPATH = "avatars.max_avatar_height";
QVariant* avatarMinScale = _configMap.valueForKeyPath(AVATAR_MIN_SCALE_KEYPATH);
if (avatarMinScale) {
float scale = avatarMinScale->toFloat();
_configMap.valueForKeyPath(AVATAR_MIN_HEIGHT_KEYPATH, scale * DEFAULT_AVATAR_HEIGHT);
}
QVariant* avatarMaxScale = _configMap.valueForKeyPath(AVATAR_MAX_SCALE_KEYPATH);
if (avatarMaxScale) {
float scale = avatarMaxScale->toFloat();
_configMap.valueForKeyPath(AVATAR_MAX_HEIGHT_KEYPATH, scale * DEFAULT_AVATAR_HEIGHT);
}
}
// write the current description version to our settings
*versionVariant = _descriptionVersion;

View file

@ -13,11 +13,11 @@
{ "from": "OculusTouch.LY", "to": "Standard.LY",
"filters": [
{ "type": "deadZone", "min": 0.3 },
{ "type": "deadZone", "min": 0.7 },
"invert"
]
},
{ "from": "OculusTouch.LX", "filters": { "type": "deadZone", "min": 0.3 }, "to": "Standard.LX" },
{ "from": "OculusTouch.LX", "filters": { "type": "deadZone", "min": 0.7 }, "to": "Standard.LX" },
{ "from": "OculusTouch.LT", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
@ -29,11 +29,11 @@
{ "from": "OculusTouch.RY", "to": "Standard.RY",
"filters": [
{ "type": "deadZone", "min": 0.3 },
{ "type": "deadZone", "min": 0.7 },
"invert"
]
},
{ "from": "OculusTouch.RX", "filters": { "type": "deadZone", "min": 0.3 }, "to": "Standard.RX" },
{ "from": "OculusTouch.RX", "filters": { "type": "deadZone", "min": 0.7 }, "to": "Standard.RX" },
{ "from": "OculusTouch.RT", "to": "Standard.RTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]

View file

@ -0,0 +1,634 @@
//
// AudioScope.qml
//
// Created by Luis Cuenca on 11/22/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "styles-uit"
import "controls-uit" as HifiControlsUit
// Root item for the Audio Scope tool: renders live input/output audio traces
// on a canvas, and provides pause, auto-trigger, hold-to-measure and .wav
// recording controls. Backed by the C++ AudioScope / Audio / Recording APIs.
Item {
    id: root
    width: parent.width
    height: parent.height

    // Latest averaged scope samples per channel (refreshed every _refreshMs).
    property var _scopeInputData
    property var _scopeOutputLeftData
    property var _scopeOutputRightData
    // Frozen copies of the traces captured when the auto-trigger fired.
    property var _triggerInputData
    property var _triggerOutputLeftData
    property var _triggerOutputRightData
    // Canvas coordinates of the trigger crosshair.
    property var _triggerValues: QtObject{
        property int x: parent.width/2
        property int y: parent.height/3
    }
    property var _triggered: false
    // Number of vertical grid divisions drawn (tracks frames per scope: 5/20/50).
    property var _steps
    // UI refresh period in milliseconds.
    property var _refreshMs: 32
    property var _framesPerSecond: AudioScope.getFramesPerSecond()
    // true: time axis labeled in frames; false: in milliseconds.
    property var _isFrameUnits: true
    // Start/end points of the press-and-hold measurement region.
    property var _holdStart: QtObject{
        property int x: 0
        property int y: 0
    }
    property var _holdEnd: QtObject{
        property int x: 0
        property int y: 0
    }
    // A press longer than this (ms) becomes a measurement hold.
    property var _timeBeforeHold: 300
    property var _pressedTime: 0
    property var _isPressed: false
    // Blinking "REC" indicator opacity and its per-tick delta.
    property var _recOpacity : 0.0
    property var _recSign : 0.05
    // Saved output-channel checkbox states while recording forces both on.
    property var _outputLeftState: false
    property var _outputRightState: false
    property var _wavFilePath: ""

    // True once the current press has lasted long enough to count as a hold.
    function isHolding() {
        return (_pressedTime > _timeBeforeHold);
    }

    // Relabel the time-axis controls for frame or millisecond units.
    function updateMeasureUnits() {
        timeButton.text = _isFrameUnits ? "Display Frames" : "Milliseconds";
        fiveLabel.text = _isFrameUnits ? "5" : "" + (Math.round(1000 * 5.0/_framesPerSecond));
        twentyLabel.text = _isFrameUnits ? "20" : "" + (Math.round(1000 * 20.0/_framesPerSecond));
        fiftyLabel.text = _isFrameUnits ? "50" : "" + (Math.round(1000 * 50.0/_framesPerSecond));
    }

    // Pull the latest live trace data for every enabled channel.
    function collectScopeData() {
        if (inputCh.checked) {
            _scopeInputData = AudioScope.scopeInput;
        }
        if (outputLeftCh.checked) {
            _scopeOutputLeftData = AudioScope.scopeOutputLeft;
        }
        if (outputRightCh.checked) {
            _scopeOutputRightData = AudioScope.scopeOutputRight;
        }
    }

    // Pull the trigger-time snapshots for every enabled channel.
    function collectTriggerData() {
        if (inputCh.checked) {
            _triggerInputData = AudioScope.triggerInput;
        }
        if (outputLeftCh.checked) {
            _triggerOutputLeftData = AudioScope.triggerOutputLeft;
        }
        if (outputRightCh.checked) {
            _triggerOutputRightData = AudioScope.triggerOutputRight;
        }
    }

    // Apply one opacity to the whole REC indicator (circle + text).
    function setRecordingLabelOpacity(opacity) {
        _recOpacity = opacity;
        recCircle.opacity = _recOpacity;
        recText.opacity = _recOpacity;
    }

    // Step the REC indicator opacity up/down, reversing at the 0..1 bounds.
    function updateRecordingLabel() {
        _recOpacity += _recSign;
        if (_recOpacity > 1.0 || _recOpacity < 0.0) {
            _recOpacity = _recOpacity > 1.0 ? 1.0 : 0.0;
            _recSign *= -1;
        }
        setRecordingLabelOpacity(_recOpacity);
    }

    // Periodic update: animate the REC label, refresh live data unless paused
    // or showing a trigger snapshot, and repaint if any channel is enabled.
    function pullFreshValues() {
        if (Audio.getRecording()) {
            updateRecordingLabel();
        }
        if (!AudioScope.getPause()) {
            if (!_triggered) {
                collectScopeData();
            }
        }
        if (inputCh.checked || outputLeftCh.checked || outputRightCh.checked) {
            mycanvas.requestPaint();
        }
    }

    // Start recording to a timestamp-named .wav in the default recording dir.
    function startRecording() {
        _wavFilePath = (new Date()).toISOString(); // yyyy-mm-ddThh:mm:ss.sssZ
        _wavFilePath = _wavFilePath.replace(/[\-:]|\.\d*Z$/g, "").replace("T", "-") + ".wav";
        // Using controller recording default directory
        _wavFilePath = Recording.getDefaultRecordingSaveDirectory() + _wavFilePath;
        if (!Audio.startRecording(_wavFilePath)) {
            Messages.sendMessage("Hifi-Notifications", JSON.stringify({message:"Error creating: "+_wavFilePath}));
            updateRecordingUI(false);
        }
    }

    // Stop recording, hide the REC indicator and announce the saved path.
    function stopRecording() {
        Audio.stopRecording();
        setRecordingLabelOpacity(0.0);
        Messages.sendMessage("Hifi-Notifications", JSON.stringify({message:"Saved: "+_wavFilePath}));
    }

    // Toggle button/checkbox state for recording; both output channels are
    // forced on while recording and restored afterwards.
    function updateRecordingUI(isRecording) {
        if (!isRecording) {
            recordButton.text = "Record";
            recordButton.color = hifi.buttons.black;
            outputLeftCh.checked = _outputLeftState;
            outputRightCh.checked = _outputRightState;
        } else {
            recordButton.text = "Stop";
            recordButton.color = hifi.buttons.red;
            _outputLeftState = outputLeftCh.checked;
            _outputRightState = outputRightCh.checked;
            outputLeftCh.checked = true;
            outputRightCh.checked = true;
        }
    }

    function toggleRecording() {
        if (Audio.getRecording()) {
            updateRecordingUI(false);
            stopRecording();
        } else {
            updateRecordingUI(true);
            startRecording();
        }
    }

    // Drives the whole UI refresh loop.
    Timer {
        interval: _refreshMs; running: true; repeat: true
        onTriggered: pullFreshValues()
    }

    Canvas {
        id: mycanvas
        anchors.fill:parent
        onPaint: {
            // Shade and outline the press-and-hold measurement rectangle.
            function displayMeasureArea(ctx) {
                ctx.fillStyle = Qt.rgba(0.1, 0.1, 0.1, 1);
                ctx.fillRect(_holdStart.x, 0, _holdEnd.x - _holdStart.x, height);
                ctx.lineWidth = "2";
                ctx.strokeStyle = "#555555";
                ctx.beginPath();
                ctx.moveTo(_holdStart.x, 0);
                ctx.lineTo(_holdStart.x, height);
                ctx.moveTo(_holdEnd.x, 0);
                ctx.lineTo(_holdEnd.x, height);
                ctx.moveTo(_holdStart.x, _holdStart.y);
                ctx.lineTo(_holdEnd.x, _holdStart.y);
                ctx.moveTo(_holdEnd.x, _holdEnd.y);
                ctx.lineTo(_holdStart.x, _holdEnd.y);
                ctx.stroke();
            }
            // Draw the trigger crosshair: four ticks around a small center hole.
            function displayTrigger(ctx, lineWidth, color) {
                var crossSize = 3;
                var holeSize = 2;
                ctx.lineWidth = lineWidth;
                ctx.strokeStyle = color;
                ctx.beginPath();
                ctx.moveTo(_triggerValues.x - (crossSize + holeSize), _triggerValues.y);
                ctx.lineTo(_triggerValues.x - holeSize, _triggerValues.y);
                ctx.moveTo(_triggerValues.x + holeSize, _triggerValues.y);
                ctx.lineTo(_triggerValues.x + (crossSize + holeSize), _triggerValues.y);
                ctx.moveTo(_triggerValues.x, _triggerValues.y - (crossSize + holeSize));
                ctx.lineTo(_triggerValues.x, _triggerValues.y - holeSize);
                ctx.moveTo(_triggerValues.x, _triggerValues.y + holeSize);
                ctx.lineTo(_triggerValues.x, _triggerValues.y + (crossSize + holeSize));
                ctx.stroke();
            }
            // Midline plus vertical grid lines, one per division in `steps`.
            function displayBackground(ctx, datawidth, steps, lineWidth, color) {
                var verticalPadding = 100;
                ctx.strokeStyle = color;
                ctx.lineWidth = lineWidth;
                ctx.moveTo(0, height/2);
                ctx.lineTo(datawidth, height/2);
                var gap = datawidth/steps;
                for (var i = 0; i < steps; i++) {
                    ctx.moveTo(i*gap + 1, verticalPadding);
                    ctx.lineTo(i*gap + 1, height-verticalPadding);
                }
                ctx.moveTo(datawidth-1, verticalPadding);
                ctx.lineTo(datawidth-1, height-verticalPadding);
                ctx.stroke();
            }
            // Polyline of one channel's samples, offset to the canvas midline.
            function drawScope(ctx, data, width, color) {
                ctx.beginPath();
                ctx.strokeStyle = color;
                ctx.lineWidth = width;
                var x = 0;
                for (var i = 0; i < data.length-1; i++) {
                    ctx.moveTo(x, data[i] + height/2);
                    ctx.lineTo(++x, data[i+1] + height/2);
                }
                ctx.stroke();
            }
            // Convert a horizontal pixel distance into frames or milliseconds,
            // depending on the selected frame count and unit mode.
            function getMeasurementText(dist) {
                var datasize = _scopeInputData.length;
                var value = 0;
                if (fiveFrames.checked) {
                    value = (_isFrameUnits) ? 5.0*dist/datasize : (Math.round(1000 * 5.0/_framesPerSecond))*dist/datasize;
                } else if (twentyFrames.checked) {
                    value = (_isFrameUnits) ? 20.0*dist/datasize : (Math.round(1000 * 20.0/_framesPerSecond))*dist/datasize;
                } else if (fiftyFrames.checked) {
                    value = (_isFrameUnits) ? 50.0*dist/datasize : (Math.round(1000 * 50.0/_framesPerSecond))*dist/datasize;
                }
                value = Math.abs(Math.round(value*100)/100);
                var measureText = "" + value + (_isFrameUnits ? " frames" : " milliseconds");
                return measureText;
            }
            // Label the hold rectangle with amplitude and time-span readouts,
            // flipping label sides so they stay outside the selection.
            function drawMeasurements(ctx, color) {
                ctx.fillStyle = color;
                ctx.font = "normal 16px sans-serif";
                var fontwidth = 8;
                var measureText = getMeasurementText(_holdEnd.x - _holdStart.x);
                if (_holdStart.x < _holdEnd.x) {
                    ctx.fillText("" + height/2 - _holdStart.y, _holdStart.x-40, _holdStart.y);
                    ctx.fillText("" + height/2 - _holdEnd.y, _holdStart.x-40, _holdEnd.y);
                    ctx.fillText(measureText, _holdEnd.x+10, _holdEnd.y);
                } else {
                    ctx.fillText("" + height/2 - _holdStart.y, _holdStart.x+10, _holdStart.y);
                    ctx.fillText("" + height/2 - _holdEnd.y, _holdStart.x+10, _holdEnd.y);
                    ctx.fillText(measureText, _holdEnd.x-fontwidth*measureText.length, _holdEnd.y);
                }
            }

            var ctx = getContext("2d");
            // Clear to black before redrawing everything.
            ctx.fillStyle = Qt.rgba(0, 0, 0, 1);
            ctx.fillRect(0, 0, width, height);
            if (isHolding()) {
                displayMeasureArea(ctx);
            }
            var guideLinesColor = "#555555"
            var guideLinesWidth = "1"
            displayBackground(ctx, _scopeInputData.length, _steps, guideLinesWidth, guideLinesColor);
            var triggerWidth = "3"
            var triggerColor = "#EFB400"
            if (AudioScope.getAutoTrigger()) {
                displayTrigger(ctx, triggerWidth, triggerColor);
            }
            var scopeWidth = "2"
            var scopeInputColor = "#00B4EF"
            var scopeOutputLeftColor = "#BB0000"
            var scopeOutputRightColor = "#00BB00"
            // Draw either the live traces or the frozen trigger snapshots.
            if (!_triggered) {
                if (inputCh.checked) {
                    drawScope(ctx, _scopeInputData, scopeWidth, scopeInputColor);
                }
                if (outputLeftCh.checked) {
                    drawScope(ctx, _scopeOutputLeftData, scopeWidth, scopeOutputLeftColor);
                }
                if (outputRightCh.checked) {
                    drawScope(ctx, _scopeOutputRightData, scopeWidth, scopeOutputRightColor);
                }
            } else {
                if (inputCh.checked) {
                    drawScope(ctx, _triggerInputData, scopeWidth, scopeInputColor);
                }
                if (outputLeftCh.checked) {
                    drawScope(ctx, _triggerOutputLeftData, scopeWidth, scopeOutputLeftColor);
                }
                if (outputRightCh.checked) {
                    drawScope(ctx, _triggerOutputRightData, scopeWidth, scopeOutputRightColor);
                }
            }
            if (isHolding()) {
                drawMeasurements(ctx, "#eeeeee");
            }
            // Press duration accumulates here because paints tick at _refreshMs.
            if (_isPressed) {
                _pressedTime += _refreshMs;
            }
        }
    }

    // Press-and-drag to measure; a quick click repositions the trigger crosshair.
    MouseArea {
        id: hitbox
        anchors.fill: mycanvas
        hoverEnabled: true
        onPressed: {
            _isPressed = true;
            _pressedTime = 0;
            _holdStart.x = mouseX;
            _holdStart.y = mouseY;
        }
        onPositionChanged: {
            _holdEnd.x = mouseX;
            _holdEnd.y = mouseY;
        }
        onReleased: {
            if (!isHolding() && AudioScope.getAutoTrigger()) {
                _triggerValues.x = mouseX
                _triggerValues.y = mouseY
                // The C++ side expects the trigger Y relative to the midline.
                AudioScope.setTriggerValues(mouseX, mouseY-height/2);
            }
            _isPressed = false;
            _pressedTime = 0;
        }
    }

    // Master on/off switch for the scope.
    HifiControlsUit.CheckBox {
        id: activated
        boxSize: 20
        anchors.top: parent.top;
        anchors.left: parent.left;
        anchors.topMargin: 8;
        anchors.leftMargin: 20;
        checked: AudioScope.getVisible();
        onCheckedChanged: {
            AudioScope.setVisible(checked);
            activelabel.text = AudioScope.getVisible() ? "On" : "Off"
        }
    }

    HifiControlsUit.Label {
        id: activelabel
        text: AudioScope.getVisible() ? "On" : "Off"
        anchors.top: activated.top;
        anchors.left: activated.right;
    }

    // Left output channel; enabling either output turns on server echo.
    HifiControlsUit.CheckBox {
        id: outputLeftCh
        boxSize: 20
        text: "Output L"
        anchors.horizontalCenter: parent.horizontalCenter;
        anchors.top: parent.top;
        anchors.topMargin: 8;
        onCheckedChanged: {
            AudioScope.setServerEcho(outputLeftCh.checked || outputRightCh.checked);
        }
    }

    HifiControlsUit.Label {
        text: "Channels";
        anchors.horizontalCenter: outputLeftCh.horizontalCenter;
        anchors.bottom: outputLeftCh.top;
        anchors.bottomMargin: 8;
    }

    // Mono input channel; enabling it turns on local echo.
    HifiControlsUit.CheckBox {
        id: inputCh
        boxSize: 20
        text: "Input Mono"
        anchors.bottom: outputLeftCh.bottom;
        anchors.right: outputLeftCh.left;
        anchors.rightMargin: 40;
        onCheckedChanged: {
            AudioScope.setLocalEcho(checked);
        }
    }

    HifiControlsUit.CheckBox {
        id: outputRightCh
        boxSize: 20
        text: "Output R"
        anchors.bottom: outputLeftCh.bottom;
        anchors.left: outputLeftCh.right;
        anchors.leftMargin: 40;
        onCheckedChanged: {
            AudioScope.setServerEcho(outputLeftCh.checked || outputRightCh.checked);
        }
    }

    HifiControlsUit.Button {
        id: recordButton;
        text: "Record";
        color: hifi.buttons.black;
        colorScheme: hifi.colorSchemes.dark;
        anchors.right: parent.right;
        anchors.bottom: parent.bottom;
        anchors.rightMargin: 30;
        anchors.bottomMargin: 8;
        width: 95;
        height: 55;
        onClicked: {
            toggleRecording();
        }
    }

    HifiControlsUit.Button {
        id: pauseButton;
        color: hifi.buttons.black;
        colorScheme: hifi.colorSchemes.dark;
        anchors.right: recordButton.left;
        anchors.bottom: parent.bottom;
        anchors.rightMargin: 30;
        anchors.bottomMargin: 8;
        height: 55;
        width: 95;
        text: " Pause ";
        onClicked: {
            AudioScope.togglePause();
        }
    }

    // Frame-count selectors act as a mutually exclusive radio group.
    HifiControlsUit.CheckBox {
        id: twentyFrames
        boxSize: 20
        anchors.left: parent.horizontalCenter;
        anchors.bottom: parent.bottom;
        anchors.bottomMargin: 8;
        onCheckedChanged: {
            if (checked){
                fiftyFrames.checked = false;
                fiveFrames.checked = false;
                AudioScope.selectAudioScopeTwentyFrames();
                _steps = 20;
                AudioScope.setPause(false);
            }
        }
    }

    HifiControlsUit.Label {
        id:twentyLabel
        anchors.left: twentyFrames.right;
        anchors.verticalCenter: twentyFrames.verticalCenter;
    }

    // Toggles the time axis between frame and millisecond labeling.
    HifiControlsUit.Button {
        id: timeButton;
        color: hifi.buttons.black;
        colorScheme: hifi.colorSchemes.dark;
        text: "Display Frames";
        anchors.horizontalCenter: twentyFrames.horizontalCenter;
        anchors.bottom: twentyFrames.top;
        anchors.bottomMargin: 8;
        height: 26;
        onClicked: {
            _isFrameUnits = !_isFrameUnits;
            updateMeasureUnits();
        }
    }

    HifiControlsUit.CheckBox {
        id: fiveFrames
        boxSize: 20
        anchors.horizontalCenter: parent.horizontalCenter;
        anchors.bottom: parent.bottom;
        anchors.bottomMargin: 8;
        anchors.horizontalCenterOffset: -50;
        checked: true;
        onCheckedChanged: {
            if (checked) {
                fiftyFrames.checked = false;
                twentyFrames.checked = false;
                AudioScope.selectAudioScopeFiveFrames();
                _steps = 5;
                AudioScope.setPause(false);
            }
        }
    }

    HifiControlsUit.Label {
        id:fiveLabel
        anchors.left: fiveFrames.right;
        anchors.verticalCenter: fiveFrames.verticalCenter;
    }

    HifiControlsUit.CheckBox {
        id: fiftyFrames
        boxSize: 20
        anchors.horizontalCenter: parent.horizontalCenter;
        anchors.bottom: parent.bottom;
        anchors.bottomMargin: 8;
        anchors.horizontalCenterOffset: 70;
        onCheckedChanged: {
            if (checked) {
                twentyFrames.checked = false;
                fiveFrames.checked = false;
                AudioScope.selectAudioScopeFiftyFrames();
                _steps = 50;
                AudioScope.setPause(false);
            }
        }
    }

    HifiControlsUit.Label {
        id:fiftyLabel
        anchors.left: fiftyFrames.right;
        anchors.verticalCenter: fiftyFrames.verticalCenter;
    }

    // Arms/disarms the auto-trigger.
    HifiControlsUit.Switch {
        id: triggerSwitch;
        height: 26;
        anchors.left: parent.left;
        anchors.bottom: parent.bottom;
        anchors.leftMargin: 75;
        anchors.bottomMargin: 8;
        labelTextOff: "Off";
        labelTextOn: "On";
        onCheckedChanged: {
            // NOTE(review): the unconditional setPause(false) below makes this
            // conditional call redundant — confirm which one was intended.
            if (!checked) AudioScope.setPause(false);
            AudioScope.setPause(false);
            AudioScope.setAutoTrigger(checked);
            AudioScope.setTriggerValues(_triggerValues.x, _triggerValues.y-root.height/2);
        }
    }

    HifiControlsUit.Label {
        text: "Trigger";
        anchors.left: triggerSwitch.left;
        anchors.leftMargin: -15;
        anchors.bottom: triggerSwitch.top;
    }

    // Blinking "REC" indicator shown while recording.
    Rectangle {
        id: recordIcon;
        width:110;
        height:40;
        anchors.right: parent.right;
        anchors.top: parent.top;
        anchors.topMargin: 8;
        color: "transparent"

        Text {
            id: recText
            text: "REC"
            color: "red"
            font.pixelSize: 30;
            anchors.left: recCircle.right;
            anchors.leftMargin: 10;
            opacity: _recOpacity;
            y: -8;
        }

        Rectangle {
            id: recCircle;
            width: 25;
            height: 25;
            radius: width*0.5
            opacity: _recOpacity;
            color: "red";
        }
    }

    // Initial state: frame count from C++, trigger position pushed down,
    // scope visible with the input channel enabled.
    Component.onCompleted: {
        _steps = AudioScope.getFramesPerScope();
        AudioScope.setTriggerValues(_triggerValues.x, _triggerValues.y-root.height/2);
        activated.checked = true;
        inputCh.checked = true;
        updateMeasureUnits();
    }

    // React to C++-side pause/trigger state changes.
    Connections {
        target: AudioScope
        onPauseChanged: {
            if (!AudioScope.getPause()) {
                // Resuming clears any held trigger snapshot.
                pauseButton.text = "Pause";
                pauseButton.color = hifi.buttons.black;
                AudioScope.setTriggered(false);
                _triggered = false;
            } else {
                pauseButton.text = "Continue";
                pauseButton.color = hifi.buttons.blue;
            }
        }
        onTriggered: {
            // Freeze the triggered traces and pause the display.
            _triggered = true;
            collectTriggerData();
            AudioScope.setPause(true);
        }
    }
}

View file

@ -21,6 +21,8 @@ Item {
signal newViewRequestedCallback(var request)
signal loadingChangedCallback(var loadRequest)
width: parent.width
property bool interactive: false
StylesUIt.HifiConstants {
@ -58,7 +60,8 @@ Item {
WebEngineView {
id: webViewCore
anchors.fill: parent
width: parent.width
height: parent.height
profile: HFWebEngineProfile;
settings.pluginsEnabled: true
@ -91,20 +94,19 @@ Item {
userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard, userScript ]
property string newUrl: ""
Component.onCompleted: {
webChannel.registerObject("eventBridge", eventBridge);
webChannel.registerObject("eventBridgeWrapper", eventBridgeWrapper);
// Ensure the JS from the web-engine makes it to our logging
webViewCore.javaScriptConsoleMessage.connect(function(level, message, lineNumber, sourceID) {
console.log("Web Entity JS message: " + sourceID + " " + lineNumber + " " + message);
});
if (webViewCoreUserAgent !== undefined) {
webViewCore.profile.httpUserAgent = webViewCoreUserAgent
} else {
webViewCore.profile.httpUserAgent += " (HighFidelityInterface)";
}
// Ensure the JS from the web-engine makes it to our logging
webViewCore.javaScriptConsoleMessage.connect(function(level, message, lineNumber, sourceID) {
console.log("Web Entity JS message: " + sourceID + " " + lineNumber + " " + message);
});
}
onFeaturePermissionRequested: {

View file

@ -26,6 +26,7 @@ Item {
}
Connections {
id: onAttachmentsChangedConnection
target: MyAvatar
onAttachmentsChanged: reload()
}
@ -34,6 +35,12 @@ Item {
reload()
}
function setAttachmentsVariant(attachments) {
onAttachmentsChangedConnection.enabled = false;
MyAvatar.setAttachmentsVariant(attachments);
onAttachmentsChangedConnection.enabled = true;
}
Column {
width: pane.width
@ -92,11 +99,15 @@ Item {
attachments.splice(index, 1);
listView.model.remove(index, 1);
}
onUpdateAttachment: MyAvatar.setAttachmentsVariant(attachments);
onUpdateAttachment: {
setAttachmentsVariant(attachments);
}
}
}
onCountChanged: MyAvatar.setAttachmentsVariant(attachments);
onCountChanged: {
setAttachmentsVariant(attachments);
}
/*
// DEBUG
@ -220,7 +231,7 @@ Item {
};
attachments.push(template);
listView.model.append({});
MyAvatar.setAttachmentsVariant(attachments);
setAttachmentsVariant(attachments);
}
}
@ -250,7 +261,7 @@ Item {
id: cancelAction
text: "Cancel"
onTriggered: {
MyAvatar.setAttachmentsVariant(originalAttachments);
setAttachmentsVariant(originalAttachments);
closeDialog();
}
}
@ -263,7 +274,7 @@ Item {
console.log("Attachment " + i + ": " + attachments[i]);
}
MyAvatar.setAttachmentsVariant(attachments);
setAttachmentsVariant(attachments);
closeDialog();
}
}

View file

@ -2812,10 +2812,10 @@ static int getEventQueueSize(QThread* thread) {
static void dumpEventQueue(QThread* thread) {
auto threadData = QThreadData::get2(thread);
QMutexLocker locker(&threadData->postEventList.mutex);
qDebug() << "AJT: event list, size =" << threadData->postEventList.size();
qDebug() << "Event list, size =" << threadData->postEventList.size();
for (auto& postEvent : threadData->postEventList) {
QEvent::Type type = (postEvent.event ? postEvent.event->type() : QEvent::None);
qDebug() << "AJT: " << type;
qDebug() << " " << type;
}
}
#endif // DEBUG_EVENT_QUEUE

View file

@ -679,36 +679,16 @@ Menu::Menu() {
});
auto audioIO = DependencyManager::get<AudioClient>();
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoServerAudio, 0, false,
audioIO.data(), SLOT(toggleServerEcho()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoLocalAudio, 0, false,
audioIO.data(), SLOT(toggleLocalEcho()));
addActionToQMenuAndActionHash(audioDebugMenu, MenuOption::MuteEnvironment, 0,
audioIO.data(), SLOT(sendMuteEnvironmentPacket()));
auto scope = DependencyManager::get<AudioScope>();
MenuWrapper* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope");
addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScope, Qt::CTRL | Qt::Key_F2, false,
scope.data(), SLOT(toggle()));
addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopePause, Qt::CTRL | Qt::SHIFT | Qt::Key_F2, false,
scope.data(), SLOT(togglePause()));
addDisabledActionAndSeparator(audioScopeMenu, "Display Frames");
{
QAction* fiveFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeFiveFrames,
0, true, scope.data(), SLOT(selectAudioScopeFiveFrames()));
QAction* twentyFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeTwentyFrames,
0, false, scope.data(), SLOT(selectAudioScopeTwentyFrames()));
QAction* fiftyFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeFiftyFrames,
0, false, scope.data(), SLOT(selectAudioScopeFiftyFrames()));
QActionGroup* audioScopeFramesGroup = new QActionGroup(audioScopeMenu);
audioScopeFramesGroup->addAction(fiveFrames);
audioScopeFramesGroup->addAction(twentyFrames);
audioScopeFramesGroup->addAction(fiftyFrames);
}
action = addActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioScope);
connect(action, &QAction::triggered, [] {
auto scriptEngines = DependencyManager::get<ScriptEngines>();
QUrl defaultScriptsLoc = PathUtils::defaultScriptsLocation();
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/audio/audioScope.js");
scriptEngines->loadScript(defaultScriptsLoc.toString());
});
// Developer > Physics >>>
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");

View file

@ -9,6 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qvector2d.h>
#include <limits>
#include <AudioClient.h>
@ -21,13 +22,14 @@
#include "AudioScope.h"
static const unsigned int DEFAULT_FRAMES_PER_SCOPE = 5;
static const unsigned int SCOPE_WIDTH = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * DEFAULT_FRAMES_PER_SCOPE;
static const unsigned int MULTIPLIER_SCOPE_HEIGHT = 20;
static const unsigned int SCOPE_HEIGHT = 2 * 15 * MULTIPLIER_SCOPE_HEIGHT;
AudioScope::AudioScope() :
_isEnabled(false),
_isPaused(false),
_isTriggered(false),
_autoTrigger(false),
_scopeInputOffset(0),
_scopeOutputOffset(0),
_framesPerScope(DEFAULT_FRAMES_PER_SCOPE),
@ -43,6 +45,7 @@ AudioScope::AudioScope() :
_outputRightD(DependencyManager::get<GeometryCache>()->allocateID())
{
auto audioIO = DependencyManager::get<AudioClient>();
connect(&audioIO->getReceivedAudioStream(), &MixedProcessedAudioStream::addedSilence,
this, &AudioScope::addStereoSilenceToScope);
connect(&audioIO->getReceivedAudioStream(), &MixedProcessedAudioStream::addedLastFrameRepeatedWithFade,
@ -75,6 +78,18 @@ void AudioScope::selectAudioScopeFiftyFrames() {
reallocateScope(50);
}
void AudioScope::setLocalEcho(bool localEcho) {
DependencyManager::get<AudioClient>()->setLocalEcho(localEcho);
}
void AudioScope::setServerEcho(bool serverEcho) {
DependencyManager::get<AudioClient>()->setServerEcho(serverEcho);
}
// Network audio frame rate (frames per second); the QML scope UI uses this
// to convert displayed frame counts into milliseconds.
float AudioScope::getFramesPerSecond(){
    return AudioConstants::NETWORK_FRAMES_PER_SEC;
}
void AudioScope::allocateScope() {
_scopeInputOffset = 0;
_scopeOutputOffset = 0;
@ -108,63 +123,14 @@ void AudioScope::freeScope() {
}
}
void AudioScope::render(RenderArgs* renderArgs, int width, int height) {
if (!_isEnabled) {
return;
}
static const glm::vec4 backgroundColor = { 0.4f, 0.4f, 0.4f, 0.6f };
static const glm::vec4 gridColor = { 0.7f, 0.7f, 0.7f, 1.0f };
static const glm::vec4 inputColor = { 0.3f, 1.0f, 0.3f, 1.0f };
static const glm::vec4 outputLeftColor = { 1.0f, 0.3f, 0.3f, 1.0f };
static const glm::vec4 outputRightColor = { 0.3f, 0.3f, 1.0f, 1.0f };
static const int gridCols = 2;
int gridRows = _framesPerScope;
int x = (width - (int)SCOPE_WIDTH) / 2;
int y = (height - (int)SCOPE_HEIGHT) / 2;
int w = (int)SCOPE_WIDTH;
int h = (int)SCOPE_HEIGHT;
gpu::Batch& batch = *renderArgs->_batch;
auto geometryCache = DependencyManager::get<GeometryCache>();
// Grid uses its own pipeline, so draw it before setting another
const float GRID_EDGE = 0.005f;
geometryCache->renderGrid(batch, glm::vec2(x, y), glm::vec2(x + w, y + h),
gridRows, gridCols, GRID_EDGE, gridColor, true, _audioScopeGrid);
geometryCache->useSimpleDrawPipeline(batch);
auto textureCache = DependencyManager::get<TextureCache>();
batch.setResourceTexture(0, textureCache->getWhiteTexture());
// FIXME - do we really need to reset this here? we know that we're called inside of ApplicationOverlay::renderOverlays
// which already set up our batch for us to have these settings
mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, -1000, 1000);
batch.setProjectionTransform(legacyProjection);
batch.setModelTransform(Transform());
batch.resetViewTransform();
geometryCache->renderQuad(batch, x, y, w, h, backgroundColor, _audioScopeBackground);
renderLineStrip(batch, _inputID, inputColor, x, y, _samplesPerScope, _scopeInputOffset, _scopeInput);
renderLineStrip(batch, _outputLeftID, outputLeftColor, x, y, _samplesPerScope, _scopeOutputOffset, _scopeOutputLeft);
renderLineStrip(batch, _outputRightD, outputRightColor, x, y, _samplesPerScope, _scopeOutputOffset, _scopeOutputRight);
}
void AudioScope::renderLineStrip(gpu::Batch& batch, int id, const glm::vec4& color, int x, int y, int n, int offset, const QByteArray* byteArray) {
QVector<int> AudioScope::getScopeVector(const QByteArray* byteArray, int offset) {
int16_t sample;
int16_t* samples = ((int16_t*) byteArray->data()) + offset;
QVector<int> points;
if (!_isEnabled || byteArray == NULL) return points;
int16_t* samples = ((int16_t*)byteArray->data()) + offset;
int numSamplesToAverage = _framesPerScope / DEFAULT_FRAMES_PER_SCOPE;
int count = (n - offset) / numSamplesToAverage;
int remainder = (n - offset) % numSamplesToAverage;
y += SCOPE_HEIGHT / 2;
auto geometryCache = DependencyManager::get<GeometryCache>();
QVector<glm::vec2> points;
int count = (_samplesPerScope - offset) / numSamplesToAverage;
int remainder = (_samplesPerScope - offset) % numSamplesToAverage;
// Compute and draw the sample averages from the offset position
for (int i = count; --i >= 0; ) {
@ -173,7 +139,7 @@ void AudioScope::renderLineStrip(gpu::Batch& batch, int id, const glm::vec4& col
sample += *samples++;
}
sample /= numSamplesToAverage;
points << glm::vec2(x++, y - sample);
points << -sample;
}
// Compute and draw the sample average across the wrap boundary
@ -182,16 +148,17 @@ void AudioScope::renderLineStrip(gpu::Batch& batch, int id, const glm::vec4& col
for (int j = remainder; --j >= 0; ) {
sample += *samples++;
}
samples = (int16_t*) byteArray->data();
samples = (int16_t*)byteArray->data();
for (int j = numSamplesToAverage - remainder; --j >= 0; ) {
sample += *samples++;
}
sample /= numSamplesToAverage;
points << glm::vec2(x++, y - sample);
} else {
samples = (int16_t*) byteArray->data();
points << -sample;
}
else {
samples = (int16_t*)byteArray->data();
}
// Compute and draw the sample average from the beginning to the offset
@ -202,12 +169,51 @@ void AudioScope::renderLineStrip(gpu::Batch& batch, int id, const glm::vec4& col
sample += *samples++;
}
sample /= numSamplesToAverage;
points << glm::vec2(x++, y - sample);
points << -sample;
}
return points;
}
// Returns true when the scope trace passes close enough to the user-placed
// trigger crosshair (_triggerValues) to count as a trigger hit. Only active
// while auto-trigger is armed and the crosshair X lies inside the trace.
bool AudioScope::shouldTrigger(const QVector<int>& scope) {
    const int threshold = 4;
    if (!_autoTrigger || _triggerValues.x >= scope.size()) {
        return false;
    }
    // Scan a +/- 4*threshold sample window centered on the crosshair X.
    for (int offset = -4 * threshold; offset < 4 * threshold; ++offset) {
        int index = _triggerValues.x + offset;
        // Clamp the probe index into the valid range of the trace.
        if (index < 0) {
            index = 0;
        } else if (index >= scope.size()) {
            index = scope.size() - 1;
        }
        // Hit when the trace amplitude comes within `threshold` of the crosshair Y.
        if (abs(_triggerValues.y - scope[index]) < threshold) {
            return true;
        }
    }
    return false;
}
// Snapshot the live scope traces into the trigger buffers so the UI can
// display a frozen view, then mark the scope as triggered and announce it.
void AudioScope::storeTriggerValues() {
    _triggerOutputRightData = _scopeOutputRightData;
    _triggerOutputLeftData = _scopeOutputLeftData;
    _triggerInputData = _scopeInputData;
    // Set the flag before emitting so slots observe a consistent state.
    _isTriggered = true;
    emit triggered();
}
void AudioScope::computeInputData() {
_scopeInputData = getScopeVector(_scopeInput, _scopeInputOffset);
if (shouldTrigger(_scopeInputData)) {
storeTriggerValues();
}
}
void AudioScope::computeOutputData() {
_scopeOutputLeftData = getScopeVector(_scopeOutputLeft, _scopeOutputOffset);
if (shouldTrigger(_scopeOutputLeftData)) {
storeTriggerValues();
}
_scopeOutputRightData = getScopeVector(_scopeOutputRight, _scopeOutputOffset);
if (shouldTrigger(_scopeOutputRightData)) {
storeTriggerValues();
}
geometryCache->updateVertices(id, points, color);
geometryCache->renderVertices(batch, gpu::LINE_STRIP, id);
}
int AudioScope::addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamplesPerChannel,
@ -231,7 +237,7 @@ int AudioScope::addBufferToScope(QByteArray* byteArray, int frameOffset, const i
}
int AudioScope::addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples) {
// Short int pointer to mapped samples in byte array
int16_t* destination = (int16_t*)byteArray->data();
@ -271,6 +277,7 @@ void AudioScope::addStereoSamplesToScope(const QByteArray& samples) {
_scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, samplesData, samplesPerChannel, 1, AudioConstants::STEREO);
_scopeLastFrame = samples.right(AudioConstants::NETWORK_FRAME_BYTES_STEREO);
computeOutputData();
}
void AudioScope::addLastFrameRepeatedWithFadeToScope(int samplesPerChannel) {
@ -302,4 +309,5 @@ void AudioScope::addInputToScope(const QByteArray& inputSamples) {
_scopeInputOffset = addBufferToScope(_scopeInput, _scopeInputOffset,
reinterpret_cast<const int16_t*>(inputSamples.data()),
inputSamples.size() / sizeof(int16_t), INPUT_AUDIO_CHANNEL, NUM_INPUT_CHANNELS);
computeInputData();
}

View file

@ -24,27 +24,60 @@
class AudioScope : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
Q_PROPERTY(QVector<int> scopeInput READ getScopeInput)
Q_PROPERTY(QVector<int> scopeOutputLeft READ getScopeOutputLeft)
Q_PROPERTY(QVector<int> scopeOutputRight READ getScopeOutputRight)
Q_PROPERTY(QVector<int> triggerInput READ getTriggerInput)
Q_PROPERTY(QVector<int> triggerOutputLeft READ getTriggerOutputLeft)
Q_PROPERTY(QVector<int> triggerOutputRight READ getTriggerOutputRight)
public:
// Audio scope methods for allocation/deallocation
void allocateScope();
void freeScope();
void reallocateScope(int frames);
void render(RenderArgs* renderArgs, int width, int height);
public slots:
void toggle() { setVisible(!_isEnabled); }
void setVisible(bool visible);
bool getVisible() const { return _isEnabled; }
void togglePause() { _isPaused = !_isPaused; }
void setPause(bool paused) { _isPaused = paused; }
void togglePause() { setPause(!_isPaused); }
void setPause(bool paused) { _isPaused = paused; emit pauseChanged(); }
bool getPause() { return _isPaused; }
void toggleTrigger() { _autoTrigger = !_autoTrigger; }
bool getAutoTrigger() { return _autoTrigger; }
void setAutoTrigger(bool autoTrigger) { _isTriggered = false; _autoTrigger = autoTrigger; }
void setTriggerValues(int x, int y) { _triggerValues.x = x; _triggerValues.y = y; }
void setTriggered(bool triggered) { _isTriggered = triggered; }
bool getTriggered() { return _isTriggered; }
float getFramesPerSecond();
int getFramesPerScope() { return _framesPerScope; }
void selectAudioScopeFiveFrames();
void selectAudioScopeTwentyFrames();
void selectAudioScopeFiftyFrames();
QVector<int> getScopeInput() { return _scopeInputData; };
QVector<int> getScopeOutputLeft() { return _scopeOutputLeftData; };
QVector<int> getScopeOutputRight() { return _scopeOutputRightData; };
QVector<int> getTriggerInput() { return _triggerInputData; };
QVector<int> getTriggerOutputLeft() { return _triggerOutputLeftData; };
QVector<int> getTriggerOutputRight() { return _triggerOutputRightData; };
void setLocalEcho(bool serverEcho);
void setServerEcho(bool serverEcho);
signals:
void pauseChanged();
void triggered();
protected:
AudioScope();
@ -55,24 +88,44 @@ private slots:
void addInputToScope(const QByteArray& inputSamples);
private:
// Audio scope methods for rendering
void renderLineStrip(gpu::Batch& batch, int id, const glm::vec4& color, int x, int y, int n, int offset, const QByteArray* byteArray);
// Audio scope methods for data acquisition
int addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamples,
unsigned int sourceChannel, unsigned int sourceNumberOfChannels, float fade = 1.0f);
int addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples);
QVector<int> getScopeVector(const QByteArray* scope, int offset);
bool shouldTrigger(const QVector<int>& scope);
void computeInputData();
void computeOutputData();
void storeTriggerValues();
bool _isEnabled;
bool _isPaused;
bool _isTriggered;
bool _autoTrigger;
int _scopeInputOffset;
int _scopeOutputOffset;
int _framesPerScope;
int _samplesPerScope;
QByteArray* _scopeInput;
QByteArray* _scopeOutputLeft;
QByteArray* _scopeOutputRight;
QByteArray _scopeLastFrame;
QVector<int> _scopeInputData;
QVector<int> _scopeOutputLeftData;
QVector<int> _scopeOutputRightData;
QVector<int> _triggerInputData;
QVector<int> _triggerOutputLeftData;
QVector<int> _triggerOutputRightData;
glm::ivec2 _triggerValues;
int _audioScopeBackground;
int _audioScopeGrid;

View file

@ -114,7 +114,8 @@ MyAvatar::MyAvatar(QThread* thread) :
_skeletonModel = std::make_shared<MySkeletonModel>(this, nullptr);
connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);
connect(_skeletonModel.get(), &Model::rigReady, this, &Avatar::rigReady);
connect(_skeletonModel.get(), &Model::rigReset, this, &Avatar::rigReset);
using namespace recording;
_skeletonModel->flagAsCauterized();
@ -1516,9 +1517,19 @@ void MyAvatar::updateMotors() {
_characterController.clearMotors();
glm::quat motorRotation;
if (_motionBehaviors & AVATAR_MOTION_ACTION_MOTOR_ENABLED) {
const float FLYING_MOTOR_TIMESCALE = 0.05f;
const float WALKING_MOTOR_TIMESCALE = 0.2f;
const float INVALID_MOTOR_TIMESCALE = 1.0e6f;
float horizontalMotorTimescale;
float verticalMotorTimescale;
if (_characterController.getState() == CharacterController::State::Hover ||
_characterController.computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS) {
motorRotation = getMyHead()->getHeadOrientation();
horizontalMotorTimescale = FLYING_MOTOR_TIMESCALE;
verticalMotorTimescale = FLYING_MOTOR_TIMESCALE;
} else {
// non-hovering = walking: follow camera twist about vertical but not lift
// we decompose camera's rotation and store the twist part in motorRotation
@ -1529,11 +1540,12 @@ void MyAvatar::updateMotors() {
glm::quat liftRotation;
swingTwistDecomposition(headOrientation, Vectors::UNIT_Y, liftRotation, motorRotation);
motorRotation = orientation * motorRotation;
horizontalMotorTimescale = WALKING_MOTOR_TIMESCALE;
verticalMotorTimescale = INVALID_MOTOR_TIMESCALE;
}
const float DEFAULT_MOTOR_TIMESCALE = 0.2f;
const float INVALID_MOTOR_TIMESCALE = 1.0e6f;
if (_isPushing || _isBraking || !_isBeingPushed) {
_characterController.addMotor(_actionMotorVelocity, motorRotation, DEFAULT_MOTOR_TIMESCALE, INVALID_MOTOR_TIMESCALE);
_characterController.addMotor(_actionMotorVelocity, motorRotation, horizontalMotorTimescale, verticalMotorTimescale);
} else {
// _isBeingPushed must be true --> disable action motor by giving it a long timescale,
// otherwise it's attempt to "stand in in place" could defeat scripted motor/thrusts
@ -1799,6 +1811,7 @@ void MyAvatar::postUpdate(float deltaTime, const render::ScenePointer& scene) {
_skeletonModel->setCauterizeBoneSet(_headBoneSet);
_fstAnimGraphOverrideUrl = _skeletonModel->getGeometry()->getAnimGraphOverrideUrl();
initAnimGraph();
_isAnimatingScale = true;
}
if (_enableDebugDrawDefaultPose || _enableDebugDrawAnimPose) {
@ -1956,27 +1969,33 @@ void MyAvatar::updateOrientation(float deltaTime) {
// Use head/HMD roll to turn while flying, but not when standing still.
if (qApp->isHMDMode() && getCharacterController()->getState() == CharacterController::State::Hover && _hmdRollControlEnabled && hasDriveInput()) {
// Turn with head roll.
const float MIN_CONTROL_SPEED = 0.01f;
float speed = glm::length(getWorldVelocity());
if (speed >= MIN_CONTROL_SPEED) {
// Feather turn when stopping moving.
float speedFactor;
if (getDriveKey(TRANSLATE_Z) != 0.0f || _lastDrivenSpeed == 0.0f) {
_lastDrivenSpeed = speed;
speedFactor = 1.0f;
} else {
speedFactor = glm::min(speed / _lastDrivenSpeed, 1.0f);
}
const float MIN_CONTROL_SPEED = 2.0f * getSensorToWorldScale(); // meters / sec
const glm::vec3 characterForward = getWorldOrientation() * Vectors::UNIT_NEG_Z;
float forwardSpeed = glm::dot(characterForward, getWorldVelocity());
float direction = glm::dot(getWorldVelocity(), getWorldOrientation() * Vectors::UNIT_NEG_Z) > 0.0f ? 1.0f : -1.0f;
// only enable roll-turns if we are moving forward or backward at greater then MIN_CONTROL_SPEED
if (fabsf(forwardSpeed) >= MIN_CONTROL_SPEED) {
float direction = forwardSpeed > 0.0f ? 1.0f : -1.0f;
float rollAngle = glm::degrees(asinf(glm::dot(IDENTITY_UP, _hmdSensorOrientation * IDENTITY_RIGHT)));
float rollSign = rollAngle < 0.0f ? -1.0f : 1.0f;
rollAngle = fabsf(rollAngle);
rollAngle = rollAngle > _hmdRollControlDeadZone ? rollSign * (rollAngle - _hmdRollControlDeadZone) : 0.0f;
totalBodyYaw += speedFactor * direction * rollAngle * deltaTime * _hmdRollControlRate;
const float MIN_ROLL_ANGLE = _hmdRollControlDeadZone;
const float MAX_ROLL_ANGLE = 90.0f; // degrees
if (rollAngle > MIN_ROLL_ANGLE) {
// rate of turning is linearly proportional to rollAngle
rollAngle = glm::clamp(rollAngle, MIN_ROLL_ANGLE, MAX_ROLL_ANGLE);
// scale rollAngle into a value from zero to one.
float rollFactor = (rollAngle - MIN_ROLL_ANGLE) / (MAX_ROLL_ANGLE - MIN_ROLL_ANGLE);
float angularSpeed = rollSign * rollFactor * _hmdRollControlRate;
totalBodyYaw += direction * angularSpeed * deltaTime;
}
}
}
@ -2022,12 +2041,13 @@ void MyAvatar::updateActionMotor(float deltaTime) {
_isBraking = _wasPushing || (_isBraking && speed > MIN_ACTION_BRAKE_SPEED);
}
CharacterController::State state = _characterController.getState();
// compute action input
glm::vec3 forward = (getDriveKey(TRANSLATE_Z)) * IDENTITY_FORWARD;
glm::vec3 right = (getDriveKey(TRANSLATE_X)) * IDENTITY_RIGHT;
glm::vec3 direction = forward + right;
CharacterController::State state = _characterController.getState();
if (state == CharacterController::State::Hover ||
_characterController.computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS) {
// we can fly --> support vertical motion
@ -2161,41 +2181,6 @@ bool findAvatarAvatarPenetration(const glm::vec3 positionA, float radiusA, float
// target scale to match the new scale they have chosen. When they leave the domain they will not return to the scale they were
// before they entered the limiting domain.
void MyAvatar::clampTargetScaleToDomainLimits() {
    // Before honoring a user request to grow or shrink, snap the starting
    // target scale into the range permitted by the current domain.
    const float allowed = glm::clamp(_targetScale, _domainMinimumScale, _domainMaximumScale);
    if (allowed == _targetScale) {
        // Already within domain limits; nothing to do.
        return;
    }
    qCDebug(interfaceapp, "Clamped scale to %f since original target scale %f was not allowed by domain",
        (double)allowed, (double)_targetScale);
    setTargetScale(allowed);
}
// Apply the requested target scale, constrained to the domain's allowed
// range, then log the result and notify listeners of the change.
void MyAvatar::clampScaleChangeToDomainLimits(float desiredScale) {
    const float allowedScale = glm::clamp(desiredScale, _domainMinimumScale, _domainMaximumScale);
    if (allowedScale != desiredScale) {
        qCDebug(interfaceapp, "Forcing scale to %f since %f is not allowed by domain",
            allowedScale, desiredScale);
    }
    setTargetScale(allowedScale);
    qCDebug(interfaceapp, "Changed scale to %f", (double)_targetScale);
    emit(scaleChanged());
}
// Smallest avatar scale allowed by the current domain's settings.
float MyAvatar::getDomainMinScale() {
    return _domainMinimumScale;
}
// Largest avatar scale allowed by the current domain's settings.
float MyAvatar::getDomainMaxScale() {
    return _domainMaximumScale;
}
void MyAvatar::setGravity(float gravity) {
_characterController.setGravity(gravity);
}
@ -2205,70 +2190,58 @@ float MyAvatar::getGravity() {
}
void MyAvatar::increaseSize() {
// make sure we're starting from an allowable scale
clampTargetScaleToDomainLimits();
float minScale = getDomainMinScale();
float maxScale = getDomainMaxScale();
// calculate what our new scale should be
float updatedTargetScale = _targetScale * (1.0f + SCALING_RATIO);
float clampedTargetScale = glm::clamp(_targetScale, minScale, maxScale);
float newTargetScale = glm::clamp(clampedTargetScale * (1.0f + SCALING_RATIO), minScale, maxScale);
// attempt to change to desired scale (clamped to the domain limits)
clampScaleChangeToDomainLimits(updatedTargetScale);
setTargetScale(newTargetScale);
}
void MyAvatar::decreaseSize() {
// make sure we're starting from an allowable scale
clampTargetScaleToDomainLimits();
float minScale = getDomainMinScale();
float maxScale = getDomainMaxScale();
// calculate what our new scale should be
float updatedTargetScale = _targetScale * (1.0f - SCALING_RATIO);
float clampedTargetScale = glm::clamp(_targetScale, minScale, maxScale);
float newTargetScale = glm::clamp(clampedTargetScale * (1.0f - SCALING_RATIO), minScale, maxScale);
// attempt to change to desired scale (clamped to the domain limits)
clampScaleChangeToDomainLimits(updatedTargetScale);
setTargetScale(newTargetScale);
}
void MyAvatar::resetSize() {
// attempt to reset avatar size to the default (clamped to domain limits)
const float DEFAULT_AVATAR_SCALE = 1.0f;
clampScaleChangeToDomainLimits(DEFAULT_AVATAR_SCALE);
setTargetScale(DEFAULT_AVATAR_SCALE);
}
void MyAvatar::restrictScaleFromDomainSettings(const QJsonObject& domainSettingsObject) {
// pull out the minimum and maximum scale and set them to restrict our scale
// pull out the minimum and maximum height and set them to restrict our scale
static const QString AVATAR_SETTINGS_KEY = "avatars";
auto avatarsObject = domainSettingsObject[AVATAR_SETTINGS_KEY].toObject();
static const QString MIN_SCALE_OPTION = "min_avatar_scale";
float settingMinScale = avatarsObject[MIN_SCALE_OPTION].toDouble(MIN_AVATAR_SCALE);
setDomainMinimumScale(settingMinScale);
static const QString MIN_HEIGHT_OPTION = "min_avatar_height";
float settingMinHeight = avatarsObject[MIN_HEIGHT_OPTION].toDouble(MIN_AVATAR_HEIGHT);
setDomainMinimumHeight(settingMinHeight);
static const QString MAX_SCALE_OPTION = "max_avatar_scale";
float settingMaxScale = avatarsObject[MAX_SCALE_OPTION].toDouble(MAX_AVATAR_SCALE);
setDomainMaximumScale(settingMaxScale);
static const QString MAX_HEIGHT_OPTION = "max_avatar_height";
float settingMaxHeight = avatarsObject[MAX_HEIGHT_OPTION].toDouble(MAX_AVATAR_HEIGHT);
setDomainMaximumHeight(settingMaxHeight);
// make sure that the domain owner didn't flip min and max
if (_domainMinimumScale > _domainMaximumScale) {
std::swap(_domainMinimumScale, _domainMaximumScale);
if (_domainMinimumHeight > _domainMaximumHeight) {
std::swap(_domainMinimumHeight, _domainMaximumHeight);
}
// Set avatar current scale
Settings settings;
settings.beginGroup("Avatar");
_targetScale = loadSetting(settings, "scale", 1.0f);
qCDebug(interfaceapp) << "This domain requires a minimum avatar scale of " << _domainMinimumScale
<< " and a maximum avatar scale of " << _domainMaximumScale
<< ". Current avatar scale is " << _targetScale;
qCDebug(interfaceapp) << "This domain requires a minimum avatar scale of " << _domainMinimumHeight
<< " and a maximum avatar scale of " << _domainMaximumHeight;
// debug to log if this avatar's scale in this domain will be clamped
float clampedScale = glm::clamp(_targetScale, _domainMinimumScale, _domainMaximumScale);
if (_targetScale != clampedScale) {
qCDebug(interfaceapp) << "Current avatar scale is clamped to " << clampedScale
<< " because " << _targetScale << " is not allowed by current domain";
// The current scale of avatar should not be more than domain's max_avatar_scale and not less than domain's min_avatar_scale .
_targetScale = clampedScale;
}
_isAnimatingScale = true;
setModelScale(_targetScale);
rebuildCollisionShape();
@ -2288,8 +2261,8 @@ void MyAvatar::saveAvatarScale() {
}
void MyAvatar::clearScaleRestriction() {
_domainMinimumScale = MIN_AVATAR_SCALE;
_domainMaximumScale = MAX_AVATAR_SCALE;
_domainMinimumHeight = MIN_AVATAR_HEIGHT;
_domainMaximumHeight = MAX_AVATAR_HEIGHT;
}
void MyAvatar::goToLocation(const QVariant& propertiesVar) {
@ -3248,6 +3221,7 @@ void MyAvatar::setModelScale(float scale) {
if (changed) {
float sensorToWorldScale = getEyeHeight() / getUserEyeHeight();
emit sensorToWorldScaleChanged(sensorToWorldScale);
emit scaleChanged();
}
}

View file

@ -110,6 +110,10 @@ class MyAvatar : public Avatar {
* @property userEyeHeight {number} Estimated height of the users eyes in sensor space. (meters)
* @property SELF_ID {string} READ-ONLY. UUID representing "my avatar". Only use for local-only entities and overlays in situations where MyAvatar.sessionUUID is not available (e.g., if not connected to a domain).
* Note: Likely to be deprecated.
* @property hmdRollControlEnabled {bool} When enabled the roll angle of your HMD will turn your avatar while flying.
* @property hmdRollControlDeadZone {number} If hmdRollControlEnabled is true, this value can be used to tune what roll angle is required to begin turning.
* This angle is specified in degrees.
* @property hmdRollControlRate {number} If hmdRollControlEnabled is true, this value determines the maximum turn rate of your avatar when rolling your HMD in degrees per second.
*/
// FIXME: `glm::vec3 position` is not accessible from QML, so this exposes position in a QML-native type
@ -158,7 +162,7 @@ class MyAvatar : public Avatar {
Q_PROPERTY(float userEyeHeight READ getUserEyeHeight)
Q_PROPERTY(QUuid SELF_ID READ getSelfID CONSTANT)
const QString DOMINANT_LEFT_HAND = "left";
const QString DOMINANT_RIGHT_HAND = "right";
@ -558,8 +562,6 @@ public slots:
void increaseSize();
void decreaseSize();
void resetSize();
float getDomainMinScale();
float getDomainMaxScale();
void setGravity(float gravity);
float getGravity();
@ -737,12 +739,12 @@ private:
bool _clearOverlayWhenMoving { true };
QString _dominantHand { DOMINANT_RIGHT_HAND };
const float ROLL_CONTROL_DEAD_ZONE_DEFAULT = 8.0f; // deg
const float ROLL_CONTROL_RATE_DEFAULT = 2.5f; // deg/sec/deg
const float ROLL_CONTROL_DEAD_ZONE_DEFAULT = 8.0f; // degrees
const float ROLL_CONTROL_RATE_DEFAULT = 114.0f; // degrees / sec
bool _hmdRollControlEnabled { true };
float _hmdRollControlDeadZone { ROLL_CONTROL_DEAD_ZONE_DEFAULT };
float _hmdRollControlRate { ROLL_CONTROL_RATE_DEFAULT };
float _lastDrivenSpeed { 0.0f };
// working copy -- see AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
glm::mat4 _sensorToWorldMatrix { glm::mat4() };

View file

@ -83,19 +83,28 @@ void QmlCommerce::buy(const QString& assetId, int cost, const bool controlledFai
void QmlCommerce::balance() {
auto ledger = DependencyManager::get<Ledger>();
auto wallet = DependencyManager::get<Wallet>();
ledger->balance(wallet->listPublicKeys());
QStringList cachedPublicKeys = wallet->listPublicKeys();
if (!cachedPublicKeys.isEmpty()) {
ledger->balance(cachedPublicKeys);
}
}
void QmlCommerce::inventory() {
auto ledger = DependencyManager::get<Ledger>();
auto wallet = DependencyManager::get<Wallet>();
ledger->inventory(wallet->listPublicKeys());
QStringList cachedPublicKeys = wallet->listPublicKeys();
if (!cachedPublicKeys.isEmpty()) {
ledger->inventory(cachedPublicKeys);
}
}
void QmlCommerce::history() {
auto ledger = DependencyManager::get<Ledger>();
auto wallet = DependencyManager::get<Wallet>();
ledger->history(wallet->listPublicKeys());
QStringList cachedPublicKeys = wallet->listPublicKeys();
if (!cachedPublicKeys.isEmpty()) {
ledger->history(cachedPublicKeys);
}
}
void QmlCommerce::changePassphrase(const QString& oldPassphrase, const QString& newPassphrase) {

View file

@ -84,18 +84,7 @@ glm::vec2 RayPick::projectOntoXYPlane(const glm::vec3& worldPos, const glm::vec3
glm::vec2 RayPick::projectOntoOverlayXYPlane(const QUuid& overlayID, const glm::vec3& worldPos, bool unNormalized) {
glm::vec3 position = vec3FromVariant(qApp->getOverlays().getProperty(overlayID, "position").value);
glm::quat rotation = quatFromVariant(qApp->getOverlays().getProperty(overlayID, "rotation").value);
glm::vec3 dimensions;
float dpi = qApp->getOverlays().getProperty(overlayID, "dpi").value.toFloat();
if (dpi > 0) {
// Calculate physical dimensions for web3d overlay from resolution and dpi; "dimensions" property is used as a scale.
glm::vec3 resolution = glm::vec3(vec2FromVariant(qApp->getOverlays().getProperty(overlayID, "resolution").value), 1);
glm::vec3 scale = glm::vec3(vec2FromVariant(qApp->getOverlays().getProperty(overlayID, "dimensions").value), 0.01f);
const float INCHES_TO_METERS = 1.0f / 39.3701f;
dimensions = (resolution * INCHES_TO_METERS / dpi) * scale;
} else {
dimensions = glm::vec3(vec2FromVariant(qApp->getOverlays().getProperty(overlayID, "dimensions").value), 0.01);
}
glm::vec3 dimensions = glm::vec3(vec2FromVariant(qApp->getOverlays().getProperty(overlayID, "dimensions").value), 0.01f);
return projectOntoXYPlane(worldPos, position, rotation, dimensions, ENTITY_ITEM_DEFAULT_REGISTRATION_POINT, unNormalized);
}

View file

@ -58,6 +58,21 @@ Audio::Audio() : _devices(_contextIsHMD) {
enableNoiseReduction(enableNoiseReductionSetting.get());
}
// Begin recording output audio to `filepath`; forwards to the AudioClient,
// which owns the recording state. Returns the client's success result.
bool Audio::startRecording(const QString& filepath) {
    return DependencyManager::get<AudioClient>()->startRecording(filepath);
}
// Reports whether the AudioClient is currently recording.
bool Audio::getRecording() {
    return DependencyManager::get<AudioClient>()->getRecording();
}
// Stop any in-progress audio recording; forwards to the AudioClient.
void Audio::stopRecording() {
    DependencyManager::get<AudioClient>()->stopRecording();
}
void Audio::setMuted(bool isMuted) {
if (_isMuted != isMuted) {
auto client = DependencyManager::get<AudioClient>().data();

View file

@ -16,6 +16,7 @@
#include "AudioDevices.h"
#include "AudioEffectOptions.h"
#include "SettingHandle.h"
#include "AudioFileWav.h"
namespace scripting {
@ -55,6 +56,10 @@ public:
Q_INVOKABLE void setReverb(bool enable);
Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);
Q_INVOKABLE bool startRecording(const QString& filename);
Q_INVOKABLE void stopRecording();
Q_INVOKABLE bool getRecording();
signals:
void nop();
void mutedChanged(bool isMuted);
@ -83,7 +88,6 @@ private:
bool _isMuted { false };
bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
bool _contextIsHMD { false };
AudioDevices* getDevices() { return &_devices; }
AudioDevices _devices;
};

View file

@ -82,7 +82,6 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
// Now render the overlay components together into a single texture
renderDomainConnectionStatusBorder(renderArgs); // renders the connected domain line
renderAudioScope(renderArgs); // audio scope in the very back - NOTE: this is the debug audio scope, not the VU meter
renderOverlays(renderArgs); // renders Scripts Overlay and AudioScope
renderQmlUi(renderArgs); // renders a unit quad with the QML UI texture, and the text overlays from scripts
});
@ -118,25 +117,6 @@ void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
geometryCache->renderUnitQuad(batch, glm::vec4(1), _qmlGeometryId);
}
// Draws the debug audio scope into the overlay framebuffer using a
// pixel-space orthographic projection. Batch state must be configured
// before AudioScope::render issues its draw calls.
void ApplicationOverlay::renderAudioScope(RenderArgs* renderArgs) {
    PROFILE_RANGE(app, __FUNCTION__);
    gpu::Batch& batch = *renderArgs->_batch;
    auto geometryCache = DependencyManager::get<GeometryCache>();
    geometryCache->useSimpleDrawPipeline(batch);
    auto textureCache = DependencyManager::get<TextureCache>();
    // Bind a white texture so untextured geometry renders with its color.
    batch.setResourceTexture(0, textureCache->getWhiteTexture());
    int width = renderArgs->_viewport.z;
    int height = renderArgs->_viewport.w;
    // Ortho projection in pixels: origin top-left, y increasing downward.
    mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, ORTHO_NEAR_CLIP, ORTHO_FAR_CLIP);
    batch.setProjectionTransform(legacyProjection);
    batch.setModelTransform(Transform());
    batch.resetViewTransform();
    // Render the audio scope
    DependencyManager::get<AudioScope>()->render(renderArgs, width, height);
}
void ApplicationOverlay::renderOverlays(RenderArgs* renderArgs) {
PROFILE_RANGE(app, __FUNCTION__);

View file

@ -32,7 +32,6 @@ private:
void renderStatsAndLogs(RenderArgs* renderArgs);
void renderDomainConnectionStatusBorder(RenderArgs* renderArgs);
void renderQmlUi(RenderArgs* renderArgs);
void renderAudioScope(RenderArgs* renderArgs);
void renderOverlays(RenderArgs* renderArgs);
void buildFramebufferObject();

View file

@ -305,13 +305,6 @@ public slots:
OverlayID getKeyboardFocusOverlay();
void setKeyboardFocusOverlay(const OverlayID& id);
void mousePressPointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void mouseMovePointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void mouseReleasePointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void hoverEnterPointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void hoverOverPointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void hoverLeavePointerEvent(const OverlayID& overlayID, const PointerEvent& event);
signals:
/**jsdoc
* Emitted when an overlay is deleted
@ -358,6 +351,14 @@ private:
OverlayID _currentHoverOverOverlayID { UNKNOWN_OVERLAY_ID };
RayToOverlayIntersectionResult findRayIntersectionForMouseEvent(PickRay ray);
private slots:
void mousePressPointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void mouseMovePointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void mouseReleasePointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void hoverEnterPointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void hoverOverPointerEvent(const OverlayID& overlayID, const PointerEvent& event);
void hoverLeavePointerEvent(const OverlayID& overlayID, const PointerEvent& event);
};
#endif // hifi_Overlays_h

View file

@ -55,17 +55,15 @@
#include <plugins/InputConfiguration.h>
#include "ui/Snapshot.h"
#include "SoundCache.h"
#include "raypick/PointerScriptingInterface.h"
static const float DPI = 30.47f;
static const float INCHES_TO_METERS = 1.0f / 39.3701f;
static int MAX_WINDOW_SIZE = 4096;
static const float METERS_TO_INCHES = 39.3701f;
static const float OPAQUE_ALPHA_THRESHOLD = 0.99f;
const QString Web3DOverlay::TYPE = "web3d";
const QString Web3DOverlay::QML = "Web3DOverlay.qml";
Web3DOverlay::Web3DOverlay() : _dpi(DPI) {
Web3DOverlay::Web3DOverlay() {
_touchDevice.setCapabilities(QTouchDevice::Position);
_touchDevice.setType(QTouchDevice::TouchScreen);
_touchDevice.setName("Web3DOverlayTouchDevice");
@ -82,7 +80,6 @@ Web3DOverlay::Web3DOverlay(const Web3DOverlay* Web3DOverlay) :
_url(Web3DOverlay->_url),
_scriptURL(Web3DOverlay->_scriptURL),
_dpi(Web3DOverlay->_dpi),
_resolution(Web3DOverlay->_resolution),
_showKeyboardFocusHighlight(Web3DOverlay->_showKeyboardFocusHighlight)
{
_geometryId = DependencyManager::get<GeometryCache>()->allocateID();
@ -154,7 +151,7 @@ void Web3DOverlay::buildWebSurface() {
setupQmlSurface();
}
_webSurface->getSurfaceContext()->setContextProperty("globalPosition", vec3toVariant(getWorldPosition()));
_webSurface->resize(QSize(_resolution.x, _resolution.y));
onResizeWebSurface();
_webSurface->resume();
});
@ -244,8 +241,16 @@ void Web3DOverlay::setMaxFPS(uint8_t maxFPS) {
}
void Web3DOverlay::onResizeWebSurface() {
_mayNeedResize = false;
_webSurface->resize(QSize(_resolution.x, _resolution.y));
glm::vec2 dims = glm::vec2(getDimensions());
dims *= METERS_TO_INCHES * _dpi;
// ensure no side is never larger then MAX_WINDOW_SIZE
float max = (dims.x > dims.y) ? dims.x : dims.y;
if (max > MAX_WINDOW_SIZE) {
dims *= MAX_WINDOW_SIZE / max;
}
_webSurface->resize(QSize(dims.x, dims.y));
}
unsigned int Web3DOverlay::deviceIdByTouchPoint(qreal x, qreal y) {
@ -266,14 +271,14 @@ void Web3DOverlay::render(RenderArgs* args) {
return;
}
if (_currentMaxFPS != _desiredMaxFPS) {
setMaxFPS(_desiredMaxFPS);
}
if (_mayNeedResize) {
emit resizeWebSurface();
}
if (_currentMaxFPS != _desiredMaxFPS) {
setMaxFPS(_desiredMaxFPS);
}
vec4 color(toGlm(getColor()), getAlpha());
if (!_texture) {
@ -310,7 +315,7 @@ void Web3DOverlay::render(RenderArgs* args) {
Transform Web3DOverlay::evalRenderTransform() {
Transform transform = Parent::evalRenderTransform();
transform.setScale(1.0f);
transform.postScale(glm::vec3(getSize(), 1.0f));
transform.postScale(glm::vec3(getDimensions(), 1.0f));
return transform;
}
@ -434,18 +439,10 @@ void Web3DOverlay::setProperties(const QVariantMap& properties) {
}
}
auto resolution = properties["resolution"];
if (resolution.isValid()) {
bool valid;
auto res = vec2FromVariant(resolution, valid);
if (valid) {
_resolution = res;
}
}
auto dpi = properties["dpi"];
if (dpi.isValid()) {
_dpi = dpi.toFloat();
_mayNeedResize = true;
}
auto maxFPS = properties["maxFPS"];
@ -467,8 +464,6 @@ void Web3DOverlay::setProperties(const QVariantMap& properties) {
_inputMode = Touch;
}
}
_mayNeedResize = true;
}
QVariant Web3DOverlay::getProperty(const QString& property) {
@ -478,9 +473,6 @@ QVariant Web3DOverlay::getProperty(const QString& property) {
if (property == "scriptURL") {
return _scriptURL;
}
if (property == "resolution") {
return vec2toVariant(_resolution);
}
if (property == "dpi") {
return _dpi;
}
@ -536,17 +528,18 @@ void Web3DOverlay::setScriptURL(const QString& scriptURL) {
}
}
// Physical size of the web surface in meters: pixel resolution converted
// to inches via dpi, to meters, then scaled by the overlay's dimensions.
glm::vec2 Web3DOverlay::getSize() const {
    const glm::vec2 sizeInInches = _resolution / _dpi;
    return sizeInInches * INCHES_TO_METERS * getDimensions();
};
bool Web3DOverlay::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance, BoxFace& face, glm::vec3& surfaceNormal) {
// FIXME - face and surfaceNormal not being returned
glm::vec2 dimensions = getDimensions();
glm::quat rotation = getWorldOrientation();
glm::vec3 position = getWorldPosition();
// Don't call applyTransformTo() or setTransform() here because this code runs too frequently.
// Produce the dimensions of the overlay based on the image's aspect ratio and the overlay's scale.
return findRayRectangleIntersection(origin, direction, getWorldOrientation(), getWorldPosition(), getSize(), distance);
if (findRayRectangleIntersection(origin, direction, rotation, position, dimensions, distance)) {
surfaceNormal = rotation * Vectors::UNIT_Z;
face = glm::dot(surfaceNormal, direction) > 0 ? MIN_Z_FACE : MAX_Z_FACE;
return true;
} else {
return false;
}
}
Web3DOverlay* Web3DOverlay::createClone() const {
@ -555,4 +548,4 @@ Web3DOverlay* Web3DOverlay::createClone() const {
// Forwards a script-originated message to this overlay's scriptEventReceived
// signal via the Qt meta-object system; with the default auto connection this
// is delivered on the overlay's own thread even when called from another.
void Web3DOverlay::emitScriptEvent(const QVariant& message) {
    QMetaObject::invokeMethod(this, "scriptEventReceived", Q_ARG(QVariant, message));
}
}

View file

@ -52,8 +52,6 @@ public:
void setProperties(const QVariantMap& properties) override;
QVariant getProperty(const QString& property) override;
glm::vec2 getSize() const override;
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance,
BoxFace& face, glm::vec3& surfaceNormal) override;
@ -93,10 +91,9 @@ private:
gpu::TexturePointer _texture;
QString _url;
QString _scriptURL;
float _dpi;
vec2 _resolution{ 640, 480 };
float _dpi { 30.0f };
int _geometryId { 0 };
bool _showKeyboardFocusHighlight{ true };
bool _showKeyboardFocusHighlight { true };
QTouchDevice _touchDevice;

View file

@ -231,6 +231,9 @@ public:
const glm::mat4& getGeometryToRigTransform() const { return _geometryToRigTransform; }
const AnimPose& getModelOffsetPose() const { return _modelOffset; }
const AnimPose& getGeometryOffsetPose() const { return _geometryOffset; }
void setEnableDebugDrawIKTargets(bool enableDebugDrawIKTargets) { _enableDebugDrawIKTargets = enableDebugDrawIKTargets; }
void setEnableDebugDrawIKConstraints(bool enableDebugDrawIKConstraints) { _enableDebugDrawIKConstraints = enableDebugDrawIKConstraints; }
void setEnableDebugDrawIKChains(bool enableDebugDrawIKChains) { _enableDebugDrawIKChains = enableDebugDrawIKChains; }

View file

@ -79,6 +79,7 @@ Setting::Handle<int> staticJitterBufferFrames("staticJitterBufferFrames",
using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
Mutex _deviceMutex;
Mutex _recordMutex;
// thread-safe
QList<QAudioDeviceInfo> getAvailableDevices(QAudio::Mode mode) {
@ -222,8 +223,7 @@ AudioClient::AudioClient() :
// initialize wasapi; if getAvailableDevices is called from the CheckDevicesThread before this, it will crash
getAvailableDevices(QAudio::AudioInput);
getAvailableDevices(QAudio::AudioOutput);
// start a thread to detect any device changes
_checkDevicesTimer = new QTimer(this);
connect(_checkDevicesTimer, &QTimer::timeout, [this] {
@ -1845,11 +1845,9 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
qCDebug(audiostream, "Read %d samples from buffer (%d available, %d requested)", networkSamplesPopped, _receivedAudioStream.getSamplesAvailable(), samplesRequested);
AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput();
lastPopOutput.readSamples(scratchBuffer, networkSamplesPopped);
for (int i = 0; i < networkSamplesPopped; i++) {
mixBuffer[i] = convertToFloat(scratchBuffer[i]);
}
samplesRequested = networkSamplesPopped;
}
@ -1911,6 +1909,13 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
bytesWritten = maxSize;
}
// send output buffer for recording
if (_audio->_isRecording) {
Lock lock(_recordMutex);
_audio->_audioFileWav.addRawAudioChunk(reinterpret_cast<char*>(scratchBuffer), bytesWritten);
}
int bytesAudioOutputUnplayed = _audio->_audioOutput->bufferSize() - _audio->_audioOutput->bytesFree();
float msecsAudioOutputUnplayed = bytesAudioOutputUnplayed / (float)_audio->_outputFormat.bytesForDuration(USECS_PER_MSEC);
_audio->_stats.updateOutputMsUnplayed(msecsAudioOutputUnplayed);
@ -1922,6 +1927,22 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
return bytesWritten;
}
// Begin recording mixed output audio to a WAV file at the given path.
// Returns false (and does not start recording) if the file cannot be created.
bool AudioClient::startRecording(const QString& filepath) {
    const bool fileCreated = _audioFileWav.create(_outputFormat, filepath);
    if (fileCreated) {
        _isRecording = true;
        return true;
    }
    qDebug() << "Error creating audio file: " + filepath;
    return false;
}
// Stop an in-progress recording and finalize the WAV file.
// Safe to call when no recording is active (no-op).
void AudioClient::stopRecording() {
    if (!_isRecording) {
        return;
    }
    _isRecording = false;
    _audioFileWav.close();
}
void AudioClient::loadSettings() {
_receivedAudioStream.setDynamicJitterBufferEnabled(dynamicJitterBufferEnabled.get());
_receivedAudioStream.setStaticJitterBufferFrames(staticJitterBufferFrames.get());

View file

@ -47,11 +47,13 @@
#include <AudioConstants.h>
#include <AudioGate.h>
#include <shared/RateCounter.h>
#include <plugins/CodecPlugin.h>
#include "AudioIOStats.h"
#include "AudioFileWav.h"
#ifdef _WIN32
#pragma warning( push )
@ -67,7 +69,6 @@ class QAudioInput;
class QAudioOutput;
class QIODevice;
class Transform;
class NLPacket;
@ -118,6 +119,8 @@ public:
const MixedProcessedAudioStream& getReceivedAudioStream() const { return _receivedAudioStream; }
MixedProcessedAudioStream& getReceivedAudioStream() { return _receivedAudioStream; }
const QAudioFormat& getOutputFormat() const { return _outputFormat; }
float getLastInputLoudness() const { return _lastInputLoudness; } // TODO: relative to noise floor?
float getTimeSinceLastClip() const { return _timeSinceLastClip; }
@ -142,7 +145,7 @@ public:
void setIsPlayingBackRecording(bool isPlayingBackRecording) { _isPlayingBackRecording = isPlayingBackRecording; }
Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale);
bool outputLocalInjector(const AudioInjectorPointer& injector) override;
QAudioDeviceInfo getActiveAudioDevice(QAudio::Mode mode) const;
@ -155,6 +158,13 @@ public:
bool getNamedAudioDeviceForModeExists(QAudio::Mode mode, const QString& deviceName);
void setRecording(bool isRecording) { _isRecording = isRecording; };
bool getRecording() { return _isRecording; };
bool startRecording(const QString& filename);
void stopRecording();
#ifdef Q_OS_WIN
static QString getWinDeviceName(wchar_t* guid);
#endif
@ -184,13 +194,17 @@ public slots:
void toggleMute();
bool isMuted() { return _muted; }
virtual void setIsStereoInput(bool stereo) override;
void setNoiseReduction(bool isNoiseGateEnabled);
bool isNoiseReductionEnabled() const { return _isNoiseGateEnabled; }
bool getLocalEcho() { return _shouldEchoLocally; }
void setLocalEcho(bool localEcho) { _shouldEchoLocally = localEcho; }
void toggleLocalEcho() { _shouldEchoLocally = !_shouldEchoLocally; }
bool getServerEcho() { return _shouldEchoToServer; }
void setServerEcho(bool serverEcho) { _shouldEchoToServer = serverEcho; }
void toggleServerEcho() { _shouldEchoToServer = !_shouldEchoToServer; }
void processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
@ -239,6 +253,8 @@ signals:
void muteEnvironmentRequested(glm::vec3 position, float radius);
void outputBufferReceived(const QByteArray _outputBuffer);
protected:
AudioClient();
~AudioClient();
@ -354,9 +370,8 @@ private:
int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
float* _localOutputMixBuffer { NULL };
Mutex _localAudioMutex;
AudioLimiter _audioLimiter;
// Adds Reverb
void configureReverb();
void updateReverbOptions();
@ -391,6 +406,8 @@ private:
QList<QAudioDeviceInfo> _inputDevices;
QList<QAudioDeviceInfo> _outputDevices;
AudioFileWav _audioFileWav;
bool _hasReceivedFirstPacket { false };
QVector<AudioInjectorPointer> _activeLocalAudioInjectors;
@ -412,6 +429,8 @@ private:
QTimer* _checkDevicesTimer { nullptr };
QTimer* _checkPeakValuesTimer { nullptr };
bool _isRecording { false };
};

View file

@ -0,0 +1,69 @@
//
//  AudioFileWav.cpp
// libraries/audio-client/src
//
// Created by Luis Cuenca on 12/1/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AudioFileWav.h"
// Open (or re-open) the target file for writing and emit the WAV header.
// Any previously open file is closed first. Returns false if the file
// could not be opened for writing.
bool AudioFileWav::create(const QAudioFormat& audioFormat, const QString& filepath) {
    if (_file.isOpen()) {
        _file.close();
    }
    _file.setFileName(filepath);
    const bool opened = _file.open(QIODevice::WriteOnly);
    if (!opened) {
        return false;
    }
    addHeader(audioFormat);
    return true;
}
bool AudioFileWav::addRawAudioChunk(char* chunk, int size) {
if (_file.isOpen()) {
QDataStream stream(&_file);
stream.writeRawData(chunk, size);
return true;
}
return false;
}
void AudioFileWav::close() {
QDataStream stream(&_file);
stream.setByteOrder(QDataStream::LittleEndian);
// fill RIFF and size data on header
_file.seek(4);
stream << quint32(_file.size() - 8);
_file.seek(40);
stream << quint32(_file.size() - 44);
_file.close();
}
void AudioFileWav::addHeader(const QAudioFormat& audioFormat) {
QDataStream stream(&_file);
stream.setByteOrder(QDataStream::LittleEndian);
// RIFF
stream.writeRawData("RIFF", 4);
stream << quint32(0);
stream.writeRawData("WAVE", 4);
// Format description PCM = 16
stream.writeRawData("fmt ", 4);
stream << quint32(16);
stream << quint16(1);
stream << quint16(audioFormat.channelCount());
stream << quint32(audioFormat.sampleRate());
stream << quint32(audioFormat.sampleRate() * audioFormat.channelCount() * audioFormat.sampleSize() / 8); // bytes per second
stream << quint16(audioFormat.channelCount() * audioFormat.sampleSize() / 8); // block align
stream << quint16(audioFormat.sampleSize()); // bits Per Sample
// Init data chunck
stream.writeRawData("data", 4);
stream << quint32(0);
}

View file

@ -0,0 +1,34 @@
//
//  AudioFileWav.h
// libraries/audio-client/src
//
// Created by Luis Cuenca on 12/1/2017.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioFileWav_h
#define hifi_AudioFileWav_h
#include <QObject>
#include <QFile>
#include <QDataStream>
#include <QVector>
#include <QAudioFormat>
// Minimal writer for PCM WAV files. Usage: create() opens the file and
// writes the header, addRawAudioChunk() appends raw PCM bytes, and close()
// back-patches the header sizes and closes the file.
class AudioFileWav : public QObject {
    Q_OBJECT
public:
    AudioFileWav() {}
    // Open filepath for writing and emit a WAV header for audioFormat;
    // returns false if the file could not be opened.
    bool create(const QAudioFormat& audioFormat, const QString& filepath);
    // Append size bytes of raw PCM data; returns false if no file is open.
    bool addRawAudioChunk(char* chunk, int size);
    // Patch the header size fields and close the file.
    void close();
private:
    // Write the 44-byte RIFF/PCM header with placeholder sizes.
    void addHeader(const QAudioFormat& audioFormat);
    QFile _file;
};
#endif // hifi_AudioFileWav_h

View file

@ -162,6 +162,7 @@ AABox Avatar::getBounds() const {
}
void Avatar::animateScaleChanges(float deltaTime) {
if (_isAnimatingScale) {
float currentScale = getModelScale();
float desiredScale = getDomainLimitedScale();
@ -172,7 +173,7 @@ void Avatar::animateScaleChanges(float deltaTime) {
float animatedScale = (1.0f - blendFactor) * currentScale + blendFactor * desiredScale;
// snap to the end when we get close enough
const float MIN_RELATIVE_ERROR = 0.03f;
const float MIN_RELATIVE_ERROR = 0.001f;
float relativeError = fabsf(desiredScale - currentScale) / desiredScale;
if (relativeError < MIN_RELATIVE_ERROR) {
animatedScale = desiredScale;
@ -698,6 +699,7 @@ void Avatar::fixupModelsInScene(const render::ScenePointer& scene) {
_skeletonModel->removeFromScene(scene, transaction);
_skeletonModel->addToScene(scene, transaction);
canTryFade = true;
_isAnimatingScale = true;
}
for (auto attachmentModel : _attachmentModels) {
if (attachmentModel->isRenderable() && attachmentModel->needsFixupInScene()) {
@ -1195,6 +1197,8 @@ void Avatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
void Avatar::setModelURLFinished(bool success) {
invalidateJointIndicesCache();
_isAnimatingScale = true;
if (!success && _skeletonModelURL != AvatarData::defaultFullAvatarModelUrl()) {
const int MAX_SKELETON_DOWNLOAD_ATTEMPTS = 4; // NOTE: we don't want to be as generous as ResourceCache is, we only want 4 attempts
if (_skeletonModel->getResourceDownloadAttemptsRemaining() <= 0 ||
@ -1213,6 +1217,15 @@ void Avatar::setModelURLFinished(bool success) {
}
}
// rig is ready
// Slot: invoked when the skeleton model's rig has finished loading;
// refresh the cached unscaled eye height from the new skeleton.
void Avatar::rigReady() {
    buildUnscaledEyeHeightCache();
}
// rig has been reset.
// Slot: invoked when the rig has been reset; revert the cached eye
// height to the default until a new skeleton is available.
void Avatar::rigReset() {
    clearUnscaledEyeHeightCache();
}
// create new model, can return an instance of a SoftAttachmentModel rather than Model
static std::shared_ptr<Model> allocateAttachmentModel(bool isSoft, const Rig& rigOverride, bool isCauterized) {
@ -1580,53 +1593,91 @@ void Avatar::ensureInScene(AvatarSharedPointer self, const render::ScenePointer&
}
}
// thread-safe
float Avatar::getEyeHeight() const {
return getModelScale() * getUnscaledEyeHeight();
}
if (QThread::currentThread() != thread()) {
float result = DEFAULT_AVATAR_EYE_HEIGHT;
BLOCKING_INVOKE_METHOD(const_cast<Avatar*>(this), "getEyeHeight", Q_RETURN_ARG(float, result));
return result;
// thread-safe
// thread-safe
// Eye height in meters ignoring avatar scale, read from a thread-safe
// cache populated by buildUnscaledEyeHeightCache().
float Avatar::getUnscaledEyeHeight() const {
    return _unscaledEyeHeightCache.get();
}
void Avatar::buildUnscaledEyeHeightCache() {
float skeletonHeight = getUnscaledEyeHeightFromSkeleton();
// Sanity check by looking at the model extents.
Extents meshExtents = _skeletonModel->getUnscaledMeshExtents();
float meshHeight = meshExtents.size().y;
// if we determine the mesh is much larger then the skeleton, then we use the mesh height instead.
// This helps prevent absurdly large avatars from exceeding the domain height limit.
const float MESH_SLOP_RATIO = 1.5f;
if (meshHeight > skeletonHeight * MESH_SLOP_RATIO) {
_unscaledEyeHeightCache.set(meshHeight);
} else {
_unscaledEyeHeightCache.set(skeletonHeight);
}
}
// Reset the cached unscaled eye height to the default value (used when
// the rig is reset and no skeleton data is available).
void Avatar::clearUnscaledEyeHeightCache() {
    _unscaledEyeHeightCache.set(DEFAULT_AVATAR_EYE_HEIGHT);
}
float Avatar::getUnscaledEyeHeightFromSkeleton() const {
    // TODO: if performance becomes a concern we can cache this value rather than computing it every time.
// Makes assumption that the y = 0 plane in geometry is the ground plane.
// We also make that assumption in Rig::computeAvatarBoundingCapsule()
float avatarScale = getModelScale();
if (_skeletonModel) {
auto& rig = _skeletonModel->getRig();
// Normally the model offset transform will contain the avatar scale factor, we explicitly remove it here.
AnimPose modelOffsetWithoutAvatarScale(glm::vec3(1.0f), rig.getModelOffsetPose().rot(), rig.getModelOffsetPose().trans());
AnimPose geomToRigWithoutAvatarScale = modelOffsetWithoutAvatarScale * rig.getGeometryOffsetPose();
// This factor can be used to scale distances in the geometry frame into the unscaled rig frame.
// Typically it will be the unit conversion from cm to m.
float scaleFactor = geomToRigWithoutAvatarScale.scale().x; // in practice this always a uniform scale factor.
int headTopJoint = rig.indexOfJoint("HeadTop_End");
int headJoint = rig.indexOfJoint("Head");
int eyeJoint = rig.indexOfJoint("LeftEye") != -1 ? rig.indexOfJoint("LeftEye") : rig.indexOfJoint("RightEye");
int toeJoint = rig.indexOfJoint("LeftToeBase") != -1 ? rig.indexOfJoint("LeftToeBase") : rig.indexOfJoint("RightToeBase");
// Makes assumption that the y = 0 plane in geometry is the ground plane.
// We also make that assumption in Rig::computeAvatarBoundingCapsule()
const float GROUND_Y = 0.0f;
// Values from the skeleton are in the geometry coordinate frame.
auto skeleton = rig.getAnimSkeleton();
if (eyeJoint >= 0 && toeJoint >= 0) {
// measure from eyes to toes.
float eyeHeight = rig.getAbsoluteDefaultPose(eyeJoint).trans().y - rig.getAbsoluteDefaultPose(toeJoint).trans().y;
return eyeHeight;
// Measure from eyes to toes.
float eyeHeight = skeleton->getAbsoluteDefaultPose(eyeJoint).trans().y - skeleton->getAbsoluteDefaultPose(toeJoint).trans().y;
return scaleFactor * eyeHeight;
} else if (eyeJoint >= 0) {
// measure eyes to y = 0 plane.
float groundHeight = transformPoint(rig.getGeometryToRigTransform(), glm::vec3(0.0f)).y;
float eyeHeight = rig.getAbsoluteDefaultPose(eyeJoint).trans().y - groundHeight;
return eyeHeight;
// Measure Eye joint to y = 0 plane.
float eyeHeight = skeleton->getAbsoluteDefaultPose(eyeJoint).trans().y - GROUND_Y;
return scaleFactor * eyeHeight;
} else if (headTopJoint >= 0 && toeJoint >= 0) {
// measure toe to top of head. Note: default poses already include avatar scale factor
// Measure from ToeBase joint to HeadTop_End joint, then remove forehead distance.
const float ratio = DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD / DEFAULT_AVATAR_HEIGHT;
float height = rig.getAbsoluteDefaultPose(headTopJoint).trans().y - rig.getAbsoluteDefaultPose(toeJoint).trans().y;
return height - height * ratio;
float height = skeleton->getAbsoluteDefaultPose(headTopJoint).trans().y - skeleton->getAbsoluteDefaultPose(toeJoint).trans().y;
return scaleFactor * (height - height * ratio);
} else if (headTopJoint >= 0) {
// Measure from HeadTop_End joint to the ground, then remove forehead distance.
const float ratio = DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD / DEFAULT_AVATAR_HEIGHT;
float groundHeight = transformPoint(rig.getGeometryToRigTransform(), glm::vec3(0.0f)).y;
float headHeight = rig.getAbsoluteDefaultPose(headTopJoint).trans().y - groundHeight;
return headHeight - headHeight * ratio;
float headHeight = skeleton->getAbsoluteDefaultPose(headTopJoint).trans().y - GROUND_Y;
return scaleFactor * (headHeight - headHeight * ratio);
} else if (headJoint >= 0) {
float groundHeight = transformPoint(rig.getGeometryToRigTransform(), glm::vec3(0.0f)).y;
// Measure Head joint to the ground, then add in distance from neck to eye.
const float DEFAULT_AVATAR_NECK_TO_EYE = DEFAULT_AVATAR_NECK_TO_TOP_OF_HEAD - DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD;
const float ratio = DEFAULT_AVATAR_NECK_TO_EYE / DEFAULT_AVATAR_NECK_HEIGHT;
float neckHeight = rig.getAbsoluteDefaultPose(headJoint).trans().y - groundHeight;
return neckHeight + neckHeight * ratio;
float neckHeight = skeleton->getAbsoluteDefaultPose(headJoint).trans().y - GROUND_Y;
return scaleFactor * (neckHeight + neckHeight * ratio);
} else {
return avatarScale * DEFAULT_AVATAR_EYE_HEIGHT;
return DEFAULT_AVATAR_EYE_HEIGHT;
}
} else {
return avatarScale * DEFAULT_AVATAR_EYE_HEIGHT;
return DEFAULT_AVATAR_EYE_HEIGHT;
}
}

View file

@ -255,12 +255,16 @@ public:
bool isFading() const { return _isFading; }
void updateFadingStatus(render::ScenePointer scene);
/**jsdoc
* Provides read only access to the current eye height of the avatar.
* @function Avatar.getEyeHeight
* @returns {number} eye height of avatar in meters
*/
Q_INVOKABLE float getEyeHeight() const;
Q_INVOKABLE virtual float getEyeHeight() const override;
// returns eye height of avatar in meters, ignoring avatar scale.
// if _targetScale is 1 then this will be identical to getEyeHeight.
virtual float getUnscaledEyeHeight() const override;
    // returns true, if an accurate eye height estimate can be obtained by inspecting the avatar model skeleton and geometry,
// not all subclasses of AvatarData have access to this data.
virtual bool canMeasureEyeHeight() const override { return true; }
virtual float getModelScale() const { return _modelScale; }
virtual void setModelScale(float scale) { _modelScale = scale; }
@ -276,9 +280,17 @@ public slots:
glm::vec3 getRightPalmPosition() const;
glm::quat getRightPalmRotation() const;
// hooked up to Model::setURLFinished signal
void setModelURLFinished(bool success);
// hooked up to Model::rigReady & rigReset signals
void rigReady();
void rigReset();
protected:
float getUnscaledEyeHeightFromSkeleton() const;
void buildUnscaledEyeHeightCache();
void clearUnscaledEyeHeightCache();
virtual const QString& getSessionDisplayNameForTransport() const override { return _empty; } // Save a tiny bit of bandwidth. Mixer won't look at what we send.
QString _empty{};
virtual void maybeUpdateSessionDisplayNameFromTransport(const QString& sessionDisplayName) override { _sessionDisplayName = sessionDisplayName; } // don't use no-op setter!
@ -349,7 +361,7 @@ protected:
RateCounter<> _skeletonModelSimulationRate;
RateCounter<> _jointDataSimulationRate;
private:
protected:
class AvatarEntityDataHash {
public:
AvatarEntityDataHash(uint32_t h) : hash(h) {};
@ -379,6 +391,8 @@ private:
float _displayNameTargetAlpha { 1.0f };
float _displayNameAlpha { 1.0f };
ThreadSafeValueCache<float> _unscaledEyeHeightCache { DEFAULT_AVATAR_EYE_HEIGHT };
};
#endif // hifi_Avatar_h

View file

@ -13,4 +13,6 @@ OtherAvatar::OtherAvatar(QThread* thread) : Avatar(thread) {
_headData = new Head(this);
_skeletonModel = std::make_shared<SkeletonModel>(this, nullptr);
connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);
connect(_skeletonModel.get(), &Model::rigReady, this, &Avatar::rigReady);
connect(_skeletonModel.get(), &Model::rigReset, this, &Avatar::rigReset);
}

View file

@ -117,6 +117,55 @@ void AvatarData::setTargetScale(float targetScale) {
}
}
// Target scale clamped to the domain's per-avatar limits. When the eye
// height cannot be measured the height limits cannot be converted into
// scale limits, so the raw target scale is returned unchanged.
float AvatarData::getDomainLimitedScale() const {
    if (!canMeasureEyeHeight()) {
        // We can't make a good estimate.
        return _targetScale;
    }
    return glm::clamp(_targetScale, getDomainMinScale(), getDomainMaxScale());
}
// Store the domain's minimum avatar height, clamped to the globally
// allowed avatar height range.
void AvatarData::setDomainMinimumHeight(float domainMinimumHeight) {
    _domainMinimumHeight = glm::clamp(domainMinimumHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
}
// Store the domain's maximum avatar height, clamped to the globally
// allowed avatar height range.
void AvatarData::setDomainMaximumHeight(float domainMaximumHeight) {
    _domainMaximumHeight = glm::clamp(domainMaximumHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
}
// Minimum allowed scale for this avatar: the domain's minimum height
// divided by this avatar's unscaled height. A degenerate (near-zero)
// unscaled height falls back to the default avatar height to avoid a
// division blow-up.
float AvatarData::getDomainMinScale() const {
    const float EPSILON = 1.0e-4f;
    float unscaledHeight = getUnscaledHeight();
    if (unscaledHeight <= EPSILON) {
        unscaledHeight = DEFAULT_AVATAR_HEIGHT;
    }
    return _domainMinimumHeight / unscaledHeight;
}
// Maximum allowed scale for this avatar: the domain's maximum height
// divided by this avatar's unscaled height. A degenerate (near-zero)
// unscaled height falls back to the default avatar height to avoid a
// division blow-up.
float AvatarData::getDomainMaxScale() const {
    const float EPSILON = 1.0e-4f;
    float unscaledHeight = getUnscaledHeight();
    if (unscaledHeight <= EPSILON) {
        unscaledHeight = DEFAULT_AVATAR_HEIGHT;
    }
    return _domainMaximumHeight / unscaledHeight;
}
// Estimated total avatar height in meters, ignoring avatar scale:
// unscaled eye height plus a proportional eyes-to-top-of-head distance.
float AvatarData::getUnscaledHeight() const {
    const float eyeHeight = getUnscaledEyeHeight();
    const float ratio = eyeHeight / DEFAULT_AVATAR_HEIGHT;
    return eyeHeight + ratio * DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD;
}
// Estimated total avatar height in meters, including avatar scale:
// scaled eye height plus a proportional eyes-to-top-of-head distance.
float AvatarData::getHeight() const {
    const float eyeHeight = getEyeHeight();
    const float ratio = eyeHeight / DEFAULT_AVATAR_HEIGHT;
    return eyeHeight + ratio * DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD;
}
// Transform the avatar-local hand offset into world space.
glm::vec3 AvatarData::getHandPosition() const {
    const glm::quat worldOrientation = getWorldOrientation();
    const glm::vec3 worldPosition = getWorldPosition();
    return worldOrientation * _handPosition + worldPosition;
}

View file

@ -35,6 +35,7 @@
#include <QtScript/QScriptValueIterator>
#include <QReadWriteLock>
#include <AvatarConstants.h>
#include <JointData.h>
#include <NLPacket.h>
#include <Node.h>
@ -257,9 +258,6 @@ namespace AvatarDataPacket {
size_t maxJointDataSize(size_t numJoints);
}
static const float MAX_AVATAR_SCALE = 1000.0f;
static const float MIN_AVATAR_SCALE = .005f;
const float MAX_AUDIO_LOUDNESS = 1000.0f; // close enough for mouth animation
const int AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS = 1000;
@ -484,12 +482,52 @@ public:
// Scale
virtual void setTargetScale(float targetScale);
float getDomainLimitedScale() const { return glm::clamp(_targetScale, _domainMinimumScale, _domainMaximumScale); }
float getDomainLimitedScale() const;
void setDomainMinimumScale(float domainMinimumScale)
{ _domainMinimumScale = glm::clamp(domainMinimumScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE); _scaleChanged = usecTimestampNow(); }
void setDomainMaximumScale(float domainMaximumScale)
{ _domainMaximumScale = glm::clamp(domainMaximumScale, MIN_AVATAR_SCALE, MAX_AVATAR_SCALE); _scaleChanged = usecTimestampNow(); }
/**jsdoc
* returns the minimum scale allowed for this avatar in the current domain.
* This value can change as the user changes avatars or when changing domains.
* @function AvatarData.getDomainMinScale
* @returns {number} minimum scale allowed for this avatar in the current domain.
*/
Q_INVOKABLE float getDomainMinScale() const;
/**jsdoc
* returns the maximum scale allowed for this avatar in the current domain.
* This value can change as the user changes avatars or when changing domains.
* @function AvatarData.getDomainMaxScale
* @returns {number} maximum scale allowed for this avatar in the current domain.
*/
Q_INVOKABLE float getDomainMaxScale() const;
    // returns eye height of avatar in meters, ignoring avatar scale.
    // if _targetScale is 1 then this will be identical to getEyeHeight.
virtual float getUnscaledEyeHeight() const { return DEFAULT_AVATAR_EYE_HEIGHT; }
    // returns true, if an accurate eye height estimate can be obtained by inspecting the avatar model skeleton and geometry,
// not all subclasses of AvatarData have access to this data.
virtual bool canMeasureEyeHeight() const { return false; }
/**jsdoc
* Provides read only access to the current eye height of the avatar.
* This height is only an estimate and might be incorrect for avatars that are missing standard joints.
* @function AvatarData.getEyeHeight
* @returns {number} eye height of avatar in meters
*/
Q_INVOKABLE virtual float getEyeHeight() const { return _targetScale * getUnscaledEyeHeight(); }
/**jsdoc
* Provides read only access to the current height of the avatar.
* This height is only an estimate and might be incorrect for avatars that are missing standard joints.
* @function AvatarData.getHeight
* @returns {number} height of avatar in meters
*/
Q_INVOKABLE virtual float getHeight() const;
float getUnscaledHeight() const;
void setDomainMinimumHeight(float domainMinimumHeight);
void setDomainMaximumHeight(float domainMaximumHeight);
// Hand State
Q_INVOKABLE void setHandState(char s) { _handState = s; }
@ -698,8 +736,8 @@ protected:
// Body scale
float _targetScale;
float _domainMinimumScale { MIN_AVATAR_SCALE };
float _domainMaximumScale { MAX_AVATAR_SCALE };
float _domainMinimumHeight { MIN_AVATAR_HEIGHT };
float _domainMaximumHeight { MAX_AVATAR_HEIGHT };
// Hand state (are we grabbing something or not)
char _handState;

View file

@ -437,9 +437,11 @@ glm::mat4 CompositorHelper::getReticleTransform(const glm::mat4& eyePose, const
} else {
d = glm::normalize(overlaySurfacePoint);
}
reticlePosition = headPosition + (d * getReticleDepth());
// Our sensor to world matrix always has uniform scale
float sensorSpaceReticleDepth = getReticleDepth() / extractScale(_sensorToWorldMatrix).x;
reticlePosition = headPosition + (d * sensorSpaceReticleDepth);
quat reticleOrientation = cancelOutRoll(glm::quat_cast(_currentDisplayPlugin->getHeadPose()));
vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize * getReticleDepth());
vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize * sensorSpaceReticleDepth);
return glm::inverse(eyePose) * createMatFromScaleQuatAndPos(reticleScale, reticleOrientation, reticlePosition);
} else {
static const float CURSOR_PIXEL_SIZE = 32.0f;

View file

@ -1605,6 +1605,48 @@ void EntityItem::setParentID(const QUuid& value) {
if (tree && !oldParentID.isNull()) {
tree->removeFromChildrenOfAvatars(getThisPointer());
}
uint32_t oldParentNoBootstrapping = 0;
uint32_t newParentNoBootstrapping = 0;
if (!value.isNull() && tree) {
EntityItemPointer entity = tree->findEntityByEntityItemID(value);
if (entity) {
newParentNoBootstrapping = entity->getDirtyFlags() & Simulation::NO_BOOTSTRAPPING;
}
}
if (!oldParentID.isNull() && tree) {
EntityItemPointer entity = tree->findEntityByEntityItemID(oldParentID);
if (entity) {
oldParentNoBootstrapping = entity->getDirtyFlags() & Simulation::NO_BOOTSTRAPPING;
}
}
if (!value.isNull() && (value == Physics::getSessionUUID() || value == AVATAR_SELF_ID)) {
newParentNoBootstrapping |= Simulation::NO_BOOTSTRAPPING;
}
if ((bool)(oldParentNoBootstrapping ^ newParentNoBootstrapping)) {
if ((bool)(newParentNoBootstrapping & Simulation::NO_BOOTSTRAPPING)) {
markDirtyFlags(Simulation::NO_BOOTSTRAPPING);
forEachDescendant([&](SpatiallyNestablePointer object) {
if (object->getNestableType() == NestableType::Entity) {
EntityItemPointer entity = std::static_pointer_cast<EntityItem>(object);
entity->markDirtyFlags(Simulation::DIRTY_COLLISION_GROUP | Simulation::NO_BOOTSTRAPPING);
}
});
} else {
clearDirtyFlags(Simulation::NO_BOOTSTRAPPING);
forEachDescendant([&](SpatiallyNestablePointer object) {
if (object->getNestableType() == NestableType::Entity) {
EntityItemPointer entity = std::static_pointer_cast<EntityItem>(object);
entity->markDirtyFlags(Simulation::DIRTY_COLLISION_GROUP);
entity->clearDirtyFlags(Simulation::NO_BOOTSTRAPPING);
}
});
}
}
SpatiallyNestable::setParentID(value);
// children are forced to be kinematic
// may need to not collide with own avatar
@ -1834,39 +1876,8 @@ void EntityItem::computeCollisionGroupAndFinalMask(int16_t& group, int16_t& mask
}
}
if (userMask & USER_COLLISION_GROUP_MY_AVATAR) {
bool iAmHoldingThis = false;
// if this entity is a descendant of MyAvatar, don't collide with MyAvatar. This avoids the
// "bootstrapping" problem where you can shoot yourself across the room by grabbing something
// and holding it against your own avatar.
if (isChildOfMyAvatar()) {
iAmHoldingThis = true;
}
// also, don't bootstrap our own avatar with a hold action
QList<EntityDynamicPointer> holdActions = getActionsOfType(DYNAMIC_TYPE_HOLD);
QList<EntityDynamicPointer>::const_iterator i = holdActions.begin();
while (i != holdActions.end()) {
EntityDynamicPointer action = *i;
if (action->isMine()) {
iAmHoldingThis = true;
break;
}
i++;
}
QList<EntityDynamicPointer> farGrabActions = getActionsOfType(DYNAMIC_TYPE_FAR_GRAB);
i = farGrabActions.begin();
while (i != farGrabActions.end()) {
EntityDynamicPointer action = *i;
if (action->isMine()) {
iAmHoldingThis = true;
break;
}
i++;
}
if (iAmHoldingThis) {
userMask &= ~USER_COLLISION_GROUP_MY_AVATAR;
}
if ((bool)(_dirtyFlags & Simulation::NO_BOOTSTRAPPING)) {
userMask &= ~USER_COLLISION_GROUP_MY_AVATAR;
}
mask = Physics::getDefaultCollisionMask(group) & (int16_t)(userMask);
}
@ -1961,7 +1972,20 @@ bool EntityItem::addActionInternal(EntitySimulationPointer simulation, EntityDyn
if (success) {
_allActionsDataCache = newDataCache;
_dirtyFlags |= Simulation::DIRTY_PHYSICS_ACTIVATION;
_dirtyFlags |= Simulation::DIRTY_COLLISION_GROUP; // may need to not collide with own avatar
auto actionType = action->getType();
if (actionType == DYNAMIC_TYPE_HOLD || actionType == DYNAMIC_TYPE_FAR_GRAB) {
if (!(bool)(_dirtyFlags & Simulation::NO_BOOTSTRAPPING)) {
_dirtyFlags |= Simulation::NO_BOOTSTRAPPING;
_dirtyFlags |= Simulation::DIRTY_COLLISION_GROUP; // may need to not collide with own avatar
forEachDescendant([&](SpatiallyNestablePointer child) {
if (child->getNestableType() == NestableType::Entity) {
EntityItemPointer entity = std::static_pointer_cast<EntityItem>(child);
entity->markDirtyFlags(Simulation::NO_BOOTSTRAPPING | Simulation::DIRTY_COLLISION_GROUP);
}
});
}
}
} else {
qCDebug(entities) << "EntityItem::addActionInternal -- serializeActions failed";
}
@ -2002,6 +2026,29 @@ bool EntityItem::removeAction(EntitySimulationPointer simulation, const QUuid& a
return success;
}
bool EntityItem::stillHasGrabActions() const {
QList<EntityDynamicPointer> holdActions = getActionsOfType(DYNAMIC_TYPE_HOLD);
QList<EntityDynamicPointer>::const_iterator i = holdActions.begin();
while (i != holdActions.end()) {
EntityDynamicPointer action = *i;
if (action->isMine()) {
return true;
}
i++;
}
QList<EntityDynamicPointer> farGrabActions = getActionsOfType(DYNAMIC_TYPE_FAR_GRAB);
i = farGrabActions.begin();
while (i != farGrabActions.end()) {
EntityDynamicPointer action = *i;
if (action->isMine()) {
return true;
}
i++;
}
return false;
}
bool EntityItem::removeActionInternal(const QUuid& actionID, EntitySimulationPointer simulation) {
_previouslyDeletedActions.insert(actionID, usecTimestampNow());
if (_objectActions.contains(actionID)) {
@ -2015,7 +2062,6 @@ bool EntityItem::removeActionInternal(const QUuid& actionID, EntitySimulationPoi
action->setOwnerEntity(nullptr);
action->setIsMine(false);
_objectActions.remove(actionID);
if (simulation) {
action->removeFromSimulation(simulation);
@ -2024,7 +2070,23 @@ bool EntityItem::removeActionInternal(const QUuid& actionID, EntitySimulationPoi
bool success = true;
serializeActions(success, _allActionsDataCache);
_dirtyFlags |= Simulation::DIRTY_PHYSICS_ACTIVATION;
_dirtyFlags |= Simulation::DIRTY_COLLISION_GROUP; // may need to not collide with own avatar
auto removedActionType = action->getType();
if ((removedActionType == DYNAMIC_TYPE_HOLD || removedActionType == DYNAMIC_TYPE_FAR_GRAB) && !stillHasGrabActions()) {
_dirtyFlags &= ~Simulation::NO_BOOTSTRAPPING;
_dirtyFlags |= Simulation::DIRTY_COLLISION_GROUP; // may need to not collide with own avatar
forEachDescendant([&](SpatiallyNestablePointer child) {
if (child->getNestableType() == NestableType::Entity) {
EntityItemPointer entity = std::static_pointer_cast<EntityItem>(child);
entity->markDirtyFlags(Simulation::DIRTY_COLLISION_GROUP);
entity->clearDirtyFlags(Simulation::NO_BOOTSTRAPPING);
}
});
} else {
// NO-OP: we assume NO_BOOTSTRAPPING bits and collision group are correct
// because they should have been set correctly when the action was added
// and/or when children were linked
}
_objectActions.remove(actionID);
setDynamicDataNeedsTransmit(true);
return success;
}

View file

@ -470,6 +470,7 @@ protected:
void setSimulated(bool simulated) { _simulated = simulated; }
const QByteArray getDynamicDataInternal() const;
bool stillHasGrabActions() const;
void setDynamicDataInternal(QByteArray dynamicData);
virtual void dimensionsChanged() override;

View file

@ -2486,7 +2486,7 @@ QByteArray EntityItemProperties::getStaticCertificateJSON() const {
ADD_STRING_PROPERTY(collisionSoundURL, CollisionSoundURL);
ADD_STRING_PROPERTY(compoundShapeURL, CompoundShapeURL);
ADD_INT_PROPERTY(editionNumber, EditionNumber);
ADD_INT_PROPERTY(instanceNumber, EntityInstanceNumber);
ADD_INT_PROPERTY(entityInstanceNumber, EntityInstanceNumber);
ADD_STRING_PROPERTY(itemArtist, ItemArtist);
ADD_STRING_PROPERTY(itemCategories, ItemCategories);
ADD_STRING_PROPERTY(itemDescription, ItemDescription);

View file

@ -27,6 +27,7 @@ namespace Simulation {
const uint32_t DIRTY_PHYSICS_ACTIVATION = 0x0800; // should activate object in physics engine
const uint32_t DIRTY_SIMULATOR_ID = 0x1000; // the simulatorID has changed
const uint32_t DIRTY_SIMULATION_OWNERSHIP_PRIORITY = 0x2000; // our own bid priority has changed
const uint32_t NO_BOOTSTRAPPING = 0x4000;
const uint32_t DIRTY_TRANSFORM = DIRTY_POSITION | DIRTY_ROTATION;
const uint32_t DIRTY_VELOCITIES = DIRTY_LINEAR_VELOCITY | DIRTY_ANGULAR_VELOCITY;

View file

@ -74,6 +74,8 @@ PacketVersion versionForPacketType(PacketType packetType) {
return static_cast<PacketVersion>(AudioVersion::HighDynamicRangeVolume);
case PacketType::ICEPing:
return static_cast<PacketVersion>(IcePingVersion::SendICEPeerID);
case PacketType::DomainSettings:
return 18; // replace min_avatar_scale and max_avatar_scale with min_avatar_height and max_avatar_height
default:
return 17;
}

View file

@ -700,7 +700,7 @@ uint32_t EntityMotionState::getIncomingDirtyFlags() {
void EntityMotionState::clearIncomingDirtyFlags() {
assert(entityTreeIsLocked());
if (_body && _entity) {
_entity->clearDirtyFlags();
_entity->clearDirtyFlags(DIRTY_PHYSICS_FLAGS);
}
}

View file

@ -163,7 +163,7 @@ void Model::setScale(const glm::vec3& scale) {
_scaledToFit = false;
}
const float SCALE_CHANGE_EPSILON = 0.01f;
const float SCALE_CHANGE_EPSILON = 0.001f;
void Model::setScaleInternal(const glm::vec3& scale) {
if (glm::distance(_scale, scale) > SCALE_CHANGE_EPSILON) {
@ -286,6 +286,7 @@ void Model::reset() {
if (isLoaded()) {
const FBXGeometry& geometry = getFBXGeometry();
_rig.reset(geometry);
emit rigReset();
}
}
@ -322,6 +323,7 @@ bool Model::updateGeometry() {
_blendedVertexBuffers.push_back(buffer);
}
needFullUpdate = true;
emit rigReady();
}
return needFullUpdate;
}

View file

@ -204,6 +204,9 @@ public:
/// Returns the extents of the model's mesh
Extents getMeshExtents() const;
/// Returns the unscaled extents of the model's mesh
Extents getUnscaledMeshExtents() const;
void setTranslation(const glm::vec3& translation);
void setRotation(const glm::quat& rotation);
void setTransformNoUpdateRenderItems(const Transform& transform); // temporary HACK
@ -270,15 +273,14 @@ signals:
void setURLFinished(bool success);
void setCollisionModelURLFinished(bool success);
void requestRenderUpdate();
void rigReady();
void rigReset();
protected:
void setBlendshapeCoefficients(const QVector<float>& coefficients) { _blendshapeCoefficients = coefficients; }
const QVector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
/// Returns the unscaled extents of the model's mesh
Extents getUnscaledMeshExtents() const;
/// Clear the joint states
void clearJointState(int index);

View file

@ -12,6 +12,8 @@
#ifndef hifi_AvatarConstants_h
#define hifi_AvatarConstants_h
#include "GLMHelpers.h"
// 50th Percentile Man
const float DEFAULT_AVATAR_HEIGHT = 1.755f; // meters
const float DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD = 0.11f; // meters
@ -52,5 +54,10 @@ const float DEFAULT_AVATAR_JUMP_HEIGHT = (DEFAULT_AVATAR_JUMP_SPEED * DEFAULT_AV
const float DEFAULT_AVATAR_FALL_HEIGHT = 20.0f; // meters
const float DEFAULT_AVATAR_MIN_HOVER_HEIGHT = 2.5f; // meters
static const float MAX_AVATAR_SCALE = 1000.0f;
static const float MIN_AVATAR_SCALE = 0.005f;
static const float MAX_AVATAR_HEIGHT = 1000.0f * DEFAULT_AVATAR_HEIGHT; // meters
static const float MIN_AVATAR_HEIGHT = 0.005f * DEFAULT_AVATAR_HEIGHT; // meters
#endif // hifi_AvatarConstants_h

View file

@ -1149,6 +1149,7 @@ bool OffscreenQmlSurface::handlePointerEvent(const PointerEvent& event, class QT
touchEvent.setTarget(_rootItem);
touchEvent.setTouchPoints(touchPoints);
touchEvent.setTouchPointStates(touchPointStates);
touchEvent.setTimestamp((ulong)QDateTime::currentMSecsSinceEpoch());
touchEvent.ignore();
}

View file

@ -0,0 +1,17 @@
var qml = Script.resourcesPath() + '/qml/AudioScope.qml';
var window = new OverlayWindow({
title: 'Audio Scope',
source: qml,
width: 1200,
height: 500
});
window.closed.connect(function () {
if (Audio.getRecording()) {
Audio.stopRecording();
}
AudioScope.setVisible(false);
AudioScope.setLocalEcho(false);
AudioScope.setServerEcho(false);
AudioScope.selectAudioScopeFiveFrames();
Script.stop();
});

View file

@ -10,7 +10,7 @@
getControllerJointIndex, enableDispatcherModule, disableDispatcherModule,
Messages, makeDispatcherModuleParameters, makeRunningValues, Settings, entityHasActions,
Vec3, Overlays, flatten, Xform, getControllerWorldLocation, ensureDynamic, entityIsCloneable,
cloneEntity, DISPATCHER_PROPERTIES
cloneEntity, DISPATCHER_PROPERTIES, TEAR_AWAY_DISTANCE
*/
Script.include("/~/system/libraries/Xform.js");
@ -138,9 +138,9 @@ EquipHotspotBuddy.prototype.update = function(deltaTime, timestamp, controllerDa
var dimensions;
if (overlayInfoSet.type === "sphere") {
dimensions = overlayInfoSet.hotspot.radius * 2 * overlayInfoSet.currentSize * EQUIP_SPHERE_SCALE_FACTOR;
dimensions = (overlayInfoSet.hotspot.radius / 2) * overlayInfoSet.currentSize * EQUIP_SPHERE_SCALE_FACTOR;
} else {
dimensions = overlayInfoSet.hotspot.radius * 2 * overlayInfoSet.currentSize;
dimensions = (overlayInfoSet.hotspot.radius / 2) * overlayInfoSet.currentSize;
}
overlayInfoSet.overlays.forEach(function(overlay) {
@ -162,7 +162,7 @@ EquipHotspotBuddy.prototype.update = function(deltaTime, timestamp, controllerDa
var ATTACH_POINT_SETTINGS = "io.highfidelity.attachPoints";
var EQUIP_RADIUS = 0.2; // radius used for palm vs equip-hotspot for equipping.
var EQUIP_RADIUS = 1.0; // radius used for palm vs equip-hotspot for equipping.
var HAPTIC_PULSE_STRENGTH = 1.0;
var HAPTIC_PULSE_DURATION = 13.0;
@ -322,7 +322,9 @@ EquipHotspotBuddy.prototype.update = function(deltaTime, timestamp, controllerDa
}
} else {
var wearableProps = getWearableData(props);
var sensorToScaleFactor = MyAvatar.sensorToWorldScale;
if (wearableProps && wearableProps.joints) {
result.push({
key: entityID.toString() + "0",
entityID: entityID,
@ -332,7 +334,7 @@ EquipHotspotBuddy.prototype.update = function(deltaTime, timestamp, controllerDa
z: 0
},
worldPosition: entityXform.pos,
radius: EQUIP_RADIUS,
radius: EQUIP_RADIUS * sensorToScaleFactor,
joints: wearableProps.joints,
modelURL: null,
modelScale: null

View file

@ -12,6 +12,10 @@
(function () {
var dispatcherUtils = Script.require("/~/system/libraries/controllerDispatcherUtils.js");
function clamp(val, min, max) {
return Math.max(min, Math.min(max, val));
}
function ScaleAvatar(hand) {
this.hand = hand;
this.scalingStartAvatarScale = 0;
@ -61,7 +65,7 @@
controllerData.controllerLocations[this.otherHand()].position));
var newAvatarScale = (scalingCurrentDistance / this.scalingStartDistance) * this.scalingStartAvatarScale;
MyAvatar.scale = newAvatarScale;
MyAvatar.scale = clamp(newAvatarScale, MyAvatar.getDomainMinScale(), MyAvatar.getDomainMaxScale());
MyAvatar.scaleChanged();
}
return dispatcherUtils.makeRunningValues(true, [], []);

View file

@ -327,6 +327,19 @@
});
}
// fix for 10108 - marketplace category cannot scroll
function injectAddScrollbarToCategories() {
$('#categories-dropdown').on('show.bs.dropdown', function () {
$('body > div.container').css('display', 'none')
$('#categories-dropdown > ul.dropdown-menu').css({ 'overflow': 'auto', 'height': 'calc(100vh - 110px)' })
});
$('#categories-dropdown').on('hide.bs.dropdown', function () {
$('body > div.container').css('display', '')
$('#categories-dropdown > ul.dropdown-menu').css({ 'overflow': '', 'height': '' })
});
}
function injectHiFiCode() {
if (commerceMode) {
maybeAddLogInButton();
@ -358,6 +371,7 @@
}
injectUnfocusOnSearch();
injectAddScrollbarToCategories();
}
function injectHiFiItemPageCode() {

View file

@ -118,15 +118,16 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location, visible) {
Overlays.deleteOverlay(this.webOverlayID);
}
var WEB_ENTITY_Z_OFFSET = (tabletDepth / 2) * (1 / sensorScaleFactor);
var WEB_ENTITY_Y_OFFSET = 0.004 * (1 / sensorScaleFactor);
var WEB_ENTITY_Z_OFFSET = (tabletDepth / 2.0) / sensorScaleFactor;
var WEB_ENTITY_Y_OFFSET = 0.004;
var screenWidth = 0.82 * tabletWidth;
var screenHeight = 0.81 * tabletHeight;
this.webOverlayID = Overlays.addOverlay("web3d", {
name: "WebTablet Web",
url: url,
localPosition: { x: 0, y: WEB_ENTITY_Y_OFFSET, z: -WEB_ENTITY_Z_OFFSET },
localRotation: Quat.angleAxis(180, Y_AXIS),
resolution: this.getTabletTextureResolution(),
dimensions: {x: screenWidth, y: screenHeight, z: 0.1},
dpi: tabletDpi,
color: { red: 255, green: 255, blue: 255 },
alpha: 1.0,
@ -136,12 +137,15 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location, visible) {
visible: visible
});
var HOME_BUTTON_Y_OFFSET = ((tabletHeight / 2) - (tabletHeight / 20)) * (1 / sensorScaleFactor);
this.homeButtonID = Overlays.addOverlay("sphere", {
var HOME_BUTTON_Y_OFFSET = ((tabletHeight / 2) - (tabletHeight / 20)) * (1 / sensorScaleFactor) - 0.003;
// FIXME: Circle3D overlays currently at the wrong dimensions, so we need to account for that here
var homeButtonDim = 4.0 * tabletScaleFactor / 3.0;
this.homeButtonID = Overlays.addOverlay("circle3d", {
name: "homeButton",
localPosition: {x: -0.001, y: -HOME_BUTTON_Y_OFFSET, z: 0.0},
localRotation: {x: 0, y: 1, z: 0, w: 0},
dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor},
localPosition: { x: 0.0, y: -HOME_BUTTON_Y_OFFSET, z: -WEB_ENTITY_Z_OFFSET },
localRotation: { x: 0, y: 1, z: 0, w: 0},
dimensions: { x: homeButtonDim, y: homeButtonDim, z: homeButtonDim },
solid: true,
alpha: 0.0,
visible: visible,
drawInFront: false,
@ -151,14 +155,14 @@ WebTablet = function (url, width, dpi, hand, clientOnly, location, visible) {
this.homeButtonHighlightID = Overlays.addOverlay("circle3d", {
name: "homeButtonHighlight",
localPosition: { x: 0, y: -HOME_BUTTON_Y_OFFSET + 0.003, z: -0.0158 },
localPosition: { x: 0, y: -HOME_BUTTON_Y_OFFSET, z: -WEB_ENTITY_Z_OFFSET },
localRotation: { x: 0, y: 1, z: 0, w: 0 },
dimensions: { x: 4 * tabletScaleFactor, y: 4 * tabletScaleFactor, z: 4 * tabletScaleFactor },
dimensions: { x: homeButtonDim, y: homeButtonDim, z: homeButtonDim },
color: { red: 255, green: 255, blue: 255 },
solid: true,
innerRadius: 0.9,
ignoreIntersection: true,
alpha: 1.0,
color: { red: 255, green: 255, blue: 255 },
visible: visible,
drawInFront: false,
parentID: this.tabletEntityID,
@ -265,11 +269,16 @@ WebTablet.prototype.setLandscape = function(newLandscapeValue) {
this.landscape = newLandscapeValue;
Overlays.editOverlay(this.tabletEntityID,
{ rotation: this.landscape ? Quat.multiply(Camera.orientation, ROT_LANDSCAPE) :
Quat.multiply(Camera.orientation, ROT_Y_180) });
{ rotation: Quat.multiply(Camera.orientation, this.landscape ? ROT_LANDSCAPE : ROT_Y_180) });
var tabletWidth = getTabletWidthFromSettings() * MyAvatar.sensorToWorldScale;
var tabletScaleFactor = tabletWidth / TABLET_NATURAL_DIMENSIONS.x;
var tabletHeight = TABLET_NATURAL_DIMENSIONS.y * tabletScaleFactor;
var screenWidth = 0.82 * tabletWidth;
var screenHeight = 0.81 * tabletHeight;
Overlays.editOverlay(this.webOverlayID, {
resolution: this.getTabletTextureResolution(),
rotation: Quat.multiply(Camera.orientation, ROT_LANDSCAPE_WINDOW)
rotation: Quat.multiply(Camera.orientation, ROT_LANDSCAPE_WINDOW),
dimensions: {x: this.landscape ? screenHeight : screenWidth, y: this.landscape ? screenWidth : screenHeight, z: 0.1}
});
};
@ -505,31 +514,17 @@ WebTablet.prototype.getPosition = function () {
};
WebTablet.prototype.mousePressEvent = function (event) {
var pickRay = Camera.computePickRay(event.x, event.y);
var entityPickResults;
entityPickResults = Overlays.findRayIntersection(pickRay, true, [this.tabletEntityID]);
if (entityPickResults.intersects && (entityPickResults.entityID === this.tabletEntityID ||
entityPickResults.overlayID === this.tabletEntityID)) {
var overlayPickResults = Overlays.findRayIntersection(pickRay, true, [this.webOverlayID, this.homeButtonID], []);
if (overlayPickResults.intersects && overlayPickResults.overlayID === this.homeButtonID) {
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var onHomeScreen = tablet.onHomeScreen();
var isMessageOpen = tablet.isMessageDialogOpen();
if (onHomeScreen) {
if (isMessageOpen === false) {
HMD.closeTablet();
}
} else {
if (isMessageOpen === false) {
tablet.gotoHomeScreen();
this.setHomeButtonTexture();
}
if (!HMD.active) {
var pickRay = Camera.computePickRay(event.x, event.y);
var tabletBackPickResults = Overlays.findRayIntersection(pickRay, true, [this.tabletEntityID]);
if (tabletBackPickResults.intersects) {
var overlayPickResults = Overlays.findRayIntersection(pickRay, true, [this.webOverlayID, this.homeButtonID]);
if (!overlayPickResults.intersects) {
this.dragging = true;
var invCameraXform = new Xform(Camera.orientation, Camera.position).inv();
this.initialLocalIntersectionPoint = invCameraXform.xformPoint(tabletBackPickResults.intersection);
this.initialLocalPosition = Overlays.getProperty(this.tabletEntityID, "localPosition");
}
} else if (!HMD.active && (!overlayPickResults.intersects || overlayPickResults.overlayID !== this.webOverlayID)) {
this.dragging = true;
var invCameraXform = new Xform(Camera.orientation, Camera.position).inv();
this.initialLocalIntersectionPoint = invCameraXform.xformPoint(entityPickResults.intersection);
this.initialLocalPosition = Overlays.getProperty(this.tabletEntityID, "localPosition");
}
}
};

View file

@ -272,22 +272,8 @@ projectOntoEntityXYPlane = function (entityID, worldPos, props) {
projectOntoOverlayXYPlane = function projectOntoOverlayXYPlane(overlayID, worldPos) {
var position = Overlays.getProperty(overlayID, "position");
var rotation = Overlays.getProperty(overlayID, "rotation");
var dimensions;
var dpi = Overlays.getProperty(overlayID, "dpi");
if (dpi) {
// Calculate physical dimensions for web3d overlay from resolution and dpi; "dimensions" property is used as a scale.
var resolution = Overlays.getProperty(overlayID, "resolution");
resolution.z = 1; // Circumvent divide-by-zero.
var scale = Overlays.getProperty(overlayID, "dimensions");
scale.z = 0.01; // overlay dimensions are 2D, not 3D.
dimensions = Vec3.multiplyVbyV(Vec3.multiply(resolution, INCHES_TO_METERS / dpi), scale);
} else {
dimensions = Overlays.getProperty(overlayID, "dimensions");
if (dimensions.z) {
dimensions.z = 0.01; // overlay dimensions are 2D, not 3D.
}
}
var dimensions = Overlays.getProperty(overlayID, "dimensions");
dimensions.z = 0.01; // we are projecting onto the XY plane of the overlay, so ignore the z dimension
return projectOntoXYPlane(worldPos, position, rotation, dimensions, DEFAULT_REGISTRATION_POINT);
};

View file

@ -1031,6 +1031,71 @@ SelectionDisplay = (function() {
that.updateHandles();
};
// Function: Calculate New Bound Extremes
// uses dot product to discover new top and bottom on the new referential (max and min)
that.calculateNewBoundExtremes = function(boundPointList, referenceVector) {
if (boundPointList.length < 2) {
return [null, null];
}
var refMax = boundPointList[0];
var refMin = boundPointList[1];
var dotMax = Vec3.dot(boundPointList[0], referenceVector);
var dotMin = Vec3.dot(boundPointList[1], referenceVector);
if (dotMin > dotMax) {
dotMax = dotMin;
dotMin = Vec3.dot(boundPointList[0], referenceVector);
refMax = boundPointList[1];
refMin = boundPointList[0];
}
for (var i = 2; i < boundPointList.length ; i++) {
var dotAux = Vec3.dot(boundPointList[i], referenceVector);
if (dotAux > dotMax) {
dotMax = dotAux;
refMax = boundPointList[i];
} else if (dotAux < dotMin) {
dotMin = dotAux;
refMin = boundPointList[i];
}
}
return [refMin, refMax];
}
// Function: Project Bounding Box Points
// Projects all 6 bounding box points: Top, Bottom, Left, Right, Near, Far (assumes center 0,0,0) onto
// one of the basis of the new avatar referencial
// dimensions - dimensions of the AABB (axis aligned bounding box) on the standard basis
// [1, 0, 0], [0, 1, 0], [0, 0, 1]
// v - projection vector
// rotateHandleOffset - offset for the rotation handle gizmo position
that.projectBoundingBoxPoints = function(dimensions, v, rotateHandleOffset) {
var projT_v = Vec3.dot(Vec3.multiply((dimensions.y / 2) + rotateHandleOffset, Vec3.UNIT_Y), v);
projT_v = Vec3.multiply(projT_v, v);
var projB_v = Vec3.dot(Vec3.multiply(-(dimensions.y / 2) - rotateHandleOffset, Vec3.UNIT_Y), v);
projB_v = Vec3.multiply(projB_v, v);
var projL_v = Vec3.dot(Vec3.multiply((dimensions.x / 2) + rotateHandleOffset, Vec3.UNIT_X), v);
projL_v = Vec3.multiply(projL_v, v);
var projR_v = Vec3.dot(Vec3.multiply(-1.0 * (dimensions.x / 2) - 1.0 * rotateHandleOffset, Vec3.UNIT_X), v);
projR_v = Vec3.multiply(projR_v, v);
var projN_v = Vec3.dot(Vec3.multiply((dimensions.z / 2) + rotateHandleOffset, Vec3.FRONT), v);
projN_v = Vec3.multiply(projN_v, v);
var projF_v = Vec3.dot(Vec3.multiply(-1.0 * (dimensions.z / 2) - 1.0 * rotateHandleOffset, Vec3.FRONT), v);
projF_v = Vec3.multiply(projF_v, v);
var projList = [projT_v, projB_v, projL_v, projR_v, projN_v, projF_v];
return that.calculateNewBoundExtremes(projList, v);
};
// FUNCTION: UPDATE ROTATION HANDLES
that.updateRotationHandles = function() {
var diagonal = (Vec3.length(SelectionManager.worldDimensions) / 2) * 1.1;
@ -1043,10 +1108,10 @@ SelectionDisplay = (function() {
} else {
outerAlpha = 0.5;
}
// prev 0.05
var rotateHandleOffset = 0.05;
var top, far, left, bottom, near, right, boundsCenter, objectCenter, BLN, BRN, BLF, TLN, TRN, TLF, TRF;
var boundsCenter, objectCenter;
var dimensions, rotation;
if (spaceMode === SPACE_LOCAL) {
@ -1058,280 +1123,66 @@ SelectionDisplay = (function() {
dimensions = SelectionManager.worldDimensions;
var position = objectCenter;
top = objectCenter.y + (dimensions.y / 2);
far = objectCenter.z + (dimensions.z / 2);
left = objectCenter.x + (dimensions.x / 2);
bottom = objectCenter.y - (dimensions.y / 2);
near = objectCenter.z - (dimensions.z / 2);
right = objectCenter.x - (dimensions.x / 2);
boundsCenter = objectCenter;
var yawCorner;
var pitchCorner;
var rollCorner;
// determine which bottom corner we are closest to
/*------------------------------
example:
BRF +--------+ BLF
| |
| |
BRN +--------+ BLN
*
------------------------------*/
var cameraPosition = Camera.getPosition();
if (cameraPosition.x > objectCenter.x) {
// must be BRF or BRN
if (cameraPosition.z < objectCenter.z) {
var look = Vec3.normalize(Vec3.subtract(cameraPosition, objectCenter));
yawHandleRotation = Quat.fromVec3Degrees({
x: 270,
y: 90,
z: 0
});
pitchHandleRotation = Quat.fromVec3Degrees({
x: 0,
y: 90,
z: 0
});
rollHandleRotation = Quat.fromVec3Degrees({
x: 0,
y: 0,
z: 0
});
// place yaw, pitch and roll rotations on the avatar referential
var avatarReferential = Quat.multiply(MyAvatar.orientation, Quat.fromVec3Degrees({
x: 0,
y: 180,
z: 0
}));
var upVector = Quat.getUp(avatarReferential);
var rightVector = Vec3.multiply(-1, Quat.getRight(avatarReferential));
var frontVector = Quat.getFront(avatarReferential);
// project all 6 bounding box points: Top, Bottom, Left, Right, Near, Far (assumes center 0,0,0)
// onto the new avatar referential
yawCorner = {
x: left + rotateHandleOffset,
y: bottom - rotateHandleOffset,
z: near - rotateHandleOffset
};
pitchCorner = {
x: right - rotateHandleOffset,
y: top + rotateHandleOffset,
z: near - rotateHandleOffset
};
rollCorner = {
x: left + rotateHandleOffset,
y: top + rotateHandleOffset,
z: far + rotateHandleOffset
};
yawCenter = {
x: boundsCenter.x,
y: bottom,
z: boundsCenter.z
};
pitchCenter = {
x: right,
y: boundsCenter.y,
z: boundsCenter.z
};
rollCenter = {
x: boundsCenter.x,
y: boundsCenter.y,
z: far
};
Overlays.editOverlay(pitchHandle, {
url: ROTATE_ARROW_WEST_SOUTH_URL
});
Overlays.editOverlay(rollHandle, {
url: ROTATE_ARROW_WEST_SOUTH_URL
});
} else {
yawHandleRotation = Quat.fromVec3Degrees({
x: 270,
y: 0,
z: 0
});
pitchHandleRotation = Quat.fromVec3Degrees({
x: 180,
y: 270,
z: 0
});
rollHandleRotation = Quat.fromVec3Degrees({
x: 0,
y: 0,
z: 90
});
yawCorner = {
x: left + rotateHandleOffset,
y: bottom - rotateHandleOffset,
z: far + rotateHandleOffset
};
pitchCorner = {
x: right - rotateHandleOffset,
y: top + rotateHandleOffset,
z: far + rotateHandleOffset
};
rollCorner = {
x: left + rotateHandleOffset,
y: top + rotateHandleOffset,
z: near - rotateHandleOffset
};
yawCenter = {
x: boundsCenter.x,
y: bottom,
z: boundsCenter.z
};
pitchCenter = {
x: right,
y: boundsCenter.y,
z: boundsCenter.z
};
rollCenter = {
x: boundsCenter.x,
y: boundsCenter.y,
z: near
};
Overlays.editOverlay(pitchHandle, {
url: ROTATE_ARROW_WEST_NORTH_URL
});
Overlays.editOverlay(rollHandle, {
url: ROTATE_ARROW_WEST_NORTH_URL
});
}
} else {
// must be BLF or BLN
if (cameraPosition.z < objectCenter.z) {
yawHandleRotation = Quat.fromVec3Degrees({
x: 270,
y: 180,
z: 0
});
pitchHandleRotation = Quat.fromVec3Degrees({
x: 90,
y: 0,
z: 90
});
rollHandleRotation = Quat.fromVec3Degrees({
x: 0,
y: 0,
z: 180
});
yawCorner = {
x: right - rotateHandleOffset,
y: bottom - rotateHandleOffset,
z: near - rotateHandleOffset
};
pitchCorner = {
x: left + rotateHandleOffset,
y: top + rotateHandleOffset,
z: near - rotateHandleOffset
};
rollCorner = {
x: right - rotateHandleOffset,
y: top + rotateHandleOffset,
z: far + rotateHandleOffset
};
yawCenter = {
x: boundsCenter.x,
y: bottom,
z: boundsCenter.z
};
pitchCenter = {
x: left,
y: boundsCenter.y,
z: boundsCenter.z
};
rollCenter = {
x: boundsCenter.x,
y: boundsCenter.y,
z: far
};
Overlays.editOverlay(pitchHandle, {
url: ROTATE_ARROW_WEST_NORTH_URL
});
Overlays.editOverlay(rollHandle, {
url: ROTATE_ARROW_WEST_NORTH_URL
});
} else {
yawHandleRotation = Quat.fromVec3Degrees({
x: 270,
y: 270,
z: 0
});
pitchHandleRotation = Quat.fromVec3Degrees({
x: 180,
y: 270,
z: 0
});
rollHandleRotation = Quat.fromVec3Degrees({
x: 0,
y: 0,
z: 180
});
yawCorner = {
x: right - rotateHandleOffset,
y: bottom - rotateHandleOffset,
z: far + rotateHandleOffset
};
rollCorner = {
x: right - rotateHandleOffset,
y: top + rotateHandleOffset,
z: near - rotateHandleOffset
};
pitchCorner = {
x: left + rotateHandleOffset,
y: top + rotateHandleOffset,
z: far + rotateHandleOffset
};
yawCenter = {
x: boundsCenter.x,
y: bottom,
z: boundsCenter.z
};
rollCenter = {
x: boundsCenter.x,
y: boundsCenter.y,
z: near
};
pitchCenter = {
x: left,
y: boundsCenter.y,
z: boundsCenter.z
};
Overlays.editOverlay(pitchHandle, {
url: ROTATE_ARROW_WEST_NORTH_URL
});
Overlays.editOverlay(rollHandle, {
url: ROTATE_ARROW_WEST_NORTH_URL
});
}
}
// UP
var projUP = that.projectBoundingBoxPoints(dimensions, upVector, rotateHandleOffset);
// RIGHT
var projRIGHT = that.projectBoundingBoxPoints(dimensions, rightVector, rotateHandleOffset);
// FRONT
var projFRONT = that.projectBoundingBoxPoints(dimensions, frontVector, rotateHandleOffset);
// YAW
yawCenter = Vec3.sum(boundsCenter, projUP[0]);
yawCorner = Vec3.sum(boundsCenter, Vec3.sum(Vec3.sum(projUP[0], projRIGHT[1]), projFRONT[1]));
yawHandleRotation = Quat.lookAt(
yawCorner,
Vec3.sum(yawCorner, upVector),
Vec3.subtract(yawCenter,yawCorner));
yawHandleRotation = Quat.multiply(Quat.angleAxis(45, upVector), yawHandleRotation);
// PTCH
pitchCorner = Vec3.sum(boundsCenter, Vec3.sum(Vec3.sum(projUP[1], projRIGHT[0]), projFRONT[1]));
pitchCenter = Vec3.sum(boundsCenter, projRIGHT[0]);
pitchHandleRotation = Quat.lookAt(
pitchCorner,
Vec3.sum(pitchCorner, rightVector),
Vec3.subtract(pitchCenter,pitchCorner));
pitchHandleRotation = Quat.multiply(Quat.angleAxis(45, rightVector), pitchHandleRotation);
// ROLL
rollCorner = Vec3.sum(boundsCenter, Vec3.sum(Vec3.sum(projUP[1], projRIGHT[1]), projFRONT[0]));
rollCenter = Vec3.sum(boundsCenter, projFRONT[0]);
rollHandleRotation = Quat.lookAt(
rollCorner,
Vec3.sum(rollCorner, frontVector),
Vec3.subtract(rollCenter,rollCorner));
rollHandleRotation = Quat.multiply(Quat.angleAxis(45, frontVector), rollHandleRotation);
var rotateHandlesVisible = true;
var rotationOverlaysVisible = false;
@ -1382,6 +1233,8 @@ SelectionDisplay = (function() {
position: rollCorner,
rotation: rollHandleRotation
});
};
// FUNCTION: UPDATE HANDLE SIZES
@ -3422,7 +3275,7 @@ SelectionDisplay = (function() {
y: innerRadius * ROTATION_DISPLAY_SIZE_Y_MULTIPLIER
},
lineHeight: innerRadius * ROTATION_DISPLAY_LINE_HEIGHT_MULTIPLIER,
text: normalizeDegrees(angleFromZero) + "°"
text: normalizeDegrees(-angleFromZero) + "°"
};
if (wantDebug) {
print(" TranslatedPos: " + position.x + ", " + position.y + ", " + position.z);
@ -3483,6 +3336,13 @@ SelectionDisplay = (function() {
initialPosition = SelectionManager.worldPosition;
rotationNormal = { x: 0, y: 0, z: 0 };
rotationNormal[rotAroundAxis] = 1;
//get the correct axis according to the avatar referencial
var avatarReferential = Quat.multiply(MyAvatar.orientation, Quat.fromVec3Degrees({
x: 0,
y: 0,
z: 0
}));
rotationNormal = Vec3.multiplyQbyV(avatarReferential, rotationNormal);
// Size the overlays to the current selection size
var diagonal = (Vec3.length(SelectionManager.worldDimensions) / 2) * 1.1;
@ -3584,11 +3444,11 @@ SelectionDisplay = (function() {
var snapAngle = snapToInner ? innerSnapAngle : 1.0;
angleFromZero = Math.floor(angleFromZero / snapAngle) * snapAngle;
var vec3Degrees = { x: 0, y: 0, z: 0 };
vec3Degrees[rotAroundAxis] = angleFromZero;
var rotChange = Quat.fromVec3Degrees(vec3Degrees);
var rotChange = Quat.angleAxis(angleFromZero, rotationNormal);
updateSelectionsRotation(rotChange);
//present angle in avatar referencial
angleFromZero = -angleFromZero;
updateRotationDegreesOverlay(angleFromZero, handleRotation, rotCenter);
// update the rotation display accordingly...

View file

@ -169,32 +169,12 @@ function calculateTouchTargetFromOverlay(touchTip, overlayID) {
// calclulate normalized position
var invRot = Quat.inverse(overlayRotation);
var localPos = Vec3.multiplyQbyV(invRot, Vec3.subtract(position, overlayPosition));
var dpi = Overlays.getProperty(overlayID, "dpi");
var dimensions;
if (dpi) {
// Calculate physical dimensions for web3d overlay from resolution and dpi; "dimensions" property
// is used as a scale.
var resolution = Overlays.getProperty(overlayID, "resolution");
if (resolution === undefined) {
return;
}
resolution.z = 1; // Circumvent divide-by-zero.
var scale = Overlays.getProperty(overlayID, "dimensions");
if (scale === undefined) {
return;
}
scale.z = 0.01; // overlay dimensions are 2D, not 3D.
dimensions = Vec3.multiplyVbyV(Vec3.multiply(resolution, INCHES_TO_METERS / dpi), scale);
} else {
dimensions = Overlays.getProperty(overlayID, "dimensions");
if (dimensions === undefined) {
return;
}
if (!dimensions.z) {
dimensions.z = 0.01; // sometimes overlay dimensions are 2D, not 3D.
}
var dimensions = Overlays.getProperty(overlayID, "dimensions");
if (dimensions === undefined) {
return;
}
dimensions.z = 0.01; // we are projecting onto the XY plane of the overlay, so ignore the z dimension
var invDimensions = { x: 1 / dimensions.x, y: 1 / dimensions.y, z: 1 / dimensions.z };
var normalizedPosition = Vec3.sum(Vec3.multiplyVbyV(localPos, invDimensions), DEFAULT_REGISTRATION_POINT);

View file

@ -185,7 +185,7 @@ logTrace = function(str) {
// (the vector that would move the point outside the sphere)
// otherwise returns false
findSphereHit = function(point, sphereRadius) {
var EPSILON = 0.000001; //smallish positive number - used as margin of error for some computations
var EPSILON = 0.000001; //smallish positive number - used as margin of error for some computations
var vectorLength = Vec3.length(point);
if (vectorLength < EPSILON) {
return true;
@ -400,25 +400,28 @@ resizeTablet = function (width, newParentJointIndex, sensorToWorldScaleOverride)
});
// update webOverlay
var WEB_ENTITY_Z_OFFSET = (tabletDepth / 2) * sensorScaleOffsetOverride;
var WEB_ENTITY_Y_OFFSET = 0.004 * sensorScaleOffsetOverride;
var WEB_ENTITY_Z_OFFSET = (tabletDepth / 2.0) * sensorScaleOffsetOverride;
var WEB_ENTITY_Y_OFFSET = 0.004 * sensorScaleFactor * sensorScaleOffsetOverride;
var screenWidth = 0.82 * tabletWidth;
var screenHeight = 0.81 * tabletHeight;
var landscape = Tablet.getTablet("com.highfidelity.interface.tablet.system").landscape;
Overlays.editOverlay(HMD.tabletScreenID, {
localPosition: { x: 0, y: WEB_ENTITY_Y_OFFSET, z: -WEB_ENTITY_Z_OFFSET },
dimensions: {x: landscape ? screenHeight : screenWidth, y: landscape ? screenWidth : screenHeight, z: 0.1},
dpi: tabletDpi
});
// update homeButton
var HOME_BUTTON_Y_OFFSET = ((tabletHeight / 2) - (tabletHeight / 20)) * sensorScaleOffsetOverride;
var homeButtonDim = 4 * tabletScaleFactor;
var HOME_BUTTON_Y_OFFSET = ((tabletHeight / 2) - (tabletHeight / 20) - 0.003 * sensorScaleFactor) * sensorScaleOffsetOverride;
// FIXME: Circle3D overlays currently at the wrong dimensions, so we need to account for that here
var homeButtonDim = 4.0 * tabletScaleFactor / 3.0;
Overlays.editOverlay(HMD.homeButtonID, {
localPosition: {x: -0.001, y: -HOME_BUTTON_Y_OFFSET, z: 0.0},
dimensions: { x: homeButtonDim, y: homeButtonDim, z: homeButtonDim}
localPosition: { x: 0, y: -HOME_BUTTON_Y_OFFSET, z: -WEB_ENTITY_Z_OFFSET },
dimensions: { x: homeButtonDim, y: homeButtonDim, z: homeButtonDim }
});
// Circle3D overlays render at 1.5x their proper dimensions
var highlightDim = homeButtonDim / 3.0;
Overlays.editOverlay(HMD.homeButtonHighlightID, {
localPosition: { x: 0, y: -HOME_BUTTON_Y_OFFSET + 0.003, z: -0.0158 },
dimensions: { x: highlightDim, y: highlightDim, z: highlightDim }
localPosition: { x: 0, y: -HOME_BUTTON_Y_OFFSET, z: -WEB_ENTITY_Z_OFFSET },
dimensions: { x: homeButtonDim, y: homeButtonDim, z: homeButtonDim }
});
};

View file

@ -428,8 +428,10 @@
tablet.pushOntoStack(MARKETPLACE_PURCHASES_QML_PATH);
break;
case 'checkout_itemLinkClicked':
case 'checkout_continueShopping':
tablet.gotoWebScreen(MARKETPLACE_URL + '/items/' + message.itemId, MARKETPLACES_INJECT_SCRIPT_URL);
break;
case 'checkout_continueShopping':
tablet.gotoWebScreen(MARKETPLACE_URL_INITIAL, MARKETPLACES_INJECT_SCRIPT_URL);
//tablet.popFromStack();
break;
case 'purchases_itemInfoClicked':

View file

@ -47,7 +47,7 @@
}
return false;
}
if (Overlays.getProperty(HMD.homeButtonID, "type") != "sphere" ||
if (Overlays.getProperty(HMD.homeButtonID, "type") != "circle3d" ||
Overlays.getProperty(HMD.tabletScreenID, "type") != "web3d") {
if (debugTablet) {
print("TABLET is invalid due to other");