Mirror of https://github.com/HifiExperiments/overte.git, synced 2025-08-09 01:07:09 +02:00

Commit 19d0968ccb
Merge branch 'master' of https://github.com/highfidelity/hifi into depthReticleWork

Conflicts:
    interface/src/ui/ApplicationCompositor.h

39 changed files with 571 additions and 464 deletions

examples/audioExamples/injectorLoadTest.js (new file, 94 lines)
@@ -0,0 +1,94 @@
//
//  injectorLoadTest.js
//  audio
//
//  Created by Eric Levin 2/1/2016
//  Copyright 2016 High Fidelity, Inc.

//  This script tests what happens when many audio injectors are created and played
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html


Script.include("../libraries/utils.js");


var numSoundsToPlayPerBatch = 35 // Number of simultaneously playing sounds
var numSoundsPlaying = 0;
var timeBetweenBatch = 30000;
// A green box represents an injector that is playing

var basePosition = {
    x: 0,
    y: 0,
    z: 0
};

var soundBoxes = [];

var testSound = SoundCache.getSound("https://s3-us-west-1.amazonaws.com/hifi-content/eric/Sounds/dove.wav");
var totalInjectors = 0;
var clipDuration;

if(!testSound.downloaded) {

    print("SOUND IS NOT READY YET")
    testSound.ready.connect(function() {
        playSounds();
        clipDuration = testSound.duration;
    });
} else {
    // otherwise play sounds right away
    playSounds();
    clipDuration = testSound.duration;
}

function playSounds() {
    print("PLAY SOUNDS!")
    for (var i = 0; i < numSoundsToPlayPerBatch; i++) {
        playSound();
    }

    Script.setTimeout(function() {
        numSoundsPlaying = 0;
    }, clipDuration);
    print("EBL Total Number of Injectors: " + totalInjectors);

    Script.setTimeout(function() {
        playSounds();
    }, timeBetweenBatch);
}


function playSound() {
    var position = Vec3.sum(basePosition, {x: randFloat(-.1, .1), y: randFloat(-1, 1), z: randFloat(-3, -.1)});
    var injector = Audio.playSound(testSound, {
        position: position,
        volume: 0.2
    });

    numSoundsPlaying++;
    print("NUM SOUNDS PLAYING: " + numSoundsPlaying);
    print("*******************************************");
    print("INJECTOR VALUE: ")
    print(JSON.stringify(injector));
    totalInjectors++;

    var soundBox = Entities.addEntity({
        type: "Box",
        name: "Debug Sound Box",
        color: {red: 200, green: 10, blue: 200},
        dimensions: {x: 0.1, y: 0.1, z: 0.1},
        position: position
    });

    soundBoxes.push(soundBox);
}

function cleanup() {
    soundBoxes.forEach( function(soundBox) {
        Entities.deleteEntity(soundBox);
    });
}

Script.scriptEnding.connect(cleanup);

@@ -1237,9 +1237,9 @@
<div class="property">
<div class="label">Position</div>
<div class="value">
<div class="input-area ">X<input class="coord" type='number' id="property-pos-x" step="0.1"><div class="prop-x"></div></div>
<div class="input-area ">Y<input class="coord" type='number' id="property-pos-y" step="0.1"><div class="prop-y"></div></div>
<div class="input-area ">Z<input class="coord" type='number' id="property-pos-z" step="0.1"><div class="prop-z"></div></div>
<div class="input-area ">X<input class="coord" type='number' id="property-pos-x"><div class="prop-x"></div></div>
<div class="input-area ">Y<input class="coord" type='number' id="property-pos-y"><div class="prop-y"></div></div>
<div class="input-area ">Z<input class="coord" type='number' id="property-pos-z" ><div class="prop-z"></div></div>
<div>
<input type="button" id="move-selection-to-grid" value="Selection to Grid">
<input type="button" id="move-all-to-grid" value="All to Grid">

@@ -14,9 +14,6 @@ Render.RenderShadowTask.enabled = true;
var RDT = Render.RenderDeferredTask;
RDT.AmbientOcclusion.enabled = true;
RDT.DebugDeferredBuffer.enabled = false;
["DrawOpaqueDeferred", "DrawTransparentDeferred", "DrawOverlay3DOpaque", "DrawOverlay3DTransparent"]
.map(function(name) { return RDT[name]; })
.forEach(function(job) { job.maxDrawn = job.numDrawn; });

// Set up the qml ui
var qml = Script.resolvePath('main.qml');

@@ -39,4 +36,4 @@ function setDebugBufferSize(x) {
Render.RenderDeferredTask.DebugDeferredBuffer.size = {x: x, y: -1, z: 1, w: 1};
}

Script.scriptEnding.connect(function() { Render.fromJSON(oldConfig); } );
Script.scriptEnding.connect(function() { Render.load(oldConfig); } );

@@ -12,98 +12,109 @@ import QtQuick 2.5
import QtQuick.Controls 1.4

Column {
spacing: 8

Repeater {
model: [ "Opaque:DrawOpaqueDeferred", "Transparent:DrawTransparentDeferred",
"Opaque Overlays:DrawOverlay3DOpaque", "Transparent Overlays:DrawOverlay3DTransparent" ]
ConfigSlider {
label: qsTr(modelData.split(":")[0])
integral: true
config: Render.getConfig(modelData.split(":")[1])
property: "maxDrawn"
max: config.numDrawn
}
}

Row {
CheckBox {
text: qsTr("Display Status")
onCheckedChanged: { Render.getConfig("DrawStatus").showDisplay = checked }
}
CheckBox {
text: qsTr("Network/Physics Status")
onCheckedChanged: { Render.getConfig("DrawStatus").showNetwork = checked }
}
}

ConfigSlider {
label: qsTr("Tone Mapping Exposure")
config: Render.getConfig("ToneMapping")
property: "exposure"
min: -10; max: 10
id: root
spacing: 16
Switch {
checked: true
onClicked: ui.visible = checked
}

Column {
id: ambientOcclusion
property var config: Render.getConfig("AmbientOcclusion")
id: ui
spacing: 8

Label { text: qsTr("Ambient Occlusion") }
// TODO: Add gpuTimer
CheckBox { text: qsTr("Dithering"); checked: ambientOcclusion.config.ditheringEnabled }
Repeater {
model: [
"Resolution Level:resolutionLevel:4",
"Obscurance Level:obscuranceLevel:1",
"Radius:radius:2",
"Falloff Bias:falloffBias:0.2",
"Edge Sharpness:edgeSharpness:1",
"Blur Radius:blurRadius:6",
"Blur Deviation:blurDeviation:3"
]
ConfigSlider {
label: qsTr(modelData.split(":")[0])
config: ambientOcclusion.config
property: modelData.split(":")[1]
max: modelData.split(":")[2]
}
}
Repeater {
model: [
"Samples:numSamples:32",
"Spiral Turns:numSpiralTurns:30:"
]
model: [ "Opaque:DrawOpaqueDeferred", "Transparent:DrawTransparentDeferred",
"Opaque Overlays:DrawOverlay3DOpaque", "Transparent Overlays:DrawOverlay3DTransparent" ]
ConfigSlider {
label: qsTr(modelData.split(":")[0])
integral: true
config: ambientOcclusion.config
property: modelData.split(":")[1]
max: modelData.split(":")[2]
config: Render.getConfig(modelData.split(":")[1])
property: "maxDrawn"
max: config.numDrawn
min: -1
}
}
}

Column {
id: debug
property var config: Render.getConfig("DebugDeferredBuffer")

function setDebugMode(mode) {
debug.config.enabled = (mode != 0);
debug.config.mode = mode;
Row {
CheckBox {
text: qsTr("Display Status")
onCheckedChanged: { Render.getConfig("DrawStatus").showDisplay = checked }
}
CheckBox {
text: qsTr("Network/Physics Status")
onCheckedChanged: { Render.getConfig("DrawStatus").showNetwork = checked }
}
}

Label { text: qsTr("Debug Buffer") }
ExclusiveGroup { id: bufferGroup }
Repeater {
model: [
"Off", "Diffuse", "Metallic", "Roughness", "Normal", "Depth",
"Lighting", "Shadow", "Pyramid Depth", "Ambient Occlusion", "Custom Shader"
]
RadioButton {
text: qsTr(modelData)
exclusiveGroup: bufferGroup
checked: index == 0
onCheckedChanged: if (checked) debug.setDebugMode(index);
ConfigSlider {
label: qsTr("Tone Mapping Exposure")
config: Render.getConfig("ToneMapping")
property: "exposure"
min: -10; max: 10
}

Column {
id: ambientOcclusion
property var config: Render.getConfig("AmbientOcclusion")

Label { text: qsTr("Ambient Occlusion") }
// TODO: Add gpuTimer
CheckBox { text: qsTr("Dithering"); checked: ambientOcclusion.config.ditheringEnabled }
Repeater {
model: [
"Resolution Level:resolutionLevel:4",
"Obscurance Level:obscuranceLevel:1",
"Radius:radius:2",
"Falloff Bias:falloffBias:0.2",
"Edge Sharpness:edgeSharpness:1",
"Blur Radius:blurRadius:6",
"Blur Deviation:blurDeviation:3"
]
ConfigSlider {
label: qsTr(modelData.split(":")[0])
config: ambientOcclusion.config
property: modelData.split(":")[1]
max: modelData.split(":")[2]
}
}
Repeater {
model: [
"Samples:numSamples:32",
"Spiral Turns:numSpiralTurns:30:"
]
ConfigSlider {
label: qsTr(modelData.split(":")[0])
integral: true
config: ambientOcclusion.config
property: modelData.split(":")[1]
max: modelData.split(":")[2]
}
}
}

Column {
id: debug
property var config: Render.getConfig("DebugDeferredBuffer")

function setDebugMode(mode) {
debug.config.enabled = (mode != 0);
debug.config.mode = mode;
}

Label { text: qsTr("Debug Buffer") }
ExclusiveGroup { id: bufferGroup }
Repeater {
model: [
"Off", "Diffuse", "Metallic", "Roughness", "Normal", "Depth",
"Lighting", "Shadow", "Pyramid Depth", "Ambient Occlusion", "Custom Shader"
]
RadioButton {
text: qsTr(modelData)
exclusiveGroup: bufferGroup
checked: index == 0
onCheckedChanged: if (checked && index > 0) debug.setDebugMode(index - 1);
}
}
}
}

@@ -14,7 +14,8 @@ Window {
destroyOnInvisible: true
width: 800
height: 600

property alias webView: webview

Component.onCompleted: {
visible = true
addressBar.text = webview.url

@@ -28,6 +29,7 @@ Window {
}

Item {
id:item
anchors.fill: parent
Rectangle {
anchors.left: parent.left

@@ -125,12 +127,10 @@ Window {
console.log("New icon: " + icon)
}

profile: WebEngineProfile {
id: webviewProfile
storageName: "qmlUserBrowser"
}

profile: desktop.browserProfile

}

} // item

Keys.onPressed: {

interface/resources/qml/ForceLoad.qml (new file, 12 lines)
@@ -0,0 +1,12 @@
import QtQuick 2.0
import QtMultimedia 5.5

Item {
    Audio {
        id: audio
        autoLoad: true
        autoPlay: true
        loops: Audio.Infinite
    }
}

@@ -267,15 +267,9 @@ Item {
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.showAcuity
visible: root.expanded
text: "LOD: " + root.lodStatus;
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
visible: root.expanded && !root.showAcuity
text: root.lodStatsRenderText;
}
}
}
}

@@ -11,6 +11,7 @@ WebEngineView {
root.javaScriptConsoleMessage.connect(function(level, message, lineNumber, sourceID) {
console.log("Web Window JS message: " + sourceID + " " + lineNumber + " " + message);
});

}

// FIXME hack to get the URL with the auth token included. Remove when we move to Qt 5.6

@@ -36,6 +37,10 @@ WebEngineView {
}
}

onFeaturePermissionRequested: {
grantFeaturePermission(securityOrigin, feature, true);
}

onLoadingChanged: {
// Required to support clicking on "hifi://" links
if (WebEngineView.LoadStartedStatus == loadRequest.status) {

@@ -48,9 +53,12 @@ WebEngineView {
}
}

profile: WebEngineProfile {
id: webviewProfile
httpUserAgent: "Mozilla/5.0 (HighFidelityInterface)"
storageName: "qmlWebEngine"
onNewViewRequested:{
var component = Qt.createComponent("../Browser.qml");
var newWindow = component.createObject(desktop);
request.openIn(newWindow.webView)
}


profile: desktop.browserProfile
}

@@ -9,7 +9,7 @@ Desktop {
id: desktop

Component.onCompleted: {
WebEngine.settings.javascriptCanOpenWindows = false;
WebEngine.settings.javascriptCanOpenWindows = true;
WebEngine.settings.javascriptCanAccessClipboard = false;
WebEngine.settings.spatialNavigationEnabled = true;
WebEngine.settings.localContentCanAccessRemoteUrls = true;

@@ -19,6 +19,12 @@ Desktop {
property alias toolWindow: toolWindow
ToolWindow { id: toolWindow }

property var browserProfile: WebEngineProfile {
id: webviewProfile
httpUserAgent: "Chrome/48.0 (HighFidelityInterface)"
storageName: "qmlWebEngine"
}

Action {
text: "Open Browser"
shortcut: "Ctrl+B"

@@ -210,7 +210,7 @@ static const QString INFO_EDIT_ENTITIES_PATH = "html/edit-commands.html";

static const unsigned int THROTTLED_SIM_FRAMERATE = 15;
static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SIM_FRAMERATE;
static const unsigned int CAPPED_SIM_FRAMERATE = 60;
static const unsigned int CAPPED_SIM_FRAMERATE = 120;
static const int CAPPED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / CAPPED_SIM_FRAMERATE;

static const uint32_t INVALID_FRAME = UINT32_MAX;

@@ -1615,13 +1615,7 @@ void Application::paintGL() {
});
}

// Some LOD-like controls need to know a smoothly varying "potential" frame rate that doesn't
// include time waiting for sync, and which can report a number above target if we've got the headroom.
// In my tests, the following is mostly less than 0.5ms, and never more than 3ms. I don't think its worth measuring during runtime.
const float paintWaitAndQTTimerAllowance = 0.001f; // seconds
// Store both values now for use by next cycle.
_lastInstantaneousFps = instantaneousFps;
_lastUnsynchronizedFps = 1.0f / (((usecTimestampNow() - now) / (float)USECS_PER_SECOND) + paintWaitAndQTTimerAllowance);
_pendingPaint = false;
}

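A worked example of the estimate in the hunk above (illustrative numbers, not a measurement): if the paint work for a frame took 12 ms, the unsynchronized rate is 1.0 / (0.012 + 0.001) ≈ 77 fps, which can sit above a vsync-capped target and tells the LOD controller that there is headroom.
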
@@ -1713,6 +1707,7 @@ bool Application::event(QEvent* event) {

if ((int)event->type() == (int)Paint) {
paintGL();
return true;
}

if (!_keyboardFocusedItem.isInvalidID()) {

@@ -3099,11 +3094,7 @@ void Application::update(float deltaTime) {
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::update()");

if (DependencyManager::get<LODManager>()->getUseAcuity()) {
updateLOD();
} else {
DependencyManager::get<LODManager>()->updatePIDRenderDistance(getTargetFrameRate(), getLastInstanteousFps(), deltaTime, isThrottleRendering());
}
updateLOD();

{
PerformanceTimer perfTimer("devices");

@@ -3607,7 +3598,7 @@ glm::vec3 Application::getSunDirection() {
// FIXME, preprocessor guard this check to occur only in DEBUG builds
static QThread * activeRenderingThread = nullptr;

PickRay Application::computePickRay(float x, float y) {
PickRay Application::computePickRay(float x, float y) const {
vec2 pickPoint { x, y };
PickRay result;
if (isHMDMode()) {

@@ -4729,7 +4720,7 @@ glm::uvec2 Application::getCanvasSize() const {
return glm::uvec2(_glWidget->width(), _glWidget->height());
}

QRect Application::getApplicationGeometry() const {
QRect Application::getRenderingGeometry() const {
auto geometry = _glWidget->geometry();
auto topLeft = geometry.topLeft();
auto topLeftScreen = _glWidget->mapToGlobal(topLeft);

@@ -114,7 +114,7 @@ public:
bool eventFilter(QObject* object, QEvent* event) override;

glm::uvec2 getCanvasSize() const;
QRect getApplicationGeometry() const;
QRect getRenderingGeometry() const;

glm::uvec2 getUiSize() const;
QSize getDeviceSize() const;

@@ -157,7 +157,6 @@ public:
float getFps() const { return _fps; }
float getTargetFrameRate(); // frames/second
float getLastInstanteousFps() const { return _lastInstantaneousFps; }
float getLastUnsynchronizedFps() const { return _lastUnsynchronizedFps; }

float getFieldOfView() { return _fieldOfView.get(); }
void setFieldOfView(float fov);

@@ -169,7 +168,7 @@ public:

virtual ViewFrustum* getCurrentViewFrustum() override { return getDisplayViewFrustum(); }
virtual QThread* getMainThread() override { return thread(); }
virtual PickRay computePickRay(float x, float y) override;
virtual PickRay computePickRay(float x, float y) const override;
virtual glm::vec3 getAvatarPosition() const override;
virtual qreal getDevicePixelRatio() override;

@@ -405,7 +404,6 @@ private:
QElapsedTimer _timerStart;
QElapsedTimer _lastTimeUpdated;
float _lastInstantaneousFps { 0.0f };
float _lastUnsynchronizedFps { 0.0f };

ShapeManager _shapeManager;
PhysicalEntitySimulation _entitySimulation;

@@ -20,29 +20,8 @@

Setting::Handle<float> desktopLODDecreaseFPS("desktopLODDecreaseFPS", DEFAULT_DESKTOP_LOD_DOWN_FPS);
Setting::Handle<float> hmdLODDecreaseFPS("hmdLODDecreaseFPS", DEFAULT_HMD_LOD_DOWN_FPS);
// There are two different systems in use, based on lodPreference:
// pid: renderDistance is adjusted by a PID such that frame rate targets are met.
// acuity: a pseudo-acuity target is held, or adjusted to match minimum frame rates (and a PID controlls avatar rendering distance)
// If unspecified, acuity is used only if user has specified non-default minumum frame rates.
Setting::Handle<int> lodPreference("lodPreference", (int)LODManager::LODPreference::acuity);
const float SMALLEST_REASONABLE_HORIZON = 50.0f; // meters
Setting::Handle<float> renderDistanceInverseHighLimit("renderDistanceInverseHighLimit", 1.0f / SMALLEST_REASONABLE_HORIZON);
void LODManager::setRenderDistanceInverseHighLimit(float newValue) {
renderDistanceInverseHighLimit.set(newValue); // persist it, and tell all the controllers that use it
_renderDistanceController.setControlledValueHighLimit(newValue);
}

LODManager::LODManager() {

setRenderDistanceInverseHighLimit(renderDistanceInverseHighLimit.get());
setRenderDistanceInverseLowLimit(1.0f / (float)TREE_SCALE);
// Advice for tuning parameters:
// See PIDController.h. There's a section on tuning in the reference.
// Turn on logging with the following (or from js with LODManager.setRenderDistanceControllerHistory("render pid", 240))
//setRenderDistanceControllerHistory("render pid", 60 * 4);
// Note that extra logging/hysteresis is turned off in Avatar.cpp when the above logging is on.
setRenderDistanceKP(0.000012f); // Usually about 0.6 of largest that doesn't oscillate when other parameters 0.
setRenderDistanceKI(0.00002f); // Big enough to bring us to target with the above KP.
}

float LODManager::getLODDecreaseFPS() {

@@ -234,53 +213,7 @@ QString LODManager::getLODFeedbackText() {
return result;
}

static float renderDistance = (float)TREE_SCALE;
static int renderedCount = 0;
static int lastRenderedCount = 0;
bool LODManager::getUseAcuity() { return lodPreference.get() == (int)LODManager::LODPreference::acuity; }
void LODManager::setUseAcuity(bool newValue) { lodPreference.set(newValue ? (int)LODManager::LODPreference::acuity : (int)LODManager::LODPreference::pid); }
float LODManager::getRenderDistance() {
return renderDistance;
}
int LODManager::getRenderedCount() {
return lastRenderedCount;
}
QString LODManager::getLODStatsRenderText() {
const QString label = "Rendered objects: ";
return label + QString::number(getRenderedCount()) + " w/in " + QString::number((int)getRenderDistance()) + "m";
}
// compare autoAdjustLOD()
void LODManager::updatePIDRenderDistance(float targetFps, float measuredFps, float deltaTime, bool isThrottled) {
float distance;
if (!isThrottled) {
_renderDistanceController.setMeasuredValueSetpoint(targetFps); // No problem updating in flight.
// The PID controller raises the controlled value when the measured value goes up.
// The measured value is frame rate. When the controlled value (1 / render cutoff distance)
// goes up, the render cutoff distance gets closer, the number of rendered avatars is less, and frame rate
// goes up.
distance = 1.0f / _renderDistanceController.update(measuredFps, deltaTime);
} else {
// Here we choose to just use the maximum render cutoff distance if throttled.
distance = 1.0f / _renderDistanceController.getControlledValueLowLimit();
}
_renderDistanceAverage.updateAverage(distance);
renderDistance = _renderDistanceAverage.getAverage(); // average only once per cycle
lastRenderedCount = renderedCount;
renderedCount = 0;
}

bool LODManager::shouldRender(const RenderArgs* args, const AABox& bounds) {
// NOTE: this branch of code is the alternate form of LOD that uses PID controllers.
if (!getUseAcuity()) {
float distanceToCamera = glm::length(bounds.calcCenter() - args->_viewFrustum->getPosition());
float largestDimension = bounds.getLargestDimension();
const float scenerySize = 300; // meters
bool isRendered = (largestDimension > scenerySize) || // render scenery regardless of distance
(distanceToCamera < renderDistance + largestDimension);
renderedCount += isRendered ? 1 : 0;
return isRendered;
}

// FIXME - eventually we want to use the render accuracy as an indicator for the level of detail
// to use in rendering.
float renderAccuracy = args->_viewFrustum->calculateRenderAccuracy(bounds, args->_sizeScale, args->_boundaryLevelAdjust);

@@ -299,12 +232,6 @@ void LODManager::setBoundaryLevelAdjust(int boundaryLevelAdjust) {
void LODManager::loadSettings() {
setDesktopLODDecreaseFPS(desktopLODDecreaseFPS.get());
setHMDLODDecreaseFPS(hmdLODDecreaseFPS.get());

if (lodPreference.get() == (int)LODManager::LODPreference::unspecified) {
setUseAcuity((getDesktopLODDecreaseFPS() != DEFAULT_DESKTOP_LOD_DOWN_FPS) || (getHMDLODDecreaseFPS() != DEFAULT_HMD_LOD_DOWN_FPS));
}
Menu::getInstance()->getActionForOption(MenuOption::LodTools)->setEnabled(getUseAcuity());
Menu::getInstance()->getSubMenuFromName(MenuOption::RenderResolution, Menu::getInstance()->getSubMenuFromName("Render", Menu::getInstance()->getMenu("Developer")))->setEnabled(getUseAcuity());
}

void LODManager::saveSettings() {

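The PID branch shown above holds a frame-rate target by steering the controlled value 1/renderDistance between an inverse low limit (1/TREE_SCALE) and an inverse high limit (the 50 m "smallest reasonable horizon"). A minimal standalone sketch of that loop, assuming plain proportional-integral gains; the struct name and numbers are illustrative and this is not the PIDController class the commit uses:

    #include <algorithm>

    // Illustrative PI loop for the render cutoff distance (assumed names and values).
    struct RenderDistancePI {
        float kp { 0.000012f };             // proportional gain, echoing the tuning comment above
        float ki { 0.00002f };              // integral gain
        float setpointFps { 60.0f };        // measured-value setpoint: the frame rate to hold
        float lowLimit { 1.0f / 32768.0f }; // smallest 1/distance, i.e. the farthest cutoff
        float highLimit { 1.0f / 50.0f };   // largest 1/distance, i.e. the 50 m minimum horizon
        float integral { 0.0f };

        // One update step; returns the new render cutoff distance in meters.
        float update(float measuredFps, float dt) {
            // A frame rate below target gives a positive error, so 1/distance grows and the
            // cutoff pulls in; when there is headroom the distance relaxes back out.
            float error = setpointFps - measuredFps;
            integral += error * dt;
            float controlled = std::min(std::max(kp * error + ki * integral, lowLimit), highLimit);
            return 1.0f / controlled;
        }
    };

shouldRender() above then draws anything closer than that distance plus the object's largest dimension, and always draws objects larger than 300 m.
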
@@ -18,8 +18,8 @@
#include <PIDController.h>
#include <SimpleMovingAverage.h>

const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 15.0;
const float DEFAULT_HMD_LOD_DOWN_FPS = 30.0;
const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 30.0;
const float DEFAULT_HMD_LOD_DOWN_FPS = 45.0;
const float MAX_LIKELY_DESKTOP_FPS = 59.0; // this is essentially, V-synch - 1 fps
const float MAX_LIKELY_HMD_FPS = 74.0; // this is essentially, V-synch - 1 fps
const float INCREASE_LOD_GAP = 15.0f;

@@ -76,27 +76,6 @@ public:
Q_INVOKABLE float getLODDecreaseFPS();
Q_INVOKABLE float getLODIncreaseFPS();

enum class LODPreference {
pid = 0,
acuity,
unspecified
};
static bool getUseAcuity();
static void setUseAcuity(bool newValue);
Q_INVOKABLE void setRenderDistanceKP(float newValue) { _renderDistanceController.setKP(newValue); }
Q_INVOKABLE void setRenderDistanceKI(float newValue) { _renderDistanceController.setKI(newValue); }
Q_INVOKABLE void setRenderDistanceKD(float newValue) { _renderDistanceController.setKD(newValue); }
Q_INVOKABLE bool getRenderDistanceControllerIsLogging() { return _renderDistanceController.getIsLogging(); }
Q_INVOKABLE void setRenderDistanceControllerHistory(QString label, int size) { return _renderDistanceController.setHistorySize(label, size); }
Q_INVOKABLE float getRenderDistanceInverseLowLimit() { return _renderDistanceController.getControlledValueLowLimit(); }
Q_INVOKABLE void setRenderDistanceInverseLowLimit(float newValue) { _renderDistanceController.setControlledValueLowLimit(newValue); }
Q_INVOKABLE float getRenderDistanceInverseHighLimit() { return _renderDistanceController.getControlledValueHighLimit(); }
Q_INVOKABLE void setRenderDistanceInverseHighLimit(float newValue);
void updatePIDRenderDistance(float targetFps, float measuredFps, float deltaTime, bool isThrottled);
float getRenderDistance();
int getRenderedCount();
QString getLODStatsRenderText();

static bool shouldRender(const RenderArgs* args, const AABox& bounds);
void autoAdjustLOD(float currentFPS);

@@ -126,9 +105,6 @@ private:
SimpleMovingAverage _fpsAverageStartWindow = START_DELAY_SAMPLES_OF_FRAMES;
SimpleMovingAverage _fpsAverageDownWindow = DOWN_SHIFT_SAMPLES_OF_FRAMES;
SimpleMovingAverage _fpsAverageUpWindow = UP_SHIFT_SAMPLES_OF_FRAMES;

PIDController _renderDistanceController{};
SimpleMovingAverage _renderDistanceAverage{ 10 };
};

#endif // hifi_LODManager_h

@@ -162,7 +162,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
void AvatarManager::simulateAvatarFades(float deltaTime) {
QVector<AvatarSharedPointer>::iterator fadingIterator = _avatarFades.begin();

const float SHRINK_RATE = 0.9f;
const float SHRINK_RATE = 0.15f;
const float MIN_FADE_SCALE = MIN_AVATAR_SCALE;

render::ScenePointer scene = qApp->getMain3DScene();

@@ -421,8 +421,8 @@ private:
AtRestDetector _hmdAtRestDetector;
bool _lastIsMoving { false };
bool _hoverReferenceCameraFacingIsCaptured { false };
glm::vec3 _hoverReferenceCameraFacing; // hmd sensor space

glm::vec3 _hoverReferenceCameraFacing { 0.0f, 0.0f, -1.0f }; // hmd sensor space

float AVATAR_MOVEMENT_ENERGY_CONSTANT { 0.001f };
float AUDIO_ENERGY_CONSTANT { 0.000001f };
float MAX_AVATAR_MOVEMENT_PER_FRAME { 30.0f };

@@ -144,6 +144,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
} else {
handParams.isRightEnabled = false;
}
handParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();

_rig->updateFromHandParameters(handParams, deltaTime);

@@ -335,6 +335,7 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int

QPointF ApplicationCompositor::getMouseEventPosition(QMouseEvent* event) {
if (qApp->isHMDMode()) {
QMutexLocker locker(&_reticlePositionInHMDLock);
return QPointF(_reticlePositionInHMD.x, _reticlePositionInHMD.y);
}
return event->localPos();

@@ -349,7 +350,7 @@ void ApplicationCompositor::handleLeaveEvent() {

if (shouldCaptureMouse()) {
QWidget* mainWidget = (QWidget*)qApp->getWindow();
QRect mainWidgetFrame = qApp->getApplicationGeometry();
QRect mainWidgetFrame = qApp->getRenderingGeometry();
QRect uncoveredRect = mainWidgetFrame;
foreach(QWidget* widget, QApplication::topLevelWidgets()) {
if (widget->isWindow() && widget->isVisible() && widget != mainWidget) {

@@ -388,6 +389,7 @@ bool ApplicationCompositor::handleRealMouseMoveEvent(bool sendFakeEvent) {

// If we're in HMD mode
if (shouldCaptureMouse()) {
QMutexLocker locker(&_reticlePositionInHMDLock);
auto newPosition = QCursor::pos();
auto changeInRealMouse = newPosition - _lastKnownRealMouse;
auto newReticlePosition = _reticlePositionInHMD + toGlm(changeInRealMouse);

@@ -403,6 +405,7 @@ bool ApplicationCompositor::handleRealMouseMoveEvent(bool sendFakeEvent) {

glm::vec2 ApplicationCompositor::getReticlePosition() {
if (qApp->isHMDMode()) {
QMutexLocker locker(&_reticlePositionInHMDLock);
return _reticlePositionInHMD;
}
return toGlm(QCursor::pos());

@@ -410,6 +413,7 @@ glm::vec2 ApplicationCompositor::getReticlePosition() {

void ApplicationCompositor::setReticlePosition(glm::vec2 position, bool sendFakeEvent) {
if (qApp->isHMDMode()) {
QMutexLocker locker(&_reticlePositionInHMDLock);
const float MOUSE_EXTENTS_VERT_ANGULAR_SIZE = 170.0f; // 5deg from poles
const float MOUSE_EXTENTS_VERT_PIXELS = VIRTUAL_SCREEN_SIZE_Y * (MOUSE_EXTENTS_VERT_ANGULAR_SIZE / DEFAULT_HMD_UI_VERT_ANGULAR_SIZE);
const float MOUSE_EXTENTS_HORZ_ANGULAR_SIZE = 360.0f; // full sphere

@@ -9,11 +9,13 @@
#ifndef hifi_ApplicationCompositor_h
#define hifi_ApplicationCompositor_h

#include <atomic>
#include <cstdint>

#include <QCursor>
#include <QMouseEvent>
#include <QObject>
#include <QPropertyAnimation>
#include <cstdint>

#include <EntityItemID.h>
#include <GeometryCache.h>

@@ -81,19 +83,19 @@ public:
float getAlpha() const { return _alpha; }
void setAlpha(float alpha) { _alpha = alpha; }

Q_INVOKABLE bool getReticleVisible() { return _reticleVisible; }
Q_INVOKABLE void setReticleVisible(bool visible) { _reticleVisible = visible; }
bool getReticleVisible() { return _reticleVisible; }
void setReticleVisible(bool visible) { _reticleVisible = visible; }

Q_INVOKABLE float getReticleDepth() { return _reticleDepth; }
Q_INVOKABLE void setReticleDepth(float depth) { _reticleDepth = depth; }
float getReticleDepth() { return _reticleDepth; }
void setReticleDepth(float depth) { _reticleDepth = depth; }

Q_INVOKABLE glm::vec2 getReticlePosition();
Q_INVOKABLE void setReticlePosition(glm::vec2 position, bool sendFakeEvent = true);
glm::vec2 getReticlePosition();
void setReticlePosition(glm::vec2 position, bool sendFakeEvent = true);

Q_INVOKABLE void setReticleApparentPosition(glm::vec3 position) { _drawAt3D = true; _drawAt3DPosition = position; }
Q_INVOKABLE void restoreReticleApparentPosition() { _drawAt3D = false; }
void setReticleApparentPosition(glm::vec3 position) { _drawAt3D = true; _drawAt3DPosition = position; }
void restoreReticleApparentPosition() { _drawAt3D = false; }

Q_INVOKABLE glm::vec2 getReticleMaximumPosition() const;
glm::vec2 getReticleMaximumPosition() const;

ReticleInterface* getReticleInterface() { return _reticleInterface; }


@@ -141,21 +143,22 @@ private:

std::unique_ptr<QPropertyAnimation> _alphaPropertyAnimation;

bool _reticleVisible { true };
float _reticleDepth { 1.0f };
bool _drawAt3D { false };
glm::vec3 _drawAt3DPosition;
std::atomic<bool> _reticleVisible { true };
std::atomic<float> _reticleDepth { 1.0f };

// NOTE: when the compositor is running in HMD mode, it will control the reticle position as a custom
// application specific position, when it's in desktop mode, the reticle position will simply move
// the system mouse.
glm::vec2 _reticlePositionInHMD{ 0.0f, 0.0f };
glm::vec2 _reticlePositionInHMD { 0.0f, 0.0f };
mutable QMutex _reticlePositionInHMDLock { QMutex::Recursive };

bool _drawAt3D { false };
glm::vec3 _drawAt3DPosition;

QPointF _lastKnownRealMouse;
QPoint _lastKnownCursorPos;
bool _ignoreMouseMove { false };

ReticleInterface* _reticleInterface;

};

// Scripting interface available to control the Reticle

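The header changes above make the simple reticle scalars atomic and guard the two-component HMD reticle position with a recursive mutex, presumably because these members are now touched from more than one thread (the .cpp hunks add the matching QMutexLocker calls). A compressed sketch of that pattern using standard-library types; the class and member names are simplified stand-ins, not the actual ApplicationCompositor members or its Qt types:

    #include <atomic>
    #include <mutex>
    #include <utility>

    class ReticleStateSketch {
    public:
        void setVisible(bool visible) { _visible = visible; }   // lock-free scalar write
        bool isVisible() const { return _visible; }

        void setPositionInHMD(float x, float y) {
            std::lock_guard<std::recursive_mutex> lock(_positionLock);
            _x = x; _y = y;                                      // both components change together
        }
        std::pair<float, float> positionInHMD() const {
            std::lock_guard<std::recursive_mutex> lock(_positionLock);
            return { _x, _y };
        }

    private:
        std::atomic<bool> _visible { true };
        std::atomic<float> _depth { 1.0f };
        mutable std::recursive_mutex _positionLock;              // plays the role of QMutex::Recursive
        float _x { 0.0f };
        float _y { 0.0f };
    };
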
@@ -252,12 +252,8 @@ void ApplicationOverlay::buildFramebufferObject() {
PROFILE_RANGE(__FUNCTION__);

auto uiSize = qApp->getUiSize();
QSize desiredSize (uiSize.x, uiSize.y);
int currentWidth = _overlayFramebuffer ? _overlayFramebuffer->getWidth() : 0;
int currentHeight = _overlayFramebuffer ? _overlayFramebuffer->getHeight() : 0;
QSize frameBufferCurrentSize(currentWidth, currentHeight);

if (_overlayFramebuffer && desiredSize == frameBufferCurrentSize) {
if (_overlayFramebuffer && uiSize == _overlayFramebuffer->getSize()) {
// Already built
return;
}

@@ -271,8 +267,8 @@ void ApplicationOverlay::buildFramebufferObject() {
_overlayFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());

auto colorFormat = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
auto width = desiredSize.width();
auto height = desiredSize.height();
auto width = uiSize.x;
auto height = uiSize.y;

auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_LINEAR);
_overlayColorTexture = gpu::TexturePointer(gpu::Texture::create2D(colorFormat, width, height, defaultSampler));

@@ -87,13 +87,6 @@ void setupPreferences() {
}

static const QString LOD_TUNING("Level of Detail Tuning");
CheckPreference* acuityToggle;
{
auto getter = []()->bool { return DependencyManager::get<LODManager>()->getUseAcuity(); };
auto setter = [](bool value) { DependencyManager::get<LODManager>()->setUseAcuity(value); };
preferences->addPreference(acuityToggle = new CheckPreference(LOD_TUNING, "Render based on visual acuity", getter, setter));
}

{
auto getter = []()->float { return DependencyManager::get<LODManager>()->getDesktopLODDecreaseFPS(); };
auto setter = [](float value) { DependencyManager::get<LODManager>()->setDesktopLODDecreaseFPS(value); };

@@ -101,7 +94,6 @@ void setupPreferences() {
preference->setMin(0);
preference->setMax(120);
preference->setStep(1);
preference->setEnabler(acuityToggle);
preferences->addPreference(preference);
}

@@ -112,18 +104,6 @@ void setupPreferences() {
preference->setMin(0);
preference->setMax(120);
preference->setStep(1);
preference->setEnabler(acuityToggle);
preferences->addPreference(preference);
}

{
auto getter = []()->float { return 1.0f / DependencyManager::get<LODManager>()->getRenderDistanceInverseHighLimit(); };
auto setter = [](float value) { DependencyManager::get<LODManager>()->setRenderDistanceInverseHighLimit(1.0f / value); };
auto preference = new SpinnerPreference(LOD_TUNING, "Minimum Display Distance", getter, setter);
preference->setMin(5);
preference->setMax(32768);
preference->setStep(1);
preference->setEnabler(acuityToggle, true);
preferences->addPreference(preference);
}

@@ -274,7 +254,7 @@ void setupPreferences() {
{
auto getter = []()->float { return DependencyManager::get<AudioClient>()->getOutputBufferSize(); };
auto setter = [](float value) { DependencyManager::get<AudioClient>()->setOutputBufferSize(value); };
auto preference = new SpinnerPreference(AUDIO, "Output Buffer Size (frames)", getter, setter);
auto preference = new SpinnerPreference(AUDIO, "Output Buffer Initial Size (frames)", getter, setter);
preference->setMin(1);
preference->setMax(20);
preference->setStep(1);

@@ -283,9 +283,7 @@ void Stats::updateStats(bool force) {
STAT_UPDATE(localLeaves, (int)OctreeElement::getLeafNodeCount());
// LOD Details
STAT_UPDATE(lodStatus, "You can see " + DependencyManager::get<LODManager>()->getLODFeedbackText());
STAT_UPDATE(lodStatsRenderText, DependencyManager::get<LODManager>()->getLODStatsRenderText());
}
STAT_UPDATE(showAcuity, (_expanded || force) && DependencyManager::get<LODManager>()->getUseAcuity());

bool performanceTimerIsActive = PerformanceTimer::isActive();
bool displayPerf = _expanded && Menu::getInstance()->isOptionChecked(MenuOption::DisplayDebugTimingDetails);

@@ -30,7 +30,6 @@ class Stats : public QQuickItem {
Q_PROPERTY(QString monospaceFont READ monospaceFont CONSTANT)
Q_PROPERTY(float audioPacketlossUpstream READ getAudioPacketLossUpstream)
Q_PROPERTY(float audioPacketlossDownstream READ getAudioPacketLossDownstream)
Q_PROPERTY(bool showAcuity READ getShowAcuity WRITE setShowAcuity NOTIFY showAcuityChanged)

STATS_PROPERTY(int, serverCount, 0)
STATS_PROPERTY(int, renderrate, 0)

@@ -80,7 +79,6 @@ class Stats : public QQuickItem {
STATS_PROPERTY(QString, packetStats, QString())
STATS_PROPERTY(QString, lodStatus, QString())
STATS_PROPERTY(QString, timingStats, QString())
STATS_PROPERTY(QString, lodStatsRenderText, QString())
STATS_PROPERTY(int, serverElements, 0)
STATS_PROPERTY(int, serverInternal, 0)
STATS_PROPERTY(int, serverLeaves, 0)

@@ -112,15 +110,12 @@ public:
emit expandedChanged();
}
}
bool getShowAcuity() { return _showAcuity; }
void setShowAcuity(bool newValue) { _showAcuity = newValue; }

public slots:
void forceUpdateStats() { updateStats(true); }

signals:
void expandedChanged();
void showAcuityChanged();
void timingExpandedChanged();
void serverCountChanged();
void renderrateChanged();

@@ -128,7 +123,6 @@ signals:
void simrateChanged();
void avatarSimrateChanged();
void avatarCountChanged();
void lodStatsRenderTextChanged();
void packetInCountChanged();
void packetOutCountChanged();
void mbpsInChanged();

@@ -182,7 +176,6 @@ private:
int _recentMaxPackets{ 0 } ; // recent max incoming voxel packets to process
bool _resetRecentMaxPacketsSoon{ true };
bool _expanded{ false };
bool _showAcuity{ false };
bool _timingExpanded{ false };
QString _monospaceFont;
const AudioIOStats* _audioStats;

@@ -670,7 +670,7 @@ void AnimInverseKinematics::initConstraints() {
stConstraint->setTwistLimits(-MAX_SHOULDER_TWIST, MAX_SHOULDER_TWIST);

std::vector<float> minDots;
const float MAX_SHOULDER_SWING = PI / 20.0f;
const float MAX_SHOULDER_SWING = PI / 6.0f;
minDots.push_back(cosf(MAX_SHOULDER_SWING));
stConstraint->setSwingLimits(minDots);

@@ -1080,8 +1080,31 @@ void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm

void Rig::updateFromHandParameters(const HandParameters& params, float dt) {
if (_animSkeleton && _animNode) {

const float HAND_RADIUS = 0.05f;
const float BODY_RADIUS = params.bodyCapsuleRadius;
const float MIN_LENGTH = 1.0e-4f;

// project the hips onto the xz plane.
auto hipsTrans = _internalPoseSet._absolutePoses[_animSkeleton->nameToJointIndex("Hips")].trans;
const glm::vec2 bodyCircleCenter(hipsTrans.x, hipsTrans.z);

if (params.isLeftEnabled) {
_animVars.set("leftHandPosition", params.leftPosition);

// project the hand position onto the xz plane.
glm::vec2 handCircleCenter(params.leftPosition.x, params.leftPosition.z);

// check for 2d overlap of the hand and body circles.
auto circleToCircle = handCircleCenter - bodyCircleCenter;
const float circleToCircleLength = glm::length(circleToCircle);
const float penetrationDistance = HAND_RADIUS + BODY_RADIUS - circleToCircleLength;
if (penetrationDistance > 0.0f && circleToCircleLength > MIN_LENGTH) {
// push the hands out of the body
handCircleCenter += penetrationDistance * glm::normalize(circleToCircle);
}

glm::vec3 handPosition(handCircleCenter.x, params.leftPosition.y, handCircleCenter.y);
_animVars.set("leftHandPosition", handPosition);
_animVars.set("leftHandRotation", params.leftOrientation);
_animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);
} else {

@@ -1089,8 +1112,23 @@ void Rig::updateFromHandParameters(const HandParameters& params, float dt) {
_animVars.unset("leftHandRotation");
_animVars.set("leftHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
}

if (params.isRightEnabled) {
_animVars.set("rightHandPosition", params.rightPosition);

// project the hand position onto the xz plane.
glm::vec2 handCircleCenter(params.rightPosition.x, params.rightPosition.z);

// check for 2d overlap of the hand and body circles.
auto circleToCircle = handCircleCenter - bodyCircleCenter;
const float circleToCircleLength = glm::length(circleToCircle);
const float penetrationDistance = HAND_RADIUS + BODY_RADIUS - circleToCircleLength;
if (penetrationDistance > 0.0f && circleToCircleLength > MIN_LENGTH) {
// push the hands out of the body
handCircleCenter += penetrationDistance * glm::normalize(circleToCircle);
}

glm::vec3 handPosition(handCircleCenter.x, params.rightPosition.y, handCircleCenter.y);
_animVars.set("rightHandPosition", handPosition);
_animVars.set("rightHandRotation", params.rightOrientation);
_animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);
} else {

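Both hand branches above project the hips and the hand onto the XZ plane, treat them as circles (a 0.05 m hand radius against the body capsule radius), and push the hand circle out along the line between centers when they overlap. A standalone restatement of that step; the helper name and default radius argument are illustrative, since the real code inlines this per hand and then keeps the original y of the hand position:

    #include <glm/glm.hpp>

    // Resolve 2D overlap between the hand circle and the body circle on the XZ plane.
    glm::vec2 pushHandOutOfBody(const glm::vec2& bodyCircleCenter, glm::vec2 handCircleCenter,
                                float bodyRadius, float handRadius = 0.05f) {
        const float MIN_LENGTH = 1.0e-4f;
        glm::vec2 circleToCircle = handCircleCenter - bodyCircleCenter;
        float circleToCircleLength = glm::length(circleToCircle);
        float penetrationDistance = handRadius + bodyRadius - circleToCircleLength;
        if (penetrationDistance > 0.0f && circleToCircleLength > MIN_LENGTH) {
            // Overlapping: slide the hand circle outward along the center-to-center direction.
            handCircleCenter += penetrationDistance * glm::normalize(circleToCircle);
        }
        return handCircleCenter;
    }

Only the horizontal placement of the hand changes; the vertical component of the IK target is left untouched.
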
@@ -67,6 +67,7 @@ public:
struct HandParameters {
bool isLeftEnabled;
bool isRightEnabled;
float bodyCapsuleRadius;
glm::vec3 leftPosition = glm::vec3(); // rig space
glm::quat leftOrientation = glm::quat(); // rig space (z forward)
glm::vec3 rightPosition = glm::vec3(); // rig space

@@ -80,8 +80,9 @@ AudioClient::AudioClient() :
_isStereoInput(false),
_outputStarveDetectionStartTimeMsec(0),
_outputStarveDetectionCount(0),
_outputBufferSizeFrames("audioOutputBufferSize", DEFAULT_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES),
_outputStarveDetectionEnabled("audioOutputStarveDetectionEnabled",
_outputBufferSizeFrames("audioOutputBufferSizeFrames", DEFAULT_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES),
_sessionOutputBufferSizeFrames(_outputBufferSizeFrames.get()),
_outputStarveDetectionEnabled("audioOutputBufferStarveDetectionEnabled",
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED),
_outputStarveDetectionPeriodMsec("audioOutputStarveDetectionPeriod",
DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_PERIOD),

@@ -109,6 +110,7 @@ AudioClient::AudioClient() :

connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples,
this, &AudioClient::processReceivedSamples, Qt::DirectConnection);
connect(this, &AudioClient::changeDevice, this, [=](const QAudioDeviceInfo& outputDeviceInfo) { switchOutputToAudioDevice(outputDeviceInfo); });

_inputDevices = getDeviceNames(QAudio::AudioInput);
_outputDevices = getDeviceNames(QAudio::AudioOutput);

@@ -277,9 +279,9 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
const QAudioFormat& desiredAudioFormat,
QAudioFormat& adjustedAudioFormat) {
// FIXME: directly using 24khz has a bug somewhere that causes channels to be swapped.
// Continue using our internal resampler, for now.
if (true || !audioDevice.isFormatSupported(desiredAudioFormat)) {
// There had been a note here that 2khz was swapping channels. That doesn't seem to be happening
// any more for me. If it does, then we'll want to always resample.
if (!audioDevice.isFormatSupported(desiredAudioFormat)) {
qCDebug(audioclient) << "The desired format for audio I/O is" << desiredAudioFormat;
qCDebug(audioclient, "The desired audio format is not supported by this device");

@@ -287,7 +289,7 @@ bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
adjustedAudioFormat = desiredAudioFormat;
adjustedAudioFormat.setChannelCount(2);

if (false && audioDevice.isFormatSupported(adjustedAudioFormat)) {
if (audioDevice.isFormatSupported(adjustedAudioFormat)) {
return true;
} else {
adjustedAudioFormat.setChannelCount(1);

@@ -971,10 +973,8 @@ void AudioClient::outputNotify() {
_outputStarveDetectionStartTimeMsec = now;
_outputStarveDetectionCount = 0;

int oldOutputBufferSizeFrames = _outputBufferSizeFrames.get();
int newOutputBufferSizeFrames = oldOutputBufferSizeFrames + 1;
setOutputBufferSize(newOutputBufferSizeFrames);
newOutputBufferSizeFrames = _outputBufferSizeFrames.get();
int oldOutputBufferSizeFrames = _sessionOutputBufferSizeFrames;
int newOutputBufferSizeFrames = setOutputBufferSize(oldOutputBufferSizeFrames + 1, false);
if (newOutputBufferSizeFrames > oldOutputBufferSizeFrames) {
qCDebug(audioclient) << "Starve detection threshold met, increasing buffer size to " << newOutputBufferSizeFrames;
}

@@ -1038,15 +1038,19 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice

// setup our general output device for audio-mixer audio
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
_audioOutput->setBufferSize(_outputBufferSizeFrames.get() * _outputFrameSize * sizeof(int16_t));
int osDefaultBufferSize = _audioOutput->bufferSize();
int requestedSize = _sessionOutputBufferSizeFrames *_outputFrameSize * sizeof(int16_t);
_audioOutput->setBufferSize(requestedSize);

connect(_audioOutput, &QAudioOutput::notify, this, &AudioClient::outputNotify);

qCDebug(audioclient) << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize;

_audioOutputIODevice.start();
_audioOutput->start(&_audioOutputIODevice);

qCDebug(audioclient) << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize <<
"requested bytes:" << requestedSize << "actual bytes:" << _audioOutput->bufferSize() <<
"os default:" << osDefaultBufferSize << "period size:" << _audioOutput->periodSize();

// setup a loopback audio output device
_loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);

@@ -1060,19 +1064,23 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
return supportedFormat;
}

void AudioClient::setOutputBufferSize(int numFrames) {
int AudioClient::setOutputBufferSize(int numFrames, bool persist) {
numFrames = std::min(std::max(numFrames, MIN_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES), MAX_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES);
if (numFrames != _outputBufferSizeFrames.get()) {
if (numFrames != _sessionOutputBufferSizeFrames) {
qCDebug(audioclient) << "Audio output buffer size (frames): " << numFrames;
_outputBufferSizeFrames.set(numFrames);
_sessionOutputBufferSizeFrames = numFrames;
if (persist) {
_outputBufferSizeFrames.set(numFrames);
}

if (_audioOutput) {
// The buffer size can't be adjusted after QAudioOutput::start() has been called, so
// recreate the device by switching to the default.
QAudioDeviceInfo outputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioOutput);
switchOutputToAudioDevice(outputDeviceInfo);
emit changeDevice(outputDeviceInfo); // On correct thread, please, as setOutputBufferSize can be called from main thread.
}
}
return numFrames;
}

// The following constant is operating system dependent due to differences in

@@ -1143,6 +1151,9 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
}

int bytesAudioOutputUnplayed = _audio->_audioOutput->bufferSize() - _audio->_audioOutput->bytesFree();
if (!bytesAudioOutputUnplayed) {
qCDebug(audioclient) << "empty audio buffer";
}
if (bytesAudioOutputUnplayed == 0 && bytesWritten == 0) {
_unfulfilledReads++;
}

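The hunks above split the persisted output-buffer preference from the value the current session actually uses: starve-driven growth calls setOutputBufferSize(frames + 1, false), so the increase is clamped and applied for this session but never saved, while explicit changes (for example from the preferences spinner) still persist. A condensed sketch of that policy; the struct and field names are illustrative, not the AudioClient API:

    #include <algorithm>

    struct OutputBufferPolicy {
        static constexpr int MIN_FRAMES = 1;   // MIN_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES
        static constexpr int MAX_FRAMES = 20;  // MAX_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES
        int persistedFrames { 3 };             // saved setting ("audioOutputBufferSizeFrames")
        int sessionFrames { 3 };               // what this session actually uses

        // Mirrors the new int setOutputBufferSize(int numFrames, bool persist) shape.
        int setOutputBufferSize(int numFrames, bool persist = true) {
            numFrames = std::min(std::max(numFrames, MIN_FRAMES), MAX_FRAMES);
            if (numFrames != sessionFrames) {
                sessionFrames = numFrames;       // takes effect once the output device is rebuilt
                if (persist) {
                    persistedFrames = numFrames; // only explicit changes are written back
                }
            }
            return numFrames;
        }

        // Starve detection trips: grow by one frame for this session only.
        void onStarveThresholdMet() {
            setOutputBufferSize(sessionFrames + 1, false);
        }
    };
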
@@ -57,11 +57,7 @@ static const int NUM_AUDIO_CHANNELS = 2;
static const int DEFAULT_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 3;
static const int MIN_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 1;
static const int MAX_AUDIO_OUTPUT_BUFFER_SIZE_FRAMES = 20;
#if defined(Q_OS_ANDROID) || defined(Q_OS_WIN)
static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED = false;
#else
static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED = true;
#endif
static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_ENABLED = true;
static const int DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_THRESHOLD = 3;
static const quint64 DEFAULT_AUDIO_OUTPUT_STARVE_DETECTION_PERIOD = 10 * 1000; // 10 Seconds

@@ -156,7 +152,7 @@ public slots:
void processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
void sendMuteEnvironmentPacket();

void setOutputBufferSize(int numFrames);
int setOutputBufferSize(int numFrames, bool persist = true);

virtual bool outputLocalInjector(bool isStereo, AudioInjector* injector);

@@ -184,6 +180,7 @@ signals:
void outputBytesToNetwork(int numBytes);
void inputBytesFromNetwork(int numBytes);

void changeDevice(const QAudioDeviceInfo& outputDeviceInfo);
void deviceChanged();

void receivedFirstPacket();

@@ -230,6 +227,7 @@ private:
int _outputStarveDetectionCount;

Setting::Handle<int> _outputBufferSizeFrames;
int _sessionOutputBufferSizeFrames;
Setting::Handle<bool> _outputStarveDetectionEnabled;
Setting::Handle<int> _outputStarveDetectionPeriodMsec;
// Maximum number of starves per _outputStarveDetectionPeriod before increasing buffer size

@ -28,6 +28,11 @@
|
|||
|
||||
const float DPI = 30.47f;
|
||||
const float METERS_TO_INCHES = 39.3701f;
|
||||
static uint32_t _currentWebCount { 0 };
|
||||
// Don't allow more than 100 concurrent web views
|
||||
static const uint32_t MAX_CONCURRENT_WEB_VIEWS = 100;
|
||||
// If a web-view hasn't been rendered for 30 seconds, de-allocate the framebuffer
|
||||
static uint64_t MAX_NO_RENDER_INTERVAL = 30 * USECS_PER_SECOND;
|
||||
|
||||
EntityItemPointer RenderableWebEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
|
||||
EntityItemPointer entity{ new RenderableWebEntityItem(entityID) };

@@ -41,28 +46,123 @@ RenderableWebEntityItem::RenderableWebEntityItem(const EntityItemID& entityItemI
}

RenderableWebEntityItem::~RenderableWebEntityItem() {
if (_webSurface) {
_webSurface->pause();
_webSurface->disconnect(_connection);
// The lifetime of the QML surface MUST be managed by the main thread
// Additionally, we MUST use local variables copied by value, rather than
// member variables, since they would implicitly refer to a this that
// is no longer valid
auto webSurface = _webSurface;
AbstractViewStateInterface::instance()->postLambdaEvent([webSurface] {
webSurface->deleteLater();
});
}

QObject::disconnect(_mousePressConnection);
QObject::disconnect(_mouseReleaseConnection);
QObject::disconnect(_mouseMoveConnection);
QObject::disconnect(_hoverLeaveConnection);
destroyWebSurface();
qDebug() << "Destroyed web entity " << getID();
}

bool RenderableWebEntityItem::buildWebSurface(EntityTreeRenderer* renderer) {
if (_currentWebCount >= MAX_CONCURRENT_WEB_VIEWS) {
qWarning() << "Too many concurrent web views to create new view";
return false;
}

qDebug() << "Building web surface";
++_currentWebCount;
// Save the original GL context, because creating a QML surface will create a new context
QOpenGLContext * currentContext = QOpenGLContext::currentContext();
QSurface * currentSurface = currentContext->surface();
_webSurface = new OffscreenQmlSurface();
_webSurface->create(currentContext);
_webSurface->setBaseUrl(QUrl::fromLocalFile(PathUtils::resourcesPath() + "/qml/"));
_webSurface->load("WebEntity.qml");
_webSurface->resume();
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
_connection = QObject::connect(_webSurface, &OffscreenQmlSurface::textureUpdated, [&](GLuint textureId) {
_texture = textureId;
});
// Restore the original GL context
currentContext->makeCurrent(currentSurface);

auto forwardMouseEvent = [=](const RayToEntityIntersectionResult& intersection, const QMouseEvent* event) {
// Ignore mouse interaction if we're locked
if (this->getLocked()) {
return;
}

if (event->button() == Qt::MouseButton::RightButton) {
if (event->type() == QEvent::MouseButtonPress) {
const QMouseEvent* mouseEvent = static_cast<const QMouseEvent*>(event);
_lastPress = toGlm(mouseEvent->pos());
}
}

if (intersection.entityID == getID()) {
if (event->button() == Qt::MouseButton::RightButton) {
if (event->type() == QEvent::MouseButtonRelease) {
const QMouseEvent* mouseEvent = static_cast<const QMouseEvent*>(event);
ivec2 dist = glm::abs(toGlm(mouseEvent->pos()) - _lastPress);
if (!glm::any(glm::greaterThan(dist, ivec2(1)))) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
QMetaObject::invokeMethod(_webSurface->getRootItem(), "goBack");
});
}
_lastPress = ivec2(INT_MIN);
}
return;
}

// FIXME doesn't work... double click events not received
if (event->type() == QEvent::MouseButtonDblClick) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
});
}

if (event->button() == Qt::MouseButton::MiddleButton) {
if (event->type() == QEvent::MouseButtonRelease) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
});
}
return;
}

// Map the intersection point to an actual offscreen pixel
glm::vec3 point = intersection.intersection;
point -= getPosition();
point = glm::inverse(getRotation()) * point;
point /= getDimensions();
point += 0.5f;
point.y = 1.0f - point.y;
point *= getDimensions() * METERS_TO_INCHES * DPI;

if (event->button() == Qt::MouseButton::LeftButton) {
if (event->type() == QEvent::MouseButtonPress) {
this->_pressed = true;
this->_lastMove = ivec2((int)point.x, (int)point.y);
} else if (event->type() == QEvent::MouseButtonRelease) {
this->_pressed = false;
}
}
if (event->type() == QEvent::MouseMove) {
this->_lastMove = ivec2((int)point.x, (int)point.y);
}

// Forward the mouse event.
QMouseEvent mappedEvent(event->type(),
QPoint((int)point.x, (int)point.y),
event->screenPos(), event->button(),
event->buttons(), event->modifiers());
QCoreApplication::sendEvent(_webSurface->getWindow(), &mappedEvent);
}
};
_mousePressConnection = QObject::connect(renderer, &EntityTreeRenderer::mousePressOnEntity, forwardMouseEvent);
_mouseReleaseConnection = QObject::connect(renderer, &EntityTreeRenderer::mouseReleaseOnEntity, forwardMouseEvent);
_mouseMoveConnection = QObject::connect(renderer, &EntityTreeRenderer::mouseMoveOnEntity, forwardMouseEvent);
_hoverLeaveConnection = QObject::connect(renderer, &EntityTreeRenderer::hoverLeaveEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (this->_pressed && this->getID() == entityItemID) {
// If the user mouses off the entity while the button is down, simulate a mouse release
QMouseEvent mappedEvent(QEvent::MouseButtonRelease,
QPoint(_lastMove.x, _lastMove.y),
Qt::MouseButton::LeftButton,
Qt::MouseButtons(), Qt::KeyboardModifiers());
QCoreApplication::sendEvent(_webSurface->getWindow(), &mappedEvent);
}
});
return true;
}
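The block above maps a world-space ray intersection onto an offscreen-surface pixel: translate into the entity's local frame, undo its rotation, normalize by the entity dimensions, flip Y for the surface's top-left origin, then scale by the surface DPI. The following standalone sketch of that arithmetic is illustrative only and not part of this commit; the METERS_TO_INCHES value, the DPI value, and the example entity transform are assumptions.

// Illustrative sketch (not from the commit) of the intersection-to-pixel mapping above.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <cstdio>

int main() {
    const float METERS_TO_INCHES = 39.3701f;   // assumption: same constant name as the engine
    const float DPI = 30.0f;                   // assumption: illustrative value only

    glm::vec3 entityPosition(1.0f, 2.0f, 3.0f);
    glm::quat entityRotation(1.0f, 0.0f, 0.0f, 0.0f);   // identity rotation
    glm::vec3 entityDimensions(1.0f, 0.75f, 0.01f);
    glm::vec3 intersection(1.25f, 2.1f, 3.0f);          // made-up ray hit point

    // Same steps as forwardMouseEvent: local space, unrotate, normalize to [0, 1],
    // flip Y, then scale to offscreen pixels.
    glm::vec3 point = intersection;
    point -= entityPosition;
    point = glm::inverse(entityRotation) * point;
    point /= entityDimensions;
    point += 0.5f;
    point.y = 1.0f - point.y;
    point *= entityDimensions * METERS_TO_INCHES * DPI;

    std::printf("offscreen pixel: (%d, %d)\n", (int)point.x, (int)point.y);
    return 0;
}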
void RenderableWebEntityItem::render(RenderArgs* args) {

#ifdef WANT_EXTRA_DEBUGGING
{
gpu::Batch& batch = *args->_batch;

@@ -72,116 +172,19 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
}
#endif

QOpenGLContext * currentContext = QOpenGLContext::currentContext();
QSurface * currentSurface = currentContext->surface();
if (!_webSurface) {
_webSurface = new OffscreenQmlSurface();
_webSurface->create(currentContext);
_webSurface->setBaseUrl(QUrl::fromLocalFile(PathUtils::resourcesPath() + "/qml/"));
_webSurface->load("WebEntity.qml");
_webSurface->resume();
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
_connection = QObject::connect(_webSurface, &OffscreenQmlSurface::textureUpdated, [&](GLuint textureId) {
_texture = textureId;
});

auto forwardMouseEvent = [=](const RayToEntityIntersectionResult& intersection, const QMouseEvent* event) {
// Ignore mouse interaction if we're locked
if (this->getLocked()) {
return;
}

if (event->button() == Qt::MouseButton::RightButton) {
if (event->type() == QEvent::MouseButtonPress) {
const QMouseEvent* mouseEvent = static_cast<const QMouseEvent*>(event);
_lastPress = toGlm(mouseEvent->pos());
}
}

if (intersection.entityID == getID()) {
if (event->button() == Qt::MouseButton::RightButton) {
if (event->type() == QEvent::MouseButtonRelease) {
const QMouseEvent* mouseEvent = static_cast<const QMouseEvent*>(event);
ivec2 dist = glm::abs(toGlm(mouseEvent->pos()) - _lastPress);
if (!glm::any(glm::greaterThan(dist, ivec2(1)))) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
QMetaObject::invokeMethod(_webSurface->getRootItem(), "goBack");
});
}
_lastPress = ivec2(INT_MIN);
}
return;
}

// FIXME doesn't work... double click events not received
if (event->type() == QEvent::MouseButtonDblClick) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
});
}

if (event->button() == Qt::MouseButton::MiddleButton) {
if (event->type() == QEvent::MouseButtonRelease) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
});
}
return;
}

// Map the intersection point to an actual offscreen pixel
glm::vec3 point = intersection.intersection;
point -= getPosition();
point = glm::inverse(getRotation()) * point;
point /= getDimensions();
point += 0.5f;
point.y = 1.0f - point.y;
point *= getDimensions() * METERS_TO_INCHES * DPI;

if (event->button() == Qt::MouseButton::LeftButton) {
if (event->type() == QEvent::MouseButtonPress) {
this->_pressed = true;
this->_lastMove = ivec2((int)point.x, (int)point.y);
} else if (event->type() == QEvent::MouseButtonRelease) {
this->_pressed = false;
}
}
if (event->type() == QEvent::MouseMove) {
this->_lastMove = ivec2((int)point.x, (int)point.y);
}

// Forward the mouse event.
QMouseEvent mappedEvent(event->type(),
QPoint((int)point.x, (int)point.y),
event->screenPos(), event->button(),
event->buttons(), event->modifiers());
QCoreApplication::sendEvent(_webSurface->getWindow(), &mappedEvent);
}
};

EntityTreeRenderer* renderer = static_cast<EntityTreeRenderer*>(args->_renderer);
_mousePressConnection = QObject::connect(renderer, &EntityTreeRenderer::mousePressOnEntity, forwardMouseEvent);
_mouseReleaseConnection = QObject::connect(renderer, &EntityTreeRenderer::mouseReleaseOnEntity, forwardMouseEvent);
_mouseMoveConnection = QObject::connect(renderer, &EntityTreeRenderer::mouseMoveOnEntity, forwardMouseEvent);
_hoverLeaveConnection = QObject::connect(renderer, &EntityTreeRenderer::hoverLeaveEntity, [=](const EntityItemID& entityItemID, const MouseEvent& event) {
if (this->_pressed && this->getID() == entityItemID) {
// If the user mouses off the entity while the button is down, simulate a mouse release
QMouseEvent mappedEvent(QEvent::MouseButtonRelease,
QPoint(_lastMove.x, _lastMove.y),
Qt::MouseButton::LeftButton,
Qt::MouseButtons(), Qt::KeyboardModifiers());
QCoreApplication::sendEvent(_webSurface->getWindow(), &mappedEvent);
}
});
if (!buildWebSurface(static_cast<EntityTreeRenderer*>(args->_renderer))) {
return;
}
}

_lastRenderTime = usecTimestampNow();
glm::vec2 dims = glm::vec2(getDimensions());
dims *= METERS_TO_INCHES * DPI;
// The offscreen surface is idempotent for resizes (bails early
// if it's a no-op), so it's safe to just call resize every frame
// without worrying about excessive overhead.
_webSurface->resize(QSize(dims.x, dims.y));
currentContext->makeCurrent(currentSurface);

PerformanceTimer perfTimer("RenderableWebEntityItem::render");
Q_ASSERT(getType() == EntityTypes::Web);

@@ -223,3 +226,37 @@ void RenderableWebEntityItem::setProxyWindow(QWindow* proxyWindow) {
QObject* RenderableWebEntityItem::getEventHandler() {
return _webSurface->getEventHandler();
}

void RenderableWebEntityItem::destroyWebSurface() {
if (_webSurface) {
--_currentWebCount;
_webSurface->pause();
_webSurface->disconnect(_connection);
QObject::disconnect(_mousePressConnection);
_mousePressConnection = QMetaObject::Connection();
QObject::disconnect(_mouseReleaseConnection);
_mouseReleaseConnection = QMetaObject::Connection();
QObject::disconnect(_mouseMoveConnection);
_mouseMoveConnection = QMetaObject::Connection();
QObject::disconnect(_hoverLeaveConnection);
_hoverLeaveConnection = QMetaObject::Connection();

// The lifetime of the QML surface MUST be managed by the main thread
// Additionally, we MUST use local variables copied by value, rather than
// member variables, since they would implicitly refer to a this that
// is no longer valid
auto webSurface = _webSurface;
AbstractViewStateInterface::instance()->postLambdaEvent([webSurface] {
webSurface->deleteLater();
});
_webSurface = nullptr;
}
}

void RenderableWebEntityItem::update(const quint64& now) {
auto interval = now - _lastRenderTime;
if (interval > MAX_NO_RENDER_INTERVAL) {
destroyWebSurface();
}
}
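Both the destructor and destroyWebSurface() defer the actual teardown of the QML surface to the main thread, and they deliberately capture the surface pointer by value instead of touching members from the queued lambda. The small Qt sketch below shows the same deferred-deletion pattern; it is illustrative only and not from the commit, and the QTimer::singleShot dispatch is a stand-in for the engine's postLambdaEvent.

// Minimal sketch (assumptions noted above): capture the raw pointer by value and let
// the owning thread's event loop run deleteLater(), instead of deleting a QObject
// from a render or entity-destruction path.
#include <QCoreApplication>
#include <QDebug>
#include <QObject>
#include <QTimer>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QObject* surface = new QObject();   // stands in for the OffscreenQmlSurface
    QObject::connect(surface, &QObject::destroyed,
                     [] { qDebug() << "surface destroyed inside the event loop"; });

    // Copy the pointer by value; never capture `this` from the entity, because the
    // entity may already be gone by the time the queued lambda runs.
    QTimer::singleShot(0, &app, [surface] { surface->deleteLater(); });

    QTimer::singleShot(50, &app, [&app] { app.quit(); });
    return app.exec();
}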
@@ -18,6 +18,7 @@
class OffscreenQmlSurface;
class QWindow;
class QObject;
class EntityTreeRenderer;

class RenderableWebEntityItem : public WebEntityItem {
public:

@@ -31,15 +32,22 @@ public:
void setProxyWindow(QWindow* proxyWindow);
QObject* getEventHandler();

void update(const quint64& now) override;
bool needsToCallUpdate() const { return _webSurface != nullptr; }

SIMPLE_RENDERABLE();

private:
bool buildWebSurface(EntityTreeRenderer* renderer);
void destroyWebSurface();

OffscreenQmlSurface* _webSurface{ nullptr };
QMetaObject::Connection _connection;
uint32_t _texture{ 0 };
ivec2 _lastPress{ INT_MIN };
bool _pressed{ false };
ivec2 _lastMove{ INT_MIN };
uint64_t _lastRenderTime{ 0 };

QMetaObject::Connection _mousePressConnection;
QMetaObject::Connection _mouseReleaseConnection;
@@ -103,6 +103,7 @@ EntityItemPointer ParticleEffectEntityItem::factory(const EntityItemID& entityID
// our non-pure virtual subclass for now...
ParticleEffectEntityItem::ParticleEffectEntityItem(const EntityItemID& entityItemID) :
EntityItem(entityItemID),
_previousPosition(getPosition()),
_lastSimulated(usecTimestampNow())
{
_type = EntityTypes::ParticleEffect;

@@ -623,7 +624,8 @@ void ParticleEffectEntityItem::stepSimulation(float deltaTime) {
}

// emit a new particle at tail index.
_particles.push_back(createParticle());
_particles.push_back(createParticle(glm::mix(_previousPosition, getPosition(),
(deltaTime - timeLeftInFrame) / deltaTime)));
auto particle = _particles.back();
particle.lifetime += timeLeftInFrame;

@@ -637,15 +639,16 @@ void ParticleEffectEntityItem::stepSimulation(float deltaTime) {

_timeUntilNextEmit -= timeLeftInFrame;
}
_previousPosition = getPosition();
}

ParticleEffectEntityItem::Particle ParticleEffectEntityItem::createParticle() {
ParticleEffectEntityItem::Particle ParticleEffectEntityItem::createParticle(const glm::vec3& position) {
Particle particle;

particle.seed = randFloatInRange(-1.0f, 1.0f);
if (getEmitterShouldTrail()) {
particle.position = getPosition();
particle.position = position;
}
// Position, velocity, and acceleration
if (_polarStart == 0.0f && _polarFinish == 0.0f && _emitDimensions.z == 0.0f) {

@@ -227,7 +227,7 @@ protected:

bool isAnimatingSomething() const;

Particle createParticle();
Particle createParticle(const glm::vec3& position);
void stepSimulation(float deltaTime);
void integrateParticle(Particle& particle, float deltaTime);

@@ -275,7 +275,7 @@ protected:
float _azimuthStart = DEFAULT_AZIMUTH_START;
float _azimuthFinish = DEFAULT_AZIMUTH_FINISH;

glm::vec3 _previousPosition;
quint64 _lastSimulated { 0 };
bool _isEmitting { true };
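The stepSimulation() change above passes an interpolated emitter position into createParticle(), so trailing particles are spread along the path the emitter travelled during the frame instead of clumping at its latest position. The standalone sketch below shows that interpolation; it is illustrative only and not from the commit, and the positions and frame timing are made up.

// Illustrative sketch of spawning trailed particles along the emitter's path.
#include <glm/glm.hpp>
#include <cstdio>

int main() {
    glm::vec3 previousPosition(0.0f, 0.0f, 0.0f);
    glm::vec3 currentPosition(1.0f, 0.0f, 0.0f);
    float deltaTime = 0.1f;   // one full simulation step, in seconds

    // Pretend four particles are emitted at evenly spaced times inside the frame.
    for (int i = 0; i < 4; ++i) {
        float timeLeftInFrame = deltaTime * (3 - i) / 4.0f;
        float t = (deltaTime - timeLeftInFrame) / deltaTime;   // same ratio as stepSimulation
        glm::vec3 spawn = glm::mix(previousPosition, currentPosition, t);
        std::printf("particle %d spawns at (%.2f, %.2f, %.2f)\n", i, spawn.x, spawn.y, spawn.z);
    }
    return 0;
}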
@@ -175,6 +175,7 @@ private:
doneCurrent();

getContextObject()->moveToThread(QCoreApplication::instance()->thread());
_thread.quit();
_cond.wakeOne();
}

@@ -228,7 +229,7 @@ private:

_quickWindow->setRenderTarget(GetName(*_fbo), QSize(_size.x, _size.y));

{
try {
PROFILE_RANGE("qml_render")
TexturePtr texture = _textures.getNextTexture();
_fbo->Bind(Framebuffer::Target::Draw);

@@ -245,8 +246,10 @@ private:
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
_quickWindow->resetOpenGLState();
_escrow.submit(GetName(*texture));
_lastRenderTime = usecTimestampNow();
} catch (std::runtime_error& error) {
qWarning() << "Failed to render QML " << error.what();
}
_lastRenderTime = usecTimestampNow();
}

void aboutToQuit() {

@@ -321,7 +324,7 @@ OffscreenQmlSurface::~OffscreenQmlSurface() {

void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
_renderer = new OffscreenQmlRenderer(this, shareContext);

_renderer->_renderControl->_renderWindow = _proxyWindow;
// Create a QML engine.
_qmlEngine = new QQmlEngine;
if (!_qmlEngine->incubationController()) {

@@ -610,7 +613,10 @@ bool OffscreenQmlSurface::isPaused() const {
}

void OffscreenQmlSurface::setProxyWindow(QWindow* window) {
_renderer->_renderControl->_renderWindow = window;
_proxyWindow = window;
if (_renderer && _renderer->_renderControl) {
_renderer->_renderControl->_renderWindow = window;
}
}

QObject* OffscreenQmlSurface::getEventHandler() {

@@ -95,7 +95,7 @@ private:
bool _paused{ true };
uint8_t _maxFps{ 60 };
MouseTranslator _mouseTranslator{ [](const QPointF& p) { return p.toPoint(); } };

QWindow* _proxyWindow { nullptr };
};

#endif
@@ -78,6 +78,8 @@ public:

glm::vec3 getLinearVelocity() const;

float getCapsuleRadius() const { return _radius; }

enum class State {
Ground = 0,
Takeoff,
@@ -36,7 +36,7 @@ public:

virtual QThread* getMainThread() = 0;

virtual PickRay computePickRay(float x, float y) = 0;
virtual PickRay computePickRay(float x, float y) const = 0;

virtual glm::vec3 getAvatarPosition() const = 0;
@@ -92,15 +92,15 @@ void Model::setScale(const glm::vec3& scale) {
_scaledToFit = false;
}

const float METERS_PER_MILLIMETER = 0.01f;
const float SCALE_CHANGE_EPSILON = 0.01f;

void Model::setScaleInternal(const glm::vec3& scale) {
if (glm::distance(_scale, scale) > METERS_PER_MILLIMETER) {
if (glm::distance(_scale, scale) > SCALE_CHANGE_EPSILON) {
_scale = scale;
if (_scale.x == 0.0f || _scale.y == 0.0f || _scale.z == 0.0f) {
assert(false);
}
initJointTransforms();
simulate(0.0f, true);
}
}
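The renamed constant keeps the same 0.01f threshold; since 0.01 m is a centimetre rather than a millimetre, the old METERS_PER_MILLIMETER name implied a unit conversion it did not perform, and SCALE_CHANGE_EPSILON reads as what the check actually is: a tolerance below which scale changes are ignored. That mismatch is presumably the motivation for the rename.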
@@ -45,8 +45,11 @@ ScriptAudioInjector* AudioScriptingInterface::playSound(Sound* sound, const Audi
// stereo option isn't set from script, this comes from sound metadata or filename
AudioInjectorOptions optionsCopy = injectorOptions;
optionsCopy.stereo = sound->isStereo();

return new ScriptAudioInjector(AudioInjector::playSound(sound->getByteArray(), optionsCopy, _localAudioInterface));
auto injector = AudioInjector::playSound(sound->getByteArray(), optionsCopy, _localAudioInterface);
if (!injector) {
return NULL;
}
return new ScriptAudioInjector(injector);

} else {
qCDebug(scriptengine) << "AudioScriptingInterface::playSound called with null Sound object.";
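The playSound() change above adds a guard: if the underlying injector cannot be created, the script receives null instead of a wrapper around an empty injector. The sketch below shows the same guard pattern in isolation; it is illustrative only and not from the commit, and the Injector, ScriptInjector, and createInjector names are hypothetical stand-ins.

// Illustrative guard pattern: surface null to the caller rather than wrapping a
// null result from the factory.
#include <cstdio>
#include <memory>
#include <utility>

struct Injector {};
struct ScriptInjector {
    explicit ScriptInjector(std::shared_ptr<Injector> i) : injector(std::move(i)) {}
    std::shared_ptr<Injector> injector;
};

std::shared_ptr<Injector> createInjector(bool haveAudioData) {
    return haveAudioData ? std::make_shared<Injector>() : nullptr;
}

std::unique_ptr<ScriptInjector> playSound(bool haveAudioData) {
    auto injector = createInjector(haveAudioData);
    if (!injector) {
        return nullptr;   // the caller sees null, not a wrapper around nothing
    }
    return std::make_unique<ScriptInjector>(injector);
}

int main() {
    std::printf("with data: %s, without data: %s\n",
                playSound(true) ? "injector" : "null",
                playSound(false) ? "injector" : "null");
    return 0;
}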
@@ -30,7 +30,6 @@ const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here

static vr::IVRCompositor* _compositor{ nullptr };
static vr::TrackedDevicePose_t _presentThreadTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
static mat4 _sensorResetMat;

@@ -43,12 +42,12 @@ bool OpenVrDisplayPlugin::isSupported() const {
void OpenVrDisplayPlugin::activate() {
_container->setIsOptionChecked(StandingHMDSensorMode, true);

if (!_hmd) {
_hmd = acquireOpenVrSystem();
if (!_system) {
_system = acquireOpenVrSystem();
}
Q_ASSERT(_hmd);
Q_ASSERT(_system);

_hmd->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
_system->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
// Recommended render target size is per-eye, so double the X size for
// left + right eyes
_renderTargetSize.x *= 2;

@@ -56,8 +55,8 @@ void OpenVrDisplayPlugin::activate() {
{
Lock lock(_poseMutex);
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyeOffsets[eye] = toGlm(_hmd->GetEyeToHeadTransform(eye));
_eyeProjections[eye] = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
_eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
_eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
});
// FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
_cullingProjection = _eyeProjections[0];

@@ -71,9 +70,9 @@ void OpenVrDisplayPlugin::activate() {

void OpenVrDisplayPlugin::deactivate() {
_container->setIsOptionChecked(StandingHMDSensorMode, false);
if (_hmd) {
if (_system) {
releaseOpenVrSystem();
_hmd = nullptr;
_system = nullptr;
}
_compositor = nullptr;
HmdDisplayPlugin::deactivate();
@@ -96,9 +95,31 @@ void OpenVrDisplayPlugin::resetSensors() {
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
}

glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
Lock lock(_poseMutex);

float displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
float frameDuration = 1.f / displayFrequency;
float vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);

#if THREADED_PRESENT
// TODO: this seems awfully long, 44ms total, but it produced the best results.
const float NUM_PREDICTION_FRAMES = 3.0f;
float predictedSecondsFromNow = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
#else
uint64_t frameCounter;
float timeSinceLastVsync;
_system->GetTimeSinceLastVsync(&timeSinceLastVsync, &frameCounter);
float predictedSecondsFromNow = 3.0f * frameDuration - timeSinceLastVsync + vsyncToPhotons;
#endif

vr::TrackedDevicePose_t predictedTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseSeated, predictedSecondsFromNow, predictedTrackedDevicePose, vr::k_unMaxTrackedDeviceCount);

// copy and process predictedTrackedDevicePoses
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePose[i] = predictedTrackedDevicePose[i];
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
}
return _trackedDevicePoseMat4[0];
}
@@ -112,18 +133,8 @@ void OpenVrDisplayPlugin::internalPresent() {
_compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
_compositor->Submit(vr::Eye_Right, &texture, &rightBounds);

glFinish();

_compositor->WaitGetPoses(_presentThreadTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);

{
// copy and process _presentThreadTrackedDevicePoses
Lock lock(_poseMutex);
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePose[i] = _presentThreadTrackedDevicePose[i];
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
}
}
vr::TrackedDevicePose_t currentTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
_compositor->WaitGetPoses(currentTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);

// Handle the mirroring in the base class
HmdDisplayPlugin::internalPresent();

@@ -35,7 +35,7 @@ protected:
virtual void internalPresent() override;

private:
vr::IVRSystem* _hmd { nullptr };
vr::IVRSystem* _system { nullptr };
static const QString NAME;
mutable Mutex _poseMutex;
};
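The new getHeadPose() predicts the head pose some time into the future rather than reusing the pose sampled by the present thread. The sketch below reproduces only the prediction-window arithmetic; it is illustrative and not from the commit, and the display frequency, vsync-to-photon latency, and elapsed-vsync values are assumed numbers chosen to land near the 44 ms mentioned in the code comment.

// Illustrative arithmetic behind predictedSecondsFromNow in getHeadPose().
#include <cstdio>

int main() {
    float displayFrequency = 90.0f;                // assumed Prop_DisplayFrequency_Float
    float frameDuration = 1.0f / displayFrequency; // ~11.1 ms per frame
    float vsyncToPhotons = 0.011f;                 // assumed Prop_SecondsFromVsyncToPhotons_Float

    // THREADED_PRESENT branch: predict three whole frames ahead plus scan-out latency.
    const float NUM_PREDICTION_FRAMES = 3.0f;
    float threadedPrediction = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;

    // Non-threaded branch: subtract the time already elapsed since the last vsync.
    float timeSinceLastVsync = 0.004f;             // assumed 4 ms into the current frame
    float immediatePrediction = 3.0f * frameDuration - timeSinceLastVsync + vsyncToPhotons;

    std::printf("threaded: %.1f ms, immediate: %.1f ms\n",
                threadedPrediction * 1000.0f, immediatePrediction * 1000.0f);
    return 0;
}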