Merge branch 'master' of github.com:highfidelity/hifi into feat/geocaching
@@ -1484,10 +1484,10 @@ PropertiesTool = function(opts) {
            selections.push(entity);
        }
        data.selections = selections;
        webView.eventBridge.emitScriptEvent(JSON.stringify(data));
        webView.emitScriptEvent(JSON.stringify(data));
    });

    webView.eventBridge.webEventReceived.connect(function(data) {
    webView.webEventReceived.connect(function(data) {
        data = JSON.parse(data);
        if (data.type == "print") {
            if (data.message) {
@@ -1802,7 +1802,7 @@ var showMenuItem = propertyMenu.addMenuItem("Show in Marketplace");
propertiesTool = PropertiesTool();
var particleExplorerTool = ParticleExplorerTool();
var selectedParticleEntity = 0;
entityListTool.webView.eventBridge.webEventReceived.connect(function(data) {
entityListTool.webView.webEventReceived.connect(function(data) {
    var data = JSON.parse(data);
    if (data.type == "selectionUpdate") {
        var ids = data.entityIds;
@@ -1823,10 +1823,10 @@ entityListTool.webView.eventBridge.webEventReceived.connect(function(data) {
        selectedParticleEntity = ids[0];
        particleExplorerTool.setActiveParticleEntity(ids[0]);

        particleExplorerTool.webView.eventBridge.webEventReceived.connect(function(data) {
        particleExplorerTool.webView.webEventReceived.connect(function(data) {
            var data = JSON.parse(data);
            if (data.messageType === "page_loaded") {
                particleExplorerTool.webView.eventBridge.emitScriptEvent(JSON.stringify(particleData));
                particleExplorerTool.webView.emitScriptEvent(JSON.stringify(particleData));
            }
        });
    } else {
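All of the edit.js hunks above make the same mechanical change: tool scripts now talk to their embedded web view directly instead of going through its eventBridge property. A minimal sketch of the new calling convention, assuming a webView object created the same way edit.js creates it (illustrative, not part of the diff):

    // script -> HTML page
    webView.emitScriptEvent(JSON.stringify({ type: "update", entityIds: [] }));
    // HTML page -> script
    webView.webEventReceived.connect(function (data) {
        var parsed = JSON.parse(data);
        print("Received web event of type " + parsed.type);
    });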
@@ -0,0 +1,96 @@
(function() {
    Script.include("../../libraries/virtualBaton.js");
    Script.include("../../libraries/utils.js");

    var _this = this;

    this.startUpdate = function() {
        print("EBL START UPDATE");
        Entities.editEntity(_this.batonOwnerIndicator, {
            visible: true
        });

        // Change color of box
        Entities.editEntity(_this.entityID, {
            color: randomColor()
        });

        _this.position = Entities.getEntityProperties(_this.entityID, "position").position;
        _this.debugLightProperties.position = Vec3.sum(_this.position, {x: 0, y: 1, z: 0});
        _this.debugLightProperties.color = randomColor();
        var debugLight = Entities.addEntity(_this.debugLightProperties);
        Script.setTimeout(function() {
            Entities.deleteEntity(debugLight);
        }, 500);

    }

    this.maybeClaim = function() {
        print("EBL MAYBE CLAIM");
        if (_this.isBatonOwner === true) {
            _this.isBatonOwner = false;
        }
        Entities.editEntity(_this.batonOwnerIndicator, {
            visible: false
        });
        baton.claim(_this.startUpdate, _this.maybeClaim);
    }

    this.unload = function() {
        print("EBL UNLOAD");
        baton.unload();
        Entities.deleteEntity(_this.batonOwnerIndicator);
    }


    this.preload = function(entityID) {
        print("EBL Preload!!");
        _this.entityID = entityID;
        _this.setupDebugEntities();

        baton = virtualBaton({
            batonName: "batonSimpleEntityScript:" + _this.entityID
        });
        _this.isBatonOwner = false;
        _this.maybeClaim();

    }


    this.setupDebugEntities = function() {
        _this.batonOwnerIndicator = Entities.addEntity({
            type: "Box",
            color: {
                red: 200,
                green: 10,
                blue: 200
            },
            position: Vec3.sum(MyAvatar.position, {
                x: 0,
                y: 1,
                z: 0
            }),
            dimensions: {
                x: 0.5,
                y: 1,
                z: 0
            },
            parentID: MyAvatar.sessionUUID,
            visible: false
        });
    }

    _this.debugLightProperties = {
        type: "Light",
        name: "hifi-baton-light",
        dimensions: {
            x: 10,
            y: 10,
            z: 10
        },
        falloffRadius: 3,
        intensity: 20,
    }

});
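The entity script above is essentially a visual smoke test of the virtualBaton library. Reduced to its core, the lifecycle it exercises looks like the following sketch (entity-script boilerplate omitted; onBatonGained, onBatonLost and myEntityID are illustrative names):

    Script.include("../../libraries/virtualBaton.js");

    var baton = virtualBaton({ batonName: "batonSimpleEntityScript:" + myEntityID });

    function onBatonGained() {
        // only the current baton owner runs this
        print("This client now owns the baton");
    }
    function onBatonLost() {
        // re-enter the contest so the baton can be won back later
        baton.claim(onBatonGained, onBatonLost);
    }

    baton.claim(onBatonGained, onBatonLost); // start competing for ownership
    // ...and on unload: baton.unload();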
@@ -0,0 +1,33 @@
var orientation = Camera.getOrientation();
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));

// Math.random ensures no caching of script
var SCRIPT_URL = Script.resolvePath("batonSimpleEntityScript.js");

var batonBox = Entities.addEntity({
    type: "Box",
    name: "hifi-baton-entity",
    color: {
        red: 200,
        green: 200,
        blue: 200
    },
    position: center,
    dimensions: {
        x: 0.1,
        y: 0.1,
        z: 0.1
    },
    script: SCRIPT_URL
});


function cleanup() {
    Entities.deleteEntity(batonBox);
}

Script.scriptEnding.connect(cleanup);
@@ -9,51 +9,11 @@
//

var EventBridge;

EventBridgeConnectionProxy = function(parent) {
    this.parent = parent;
    this.realSignal = this.parent.realBridge.scriptEventReceived
    this.webWindowId = this.parent.webWindow.windowId;
}

EventBridgeConnectionProxy.prototype.connect = function(callback) {
    var that = this;
    this.realSignal.connect(function(id, message) {
        if (id === that.webWindowId) { callback(message); }
    });
}

EventBridgeProxy = function(webWindow) {
    this.webWindow = webWindow;
    this.realBridge = this.webWindow.eventBridge;
    this.scriptEventReceived = new EventBridgeConnectionProxy(this);
}

EventBridgeProxy.prototype.emitWebEvent = function(data) {
    this.realBridge.emitWebEvent(data);
}
var WebChannel;

openEventBridge = function(callback) {
    EVENT_BRIDGE_URI = "ws://localhost:51016";
    socket = new WebSocket(this.EVENT_BRIDGE_URI);

    socket.onclose = function() {
        console.error("web channel closed");
    };

    socket.onerror = function(error) {
        console.error("web channel error: " + error);
    };

    socket.onopen = function() {
        channel = new QWebChannel(socket, function(channel) {
            console.log("Document url is " + document.URL);
            var webWindow = channel.objects[document.URL.toLowerCase()];
            console.log("WebWindow is " + webWindow)
            eventBridgeProxy = new EventBridgeProxy(webWindow);
            EventBridge = eventBridgeProxy;
            if (callback) { callback(eventBridgeProxy); }
        });
    }
    WebChannel = new QWebChannel(qt.webChannelTransport, function (channel) {
        EventBridge = WebChannel.objects.eventBridgeWrapper.eventBridge;
        callback(EventBridge);
    });
}
@@ -4,21 +4,17 @@
    <script type="text/javascript" src="jquery-2.1.4.min.js"></script>
    <script type="text/javascript" src="qrc:///qtwebchannel/qwebchannel.js"></script>
    <script type="text/javascript" src="eventBridgeLoader.js"></script>

    <script>
        var myBridge;

        window.onload = function() {
            openEventBridge(function(eventBridge) {
                myBridge = eventBridge;
                myBridge.scriptEventReceived.connect(function(message) {
            openEventBridge(function() {
                EventBridge.scriptEventReceived.connect(function(message) {
                    console.log("HTML side received message: " + message);
                });
            });
        }

        testClick = function() {
            myBridge.emitWebEvent("HTML side sending message - button click");
            EventBridge.emitWebEvent(["Foo", "Bar", { "baz": 1} ]);
        }
    </script>
</head>
@@ -38,14 +38,14 @@ EntityListTool = function(opts) {
            type: 'selectionUpdate',
            selectedIDs: selectedIDs,
        };
        webView.eventBridge.emitScriptEvent(JSON.stringify(data));
        webView.emitScriptEvent(JSON.stringify(data));
    });

    that.clearEntityList = function () {
        var data = {
            type: 'clearEntityList'
        }
        webView.eventBridge.emitScriptEvent(JSON.stringify(data));
        webView.emitScriptEvent(JSON.stringify(data));
    };

    that.sendUpdate = function() {
@@ -72,11 +72,11 @@ EntityListTool = function(opts) {
            entities: entities,
            selectedIDs: selectedIDs,
        };
        webView.eventBridge.emitScriptEvent(JSON.stringify(data));
        webView.emitScriptEvent(JSON.stringify(data));
    }


    webView.eventBridge.webEventReceived.connect(function(data) {
    webView.webEventReceived.connect(function(data) {
        data = JSON.parse(data);
        if (data.type == "selectionUpdate") {
            var ids = data.entityIds;
@@ -234,11 +234,11 @@ GridTool = function(opts) {
    });

    horizontalGrid.addListener(function(data) {
        webView.eventBridge.emitScriptEvent(JSON.stringify(data));
        webView.emitScriptEvent(JSON.stringify(data));
        selectionDisplay.updateHandles();
    });

    webView.eventBridge.webEventReceived.connect(function(data) {
    webView.webEventReceived.connect(function(data) {
        data = JSON.parse(data);
        if (data.type == "init") {
            horizontalGrid.emitUpdate();
@@ -28,7 +28,6 @@ colorMix = function(colorA, colorB, mix) {
    }
    return result;
}

scaleLine = function (start, end, scale) {
    var v = Vec3.subtract(end, start);
    var length = Vec3.length(v);
@@ -262,6 +261,16 @@ randInt = function(low, high) {
    return Math.floor(randFloat(low, high));
}

randomColor = function() {
    return {
        red: randInt(0, 255),
        green: randInt(0, 255),
        blue: randInt(0, 255)
    }
}

hexToRgb = function(hex) {
    var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
    return result ? {
@@ -26,7 +26,7 @@ ParticleExplorerTool = function() {
    });

    that.webView.setVisible(true);
    that.webView.eventBridge.webEventReceived.connect(that.webEventReceived);
    that.webView.webEventReceived.connect(that.webEventReceived);
}
examples/tests/basicEntityTest/entitySpawner.js (new file, 31 lines)
@@ -0,0 +1,31 @@
var orientation = Camera.getOrientation();
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));

// Math.random ensures no caching of script
var SCRIPT_URL = Script.resolvePath("myEntityScript.js")

var myEntity = Entities.addEntity({
    type: "Sphere",
    color: {
        red: 200,
        green: 10,
        blue: 200
    },
    position: center,
    dimensions: {
        x: 1,
        y: 1,
        z: 1
    },
    script: SCRIPT_URL
})


function cleanup() {
    // Entities.deleteEntity(myEntity);
}

Script.scriptEnding.connect(cleanup);
examples/tests/basicEntityTest/myEntityScript.js (new file, 24 lines)
@@ -0,0 +1,24 @@

(function() {
    var _this;
    MyEntity = function() {
        _this = this;

    };

    MyEntity.prototype = {

        preload: function(entityID) {
            this.entityID = entityID;
            var randNum = Math.random().toFixed(3);
            print("EBL PRELOAD ENTITY SCRIPT!!!", randNum)

        },

    };

    // entity scripts always need to return a newly constructed object of our type
    return new MyEntity();
});
@@ -0,0 +1,85 @@

(function() {
    Script.include("../../libraries/virtualBaton.js");

    var baton;

    var _this;
    BatonSoundEntity = function() {
        _this = this;
        _this.drumSound = SoundCache.getSound("https://s3.amazonaws.com/hifi-public/sounds/Drums/deepdrum1.wav");
        _this.injectorOptions = {position: MyAvatar.position, loop: false, volume: 1};
        _this.soundIntervalConnected = false;
        _this.batonDebugModel = Entities.addEntity({
            type: "Box",
            color: {red: 200, green: 10, blue: 200},
            position: Vec3.sum(MyAvatar.position, {x: 0, y: 1, z: 0}),
            dimensions: {x: 0.5, y: 1, z: 0},
            parentID: MyAvatar.sessionUUID,
            visible: false
        });
    };

    function startUpdate() {
        // We are claiming the baton! So start our clip
        if (!_this.soundInjector) {
            // This client hasn't created their injector yet so create one
            _this.soundInjector = Audio.playSound(_this.drumSound, _this.injectorOptions);
        } else {
            // We already have our injector so just restart it
            _this.soundInjector.restart();
        }
        print("EBL START UPDATE");
        Entities.editEntity(_this.batonDebugModel, {visible: true});
        _this.playSoundInterval = Script.setInterval(function() {
            _this.soundInjector.restart();
        }, _this.drumSound.duration * 1000); // Duration is in seconds so convert to ms
        _this.soundIntervalConnected = true;
    }

    function stopUpdateAndReclaim() {
        print("EBL STOP UPDATE AND RECLAIM")
        // when the baton is release
        if (_this.soundIntervalConnected === true) {
            Script.clearInterval(_this.playSoundInterval);
            _this.soundIntervalConnected = false;
            print("EBL CLEAR INTERVAL")
        }
        Entities.editEntity(_this.batonDebugModel, {visible: false});
        // hook up callbacks to the baton
        baton.claim(startUpdate, stopUpdateAndReclaim);
    }

    BatonSoundEntity.prototype = {

        preload: function(entityID) {
            _this.entityID = entityID;
            print("EBL PRELOAD ENTITY SCRIPT!!!");
            baton = virtualBaton({
                // One winner for each entity
                batonName: "io.highfidelity.soundEntityBatonTest:" + _this.entityID,
                // debugFlow: true
            });
            stopUpdateAndReclaim();
        },

        unload: function() {
            print("EBL UNLOAD");
            // baton.release();
            baton.unload();
            Entities.deleteEntity(_this.batonDebugModel);
            if (_this.soundIntervalConnected === true) {
                Script.clearInterval(_this.playSoundInterval);
                _this.soundIntervalConnected = false;
                _this.soundInjector.stop();
                delete _this.soundInjector;
            }
        }

    };

    // entity scripts always need to return a newly constructed object of our type
    return new BatonSoundEntity();
});
@@ -0,0 +1,31 @@
var orientation = Camera.getOrientation();
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));

// Math.random ensures no caching of script
var SCRIPT_URL = Script.resolvePath("batonSoundTestEntityScript.js")

var soundEntity = Entities.addEntity({
    type: "Box",
    color: {
        red: 200,
        green: 10,
        blue: 10
    },
    position: center,
    dimensions: {
        x: 0.1,
        y: 0.1,
        z: 0.1
    },
    script: SCRIPT_URL
});


function cleanup() {
    // Entities.deleteEntity(soundEntity);
}

Script.scriptEnding.connect(cleanup);
@@ -2,32 +2,18 @@ print("Launching web window");

var htmlUrl = Script.resolvePath("..//html/qmlWebTest.html")
webWindow = new OverlayWebWindow('Test Event Bridge', htmlUrl, 320, 240, false);
print("JS Side window: " + webWindow);
print("JS Side bridge: " + webWindow.eventBridge);
webWindow.eventBridge.webEventReceived.connect(function(data) {
webWindow.webEventReceived.connect(function(data) {
    print("JS Side event received: " + data);
});

var titles = ["A", "B", "C"];
var titleIndex = 0;

Script.setInterval(function() {
    webWindow.eventBridge.emitScriptEvent("JS Event sent");
    var size = webWindow.size;
    var position = webWindow.position;
    print("Window url: " + webWindow.url)
    print("Window visible: " + webWindow.visible)
    print("Window size: " + size.x + "x" + size.y)
    print("Window pos: " + position.x + "x" + position.y)
    webWindow.setVisible(!webWindow.visible);
    webWindow.setTitle(titles[titleIndex]);
    webWindow.setSize(320 + Math.random() * 100, 240 + Math.random() * 100);
    titleIndex += 1;
    titleIndex %= titles.length;
}, 2 * 1000);
    var message = [ Math.random(), Math.random() ];
    print("JS Side sending: " + message);
    webWindow.emitScriptEvent(message);
}, 5 * 1000);

Script.setTimeout(function() {
    print("Closing script");
Script.scriptEnding.connect(function(){
    webWindow.close();
    Script.stop();
}, 15 * 1000)
    webWindow.deleteLater();
});
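Together with the qmlWebTest.html change earlier in this diff, the reworked event-bridge round trip condenses to the sketch below (abridged from the two test files; the HTML-side half is shown as comments because it runs inside the page, not in the Interface script engine):

    // Script side
    var webWindow = new OverlayWebWindow('Test Event Bridge', htmlUrl, 320, 240, false);
    webWindow.webEventReceived.connect(function (data) {
        print("JS Side event received: " + data);
    });
    webWindow.emitScriptEvent([Math.random(), Math.random()]);

    // HTML side, after eventBridgeLoader.js has run:
    // openEventBridge(function () {
    //     EventBridge.scriptEventReceived.connect(function (message) {
    //         console.log("HTML side received message: " + message);
    //     });
    //     EventBridge.emitWebEvent(["Foo", "Bar", { "baz": 1 }]);
    // });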
examples/utilities/tools/render/PlotPerf.qml (new file, 186 lines)
@@ -0,0 +1,186 @@
|
|||
//
|
||||
// PlotPerf.qml
|
||||
// examples/utilities/tools/render
|
||||
//
|
||||
// Created by Sam Gateau on 3//2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
|
||||
Item {
|
||||
id: root
|
||||
width: parent.width
|
||||
height: 100
|
||||
property string title
|
||||
property var config
|
||||
property string parameters
|
||||
|
||||
// THis is my hack to get the name of the first property and assign it to a trigger var in order to get
|
||||
// a signal called whenever the value changed
|
||||
property var trigger: config[parameters.split(":")[3].split("-")[0]]
|
||||
|
||||
property var inputs: parameters.split(":")
|
||||
property var valueScale: +inputs[0]
|
||||
property var valueUnit: inputs[1]
|
||||
property var valueNumDigits: inputs[2]
|
||||
property var input_VALUE_OFFSET: 3
|
||||
property var valueMax : 1
|
||||
|
||||
property var _values : new Array()
|
||||
property var tick : 0
|
||||
|
||||
function createValues() {
|
||||
if (inputs.length > input_VALUE_OFFSET) {
|
||||
for (var i = input_VALUE_OFFSET; i < inputs.length; i++) {
|
||||
var varProps = inputs[i].split("-")
|
||||
_values.push( {
|
||||
value: varProps[0],
|
||||
valueMax: 1,
|
||||
numSamplesConstantMax: 0,
|
||||
valueHistory: new Array(),
|
||||
label: varProps[1],
|
||||
color: varProps[2],
|
||||
scale: (varProps.length > 3 ? varProps[3] : 1),
|
||||
unit: (varProps.length > 4 ? varProps[4] : valueUnit)
|
||||
})
|
||||
}
|
||||
}
|
||||
print("in creator" + JSON.stringify(_values));
|
||||
|
||||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
createValues();
|
||||
print(JSON.stringify(_values));
|
||||
|
||||
}
|
||||
|
||||
function pullFreshValues() {
|
||||
//print("pullFreshValues");
|
||||
var VALUE_HISTORY_SIZE = 100;
|
||||
var UPDATE_CANVAS_RATE = 20;
|
||||
tick++;
|
||||
|
||||
|
||||
var currentValueMax = 0
|
||||
for (var i = 0; i < _values.length; i++) {
|
||||
|
||||
var currentVal = config[_values[i].value] * _values[i].scale;
|
||||
_values[i].valueHistory.push(currentVal)
|
||||
_values[i].numSamplesConstantMax++;
|
||||
|
||||
if (_values[i].valueHistory.length > VALUE_HISTORY_SIZE) {
|
||||
var lostValue = _values[i].valueHistory.shift();
|
||||
if (lostValue >= _values[i].valueMax) {
|
||||
_values[i].valueMax *= 0.99
|
||||
_values[i].numSamplesConstantMax = 0
|
||||
}
|
||||
}
|
||||
|
||||
if (_values[i].valueMax < currentVal) {
|
||||
_values[i].valueMax = currentVal;
|
||||
_values[i].numSamplesConstantMax = 0
|
||||
}
|
||||
|
||||
if (_values[i].numSamplesConstantMax > VALUE_HISTORY_SIZE) {
|
||||
_values[i].numSamplesConstantMax = 0
|
||||
_values[i].valueMax *= 0.95 // lower slowly the current max if no new above max since a while
|
||||
}
|
||||
|
||||
if (currentValueMax < _values[i].valueMax) {
|
||||
currentValueMax = _values[i].valueMax
|
||||
}
|
||||
}
|
||||
|
||||
if ((valueMax < currentValueMax) || (tick % VALUE_HISTORY_SIZE == 0)) {
|
||||
valueMax = currentValueMax;
|
||||
}
|
||||
|
||||
if (tick % UPDATE_CANVAS_RATE == 0) {
|
||||
mycanvas.requestPaint()
|
||||
}
|
||||
}
|
||||
onTriggerChanged: pullFreshValues()
|
||||
|
||||
Canvas {
|
||||
id: mycanvas
|
||||
anchors.fill:parent
|
||||
onPaint: {
|
||||
var lineHeight = 12;
|
||||
|
||||
function displayValue(val, unit) {
|
||||
return (val / root.valueScale).toFixed(root.valueNumDigits) + " " + unit
|
||||
}
|
||||
|
||||
function pixelFromVal(val, valScale) {
|
||||
return lineHeight + (height - lineHeight) * (1 - (0.9) * val / valueMax);
|
||||
}
|
||||
function valueFromPixel(pixY) {
|
||||
return ((pixY - lineHeight) / (height - lineHeight) - 1) * valueMax / (-0.9);
|
||||
}
|
||||
function plotValueHistory(ctx, valHistory, color) {
|
||||
var widthStep= width / (valHistory.length - 1);
|
||||
|
||||
ctx.beginPath();
|
||||
ctx.strokeStyle= color; // Green path
|
||||
ctx.lineWidth="2";
|
||||
ctx.moveTo(0, pixelFromVal(valHistory[0]));
|
||||
|
||||
for (var i = 1; i < valHistory.length; i++) {
|
||||
ctx.lineTo(i * widthStep, pixelFromVal(valHistory[i]));
|
||||
}
|
||||
|
||||
ctx.stroke();
|
||||
}
|
||||
function displayValueLegend(ctx, val, num) {
|
||||
ctx.fillStyle = val.color;
|
||||
var bestValue = val.valueHistory[val.valueHistory.length -1];
|
||||
ctx.textAlign = "right";
|
||||
ctx.fillText(displayValue(bestValue, val.unit), width, (num + 2) * lineHeight * 1.5);
|
||||
ctx.textAlign = "left";
|
||||
ctx.fillText(val.label, 0, (num + 2) * lineHeight * 1.5);
|
||||
}
|
||||
|
||||
function displayTitle(ctx, text, maxVal) {
|
||||
ctx.fillStyle = "grey";
|
||||
ctx.textAlign = "right";
|
||||
ctx.fillText(displayValue(valueFromPixel(lineHeight), root.valueUnit), width, lineHeight);
|
||||
|
||||
ctx.fillStyle = "white";
|
||||
ctx.textAlign = "left";
|
||||
ctx.fillText(text, 0, lineHeight);
|
||||
}
|
||||
function displayBackground(ctx) {
|
||||
ctx.fillStyle = Qt.rgba(0, 0, 0, 0.6);
|
||||
ctx.fillRect(0, 0, width, height);
|
||||
|
||||
ctx.strokeStyle= "grey";
|
||||
ctx.lineWidth="2";
|
||||
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(0, lineHeight + 1);
|
||||
ctx.lineTo(width, lineHeight + 1);
|
||||
ctx.moveTo(0, height);
|
||||
ctx.lineTo(width, height);
|
||||
ctx.stroke();
|
||||
}
|
||||
|
||||
var ctx = getContext("2d");
|
||||
ctx.clearRect(0, 0, width, height);
|
||||
ctx.font="12px Verdana";
|
||||
|
||||
displayBackground(ctx);
|
||||
|
||||
for (var i = 0; i < _values.length; i++) {
|
||||
plotValueHistory(ctx, _values[i].valueHistory, _values[i].color)
|
||||
displayValueLegend(ctx, _values[i], i)
|
||||
}
|
||||
|
||||
displayTitle(ctx, title, valueMax)
|
||||
}
|
||||
}
|
||||
}
|
examples/utilities/tools/render/renderStats.js (new file, 21 lines)
@@ -0,0 +1,21 @@
//
//  renderStats.js
//  examples/utilities/tools/render
//
//  Sam Gateau, created on 3/22/2016.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// Set up the qml ui
var qml = Script.resolvePath('stats.qml');
var window = new OverlayWindow({
    title: 'Render Stats',
    source: qml,
    width: 300,
    height: 200
});
window.setPosition(500, 50);
window.closed.connect(function() { Script.stop(); });
examples/utilities/tools/render/stats.qml (new file, 69 lines)
@@ -0,0 +1,69 @@
//
//  stats.qml
//  examples/utilities/tools/render
//
//  Created by Zach Pomerantz on 2/8/2016
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4

Item {
    id: statsUI
    anchors.fill:parent

    Column {
        id: stats
        spacing: 8
        anchors.fill:parent

        property var config: Render.getConfig("Stats")

        function evalEvenHeight() {
            // Why do we have to do that manually ? cannot seem to find a qml / anchor / layout mode that does that ?
            return (height - spacing * (children.length - 1)) / children.length
        }

        PlotPerf {
            title: "Num Buffers"
            config: stats.config
            height: parent.evalEvenHeight()
            parameters: "1::0:bufferCPUCount-CPU-#00B4EF:bufferGPUCount-GPU-#1AC567"
        }
        PlotPerf {
            title: "gpu::Buffer Memory"
            config: stats.config
            height: parent.evalEvenHeight()
            parameters: "1048576:Mb:1:bufferCPUMemoryUsage-CPU-#00B4EF:bufferGPUMemoryUsage-GPU-#1AC567"
        }

        PlotPerf {
            title: "Num Textures"
            config: stats.config
            height: parent.evalEvenHeight()
            parameters: "1::0:textureCPUCount-CPU-#00B4EF:textureGPUCount-GPU-#1AC567:frameTextureCount-Frame-#E2334D"
        }
        PlotPerf {
            title: "gpu::Texture Memory"
            config: stats.config
            height: parent.evalEvenHeight()
            parameters: "1048576:Mb:1:textureCPUMemoryUsage-CPU-#00B4EF:textureGPUMemoryUsage-GPU-#1AC567"
        }
        PlotPerf {
            title: "Drawcalls"
            config: stats.config
            height: parent.evalEvenHeight()
            parameters: "1::0:frameDrawcallCount-frame-#E2334D:frameDrawcallRate-rate-#1AC567-0.001-K/s"
        }
        PlotPerf {
            title: "Triangles"
            config: stats.config
            height: parent.evalEvenHeight()
            parameters: "1000:K:0:frameTriangleCount-frame-#E2334D:frameTriangleRate-rate-#1AC567-0.001-MT/s"
        }
    }
}
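Each PlotPerf instance above is configured entirely by its parameters string, which PlotPerf.qml splits on ':' into a value scale, a unit label, a digit count, and then one 'property-label-color[-scale[-unit]]' spec per plotted curve. A plain-JavaScript sketch of that parsing (function and variable names are illustrative):

    function parsePlotParameters(parameters) {
        var inputs = parameters.split(":");
        var plot = { valueScale: +inputs[0], valueUnit: inputs[1], valueNumDigits: inputs[2], values: [] };
        for (var i = 3; i < inputs.length; i++) {      // remaining fields describe one curve each
            var varProps = inputs[i].split("-");
            plot.values.push({
                value: varProps[0],                    // name of the config property to sample
                label: varProps[1],
                color: varProps[2],
                scale: (varProps.length > 3 ? varProps[3] : 1),
                unit: (varProps.length > 4 ? varProps[4] : plot.valueUnit)
            });
        }
        return plot;
    }
    // e.g. parsePlotParameters("1048576:Mb:1:bufferCPUMemoryUsage-CPU-#00B4EF:bufferGPUMemoryUsage-GPU-#1AC567")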
@ -11,7 +11,6 @@
|
|||
|
||||
#include "IceServer.h"
|
||||
|
||||
#include <openssl/rsa.h>
|
||||
#include <openssl/x509.h>
|
||||
|
||||
#include <QtCore/QJsonDocument>
|
||||
|
@ -68,7 +67,9 @@ bool IceServer::packetVersionMatch(const udt::Packet& packet) {
|
|||
}
|
||||
|
||||
void IceServer::processPacket(std::unique_ptr<udt::Packet> packet) {
|
||||
|
||||
|
||||
_lastPacketTimestamp = QDateTime::currentMSecsSinceEpoch();
|
||||
|
||||
auto nlPacket = NLPacket::fromBase(std::move(packet));
|
||||
|
||||
// make sure that this packet at least looks like something we can read
|
||||
|
@ -161,15 +162,12 @@ SharedNetworkPeer IceServer::addOrUpdateHeartbeatingPeer(NLPacket& packet) {
|
|||
}
|
||||
|
||||
bool IceServer::isVerifiedHeartbeat(const QUuid& domainID, const QByteArray& plaintext, const QByteArray& signature) {
|
||||
// check if we have a private key for this domain ID - if we do not then fire off the request for it
|
||||
// check if we have a public key for this domain ID - if we do not then fire off the request for it
|
||||
auto it = _domainPublicKeys.find(domainID);
|
||||
if (it != _domainPublicKeys.end()) {
|
||||
|
||||
// attempt to verify the signature for this heartbeat
|
||||
const unsigned char* publicKeyData = reinterpret_cast<const unsigned char*>(it->second.constData());
|
||||
|
||||
// first load up the public key into an RSA struct
|
||||
RSA* rsaPublicKey = d2i_RSA_PUBKEY(NULL, &publicKeyData, it->second.size());
|
||||
const auto rsaPublicKey = it->second.get();
|
||||
|
||||
if (rsaPublicKey) {
|
||||
auto hashedPlaintext = QCryptographicHash::hash(plaintext, QCryptographicHash::Sha256);
|
||||
|
@ -180,9 +178,6 @@ bool IceServer::isVerifiedHeartbeat(const QUuid& domainID, const QByteArray& pla
|
|||
signature.size(),
|
||||
rsaPublicKey);
|
||||
|
||||
// free up the public key and remove connection token before we return
|
||||
RSA_free(rsaPublicKey);
|
||||
|
||||
if (verificationResult == 1) {
|
||||
// this is the only success case - we return true here to indicate that the heartbeat is verified
|
||||
return true;
|
||||
|
@ -192,7 +187,7 @@ bool IceServer::isVerifiedHeartbeat(const QUuid& domainID, const QByteArray& pla
|
|||
|
||||
} else {
|
||||
// we can't let this user in since we couldn't convert their public key to an RSA key we could use
|
||||
qWarning() << "Could not convert in-memory public key for" << domainID << "to usable RSA public key.";
|
||||
qWarning() << "Public key for" << domainID << "is not a usable RSA* public key.";
|
||||
qWarning() << "Re-requesting public key from API";
|
||||
}
|
||||
}
|
||||
|
@ -240,7 +235,22 @@ void IceServer::publicKeyReplyFinished(QNetworkReply* reply) {
|
|||
if (responseObject[STATUS_KEY].toString() == SUCCESS_VALUE) {
|
||||
auto dataObject = responseObject[DATA_KEY].toObject();
|
||||
if (dataObject.contains(PUBLIC_KEY_KEY)) {
|
||||
_domainPublicKeys[domainID] = QByteArray::fromBase64(dataObject[PUBLIC_KEY_KEY].toString().toUtf8());
|
||||
|
||||
// grab the base 64 public key from the API response
|
||||
auto apiPublicKey = QByteArray::fromBase64(dataObject[PUBLIC_KEY_KEY].toString().toUtf8());
|
||||
|
||||
// convert the downloaded public key to an RSA struct, if possible
|
||||
const unsigned char* publicKeyData = reinterpret_cast<const unsigned char*>(apiPublicKey.constData());
|
||||
|
||||
RSA* rsaPublicKey = d2i_RSA_PUBKEY(NULL, &publicKeyData, apiPublicKey.size());
|
||||
|
||||
if (rsaPublicKey) {
|
||||
_domainPublicKeys[domainID] = { rsaPublicKey, RSA_free };
|
||||
} else {
|
||||
qWarning() << "Could not convert in-memory public key for" << domainID << "to usable RSA public key.";
|
||||
qWarning() << "Public key will be re-requested on next heartbeat.";
|
||||
}
|
||||
|
||||
} else {
|
||||
qWarning() << "There was no public key present in response for domain with ID" << domainID;
|
||||
}
|
||||
|
@ -254,6 +264,8 @@ void IceServer::publicKeyReplyFinished(QNetworkReply* reply) {
|
|||
|
||||
qWarning() << "Error retreiving public key for domain with ID" << domainID << "-" << reply->errorString();
|
||||
}
|
||||
|
||||
reply->deleteLater();
|
||||
}
|
||||
|
||||
void IceServer::sendPeerInformationPacket(const NetworkPeer& peer, const HifiSockAddr* destinationSockAddr) {
|
||||
|
@ -274,6 +286,11 @@ void IceServer::clearInactivePeers() {
|
|||
|
||||
if ((usecTimestampNow() - peer->getLastHeardMicrostamp()) > (PEER_SILENCE_THRESHOLD_MSECS * 1000)) {
|
||||
qDebug() << "Removing peer from memory for inactivity -" << *peer;
|
||||
|
||||
// if we had a public key for this domain, remove it now
|
||||
_domainPublicKeys.erase(peer->getUUID());
|
||||
|
||||
// remove the peer object
|
||||
peerItem = _activePeers.erase(peerItem);
|
||||
} else {
|
||||
// we didn't kill this peer, push the iterator forwards
|
||||
|
@ -288,7 +305,14 @@ bool IceServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url, b
|
|||
|
||||
if (connection->requestOperation() == QNetworkAccessManager::GetOperation) {
|
||||
if (url.path() == "/status") {
|
||||
connection->respond(HTTPConnection::StatusCode200, QByteArray::number(_activePeers.size()));
|
||||
// figure out if we respond with 0 (we're good) or 1 (we think we're in trouble)
|
||||
|
||||
const quint64 MAX_PACKET_GAP_MS_FOR_STUCK_SOCKET = 10 * 1000;
|
||||
|
||||
int statusNumber = (QDateTime::currentMSecsSinceEpoch() - _lastPacketTimestamp > MAX_PACKET_GAP_MS_FOR_STUCK_SOCKET)
|
||||
? 1 : 0;
|
||||
|
||||
connection->respond(HTTPConnection::StatusCode200, QByteArray::number(statusNumber));
|
||||
}
|
||||
}
|
||||
return true;
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
#include <QtCore/QSharedPointer>
|
||||
#include <QUdpSocket>
|
||||
|
||||
#include <openssl/rsa.h>
|
||||
|
||||
#include <UUIDHasher.h>
|
||||
|
||||
#include <NetworkPeer.h>
|
||||
|
@ -52,8 +54,11 @@ private:
|
|||
|
||||
HTTPManager _httpManager;
|
||||
|
||||
using DomainPublicKeyHash = std::unordered_map<QUuid, QByteArray>;
|
||||
using RSAUniquePtr = std::unique_ptr<RSA, std::function<void(RSA*)>>;
|
||||
using DomainPublicKeyHash = std::unordered_map<QUuid, RSAUniquePtr>;
|
||||
DomainPublicKeyHash _domainPublicKeys;
|
||||
|
||||
quint64 _lastPacketTimestamp;
|
||||
};
|
||||
|
||||
#endif // hifi_IceServer_h
|
||||
|
|
Binary image changes: 361 KiB -> 37 KiB; 361 KiB -> 51 KiB.
|
@ -1,6 +1,7 @@
|
|||
import QtQuick 2.3
|
||||
import QtQuick.Controls 1.2
|
||||
import QtWebEngine 1.1
|
||||
import QtWebChannel 1.0
|
||||
|
||||
import "windows" as Windows
|
||||
import "controls" as Controls
|
||||
|
@ -15,11 +16,24 @@ Windows.Window {
|
|||
// Don't destroy on close... otherwise the JS/C++ will have a dangling pointer
|
||||
destroyOnCloseButton: false
|
||||
property alias source: webview.url
|
||||
property alias eventBridge: eventBridgeWrapper.eventBridge;
|
||||
|
||||
QtObject {
|
||||
id: eventBridgeWrapper
|
||||
WebChannel.id: "eventBridgeWrapper"
|
||||
property var eventBridge;
|
||||
}
|
||||
|
||||
// This is for JS/QML communication, which is unused in a WebWindow,
|
||||
// but not having this here results in spurious warnings about a
|
||||
// missing signal
|
||||
signal sendToScript(var message);
|
||||
|
||||
Controls.WebView {
|
||||
id: webview
|
||||
url: "about:blank"
|
||||
anchors.fill: parent
|
||||
focus: true
|
||||
webChannel.registeredObjects: [eventBridgeWrapper]
|
||||
}
|
||||
} // dialog
|
||||
|
|
|
@@ -20,6 +20,7 @@ Windows.Window {
    // Don't destroy on close... otherwise the JS/C++ will have a dangling pointer
    destroyOnCloseButton: false
    property var source;
    property var eventBridge;
    property var component;
    property var dynamicContent;
    onSourceChanged: {
|
|
@ -1,7 +1,7 @@
|
|||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import QtWebEngine 1.1
|
||||
|
||||
import QtWebChannel 1.0
|
||||
import Qt.labs.settings 1.0
|
||||
|
||||
import "windows" as Windows
|
||||
|
@ -37,14 +37,26 @@ Windows.Window {
|
|||
Repeater {
|
||||
model: 4
|
||||
Tab {
|
||||
// Force loading of the content even if the tab is not visible
|
||||
// (required for letting the C++ code access the webview)
|
||||
active: true
|
||||
enabled: false;
|
||||
// we need to store the original url here for future identification
|
||||
enabled: false
|
||||
property string originalUrl: "";
|
||||
onEnabledChanged: toolWindow.updateVisiblity();
|
||||
|
||||
Controls.WebView {
|
||||
id: webView;
|
||||
anchors.fill: parent
|
||||
enabled: false
|
||||
property alias eventBridgeWrapper: eventBridgeWrapper
|
||||
|
||||
QtObject {
|
||||
id: eventBridgeWrapper
|
||||
WebChannel.id: "eventBridgeWrapper"
|
||||
property var eventBridge;
|
||||
}
|
||||
|
||||
webChannel.registeredObjects: [eventBridgeWrapper]
|
||||
onEnabledChanged: toolWindow.updateVisiblity();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -113,20 +125,23 @@ Windows.Window {
|
|||
|
||||
var tab = tabView.getTab(index);
|
||||
tab.title = "";
|
||||
tab.originalUrl = "";
|
||||
tab.enabled = false;
|
||||
tab.originalUrl = "";
|
||||
tab.item.url = "about:blank";
|
||||
tab.item.enabled = false;
|
||||
}
|
||||
|
||||
function addWebTab(properties) {
|
||||
if (!properties.source) {
|
||||
console.warn("Attempted to open Web Tool Pane without URL")
|
||||
console.warn("Attempted to open Web Tool Pane without URL");
|
||||
return;
|
||||
}
|
||||
|
||||
var existingTabIndex = findIndexForUrl(properties.source);
|
||||
if (existingTabIndex >= 0) {
|
||||
console.log("Existing tab " + existingTabIndex + " found with URL " + properties.source)
|
||||
return tabView.getTab(existingTabIndex);
|
||||
console.log("Existing tab " + existingTabIndex + " found with URL " + properties.source);
|
||||
var tab = tabView.getTab(existingTabIndex);
|
||||
return tab.item;
|
||||
}
|
||||
|
||||
var freeTabIndex = findFreeTab();
|
||||
|
@ -135,25 +150,28 @@ Windows.Window {
|
|||
return;
|
||||
}
|
||||
|
||||
var newTab = tabView.getTab(freeTabIndex);
|
||||
newTab.title = properties.title || "Unknown";
|
||||
newTab.originalUrl = properties.source;
|
||||
newTab.item.url = properties.source;
|
||||
newTab.active = true;
|
||||
|
||||
if (properties.width) {
|
||||
tabView.width = Math.min(Math.max(tabView.width, properties.width),
|
||||
toolWindow.maxSize.x);
|
||||
tabView.width = Math.min(Math.max(tabView.width, properties.width), toolWindow.maxSize.x);
|
||||
}
|
||||
|
||||
if (properties.height) {
|
||||
tabView.height = Math.min(Math.max(tabView.height, properties.height),
|
||||
toolWindow.maxSize.y);
|
||||
tabView.height = Math.min(Math.max(tabView.height, properties.height), toolWindow.maxSize.y);
|
||||
}
|
||||
|
||||
console.log("Updating visibility based on child tab added");
|
||||
newTab.enabledChanged.connect(updateVisiblity)
|
||||
updateVisiblity();
|
||||
return newTab
|
||||
var tab = tabView.getTab(freeTabIndex);
|
||||
tab.title = properties.title || "Unknown";
|
||||
tab.enabled = true;
|
||||
console.log("New tab URL: " + properties.source)
|
||||
tab.originalUrl = properties.source;
|
||||
|
||||
var eventBridge = properties.eventBridge;
|
||||
console.log("Event bridge: " + eventBridge);
|
||||
|
||||
var result = tab.item;
|
||||
result.enabled = true;
|
||||
console.log("Setting event bridge: " + eventBridge);
|
||||
result.eventBridgeWrapper.eventBridge = eventBridge;
|
||||
result.url = properties.source;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -59,6 +59,7 @@ WebEngineView {
        request.openIn(newWindow.webView)
    }

    profile: desktop.browserProfile
    // This breaks the webchannel used for passing messages.  Fixed in Qt 5.6
    // See https://bugreports.qt.io/browse/QTBUG-49521
    //profile: desktop.browserProfile
}
|
|
|
@ -240,25 +240,26 @@ class DeadlockWatchdogThread : public QThread {
|
|||
public:
|
||||
static const unsigned long HEARTBEAT_CHECK_INTERVAL_SECS = 1;
|
||||
static const unsigned long HEARTBEAT_UPDATE_INTERVAL_SECS = 1;
|
||||
static const unsigned long MAX_HEARTBEAT_AGE_USECS = 15 * USECS_PER_SECOND;
|
||||
|
||||
static const unsigned long HEARTBEAT_REPORT_INTERVAL_USECS = 5 * USECS_PER_SECOND;
|
||||
static const unsigned long MAX_HEARTBEAT_AGE_USECS = 30 * USECS_PER_SECOND;
|
||||
static const int WARNING_ELAPSED_HEARTBEAT = 500 * USECS_PER_MSEC; // warn if elapsed heartbeat average is large
|
||||
static const int HEARTBEAT_SAMPLES = 100000; // ~5 seconds worth of samples
|
||||
|
||||
// Set the heartbeat on launch
|
||||
DeadlockWatchdogThread() {
|
||||
setObjectName("Deadlock Watchdog");
|
||||
QTimer* heartbeatTimer = new QTimer();
|
||||
// Give the heartbeat an initial value
|
||||
updateHeartbeat();
|
||||
connect(heartbeatTimer, &QTimer::timeout, [this] {
|
||||
updateHeartbeat();
|
||||
});
|
||||
heartbeatTimer->start(HEARTBEAT_UPDATE_INTERVAL_SECS * MSECS_PER_SECOND);
|
||||
_heartbeat = usecTimestampNow();
|
||||
connect(qApp, &QCoreApplication::aboutToQuit, [this] {
|
||||
_quit = true;
|
||||
});
|
||||
}
|
||||
|
||||
void updateHeartbeat() {
|
||||
_heartbeat = usecTimestampNow();
|
||||
auto now = usecTimestampNow();
|
||||
auto elapsed = now - _heartbeat;
|
||||
_movingAverage.addSample(elapsed);
|
||||
_heartbeat = now;
|
||||
}
|
||||
|
||||
void deadlockDetectionCrash() {
|
||||
|
@ -269,10 +270,52 @@ public:
|
|||
void run() override {
|
||||
while (!_quit) {
|
||||
QThread::sleep(HEARTBEAT_UPDATE_INTERVAL_SECS);
|
||||
|
||||
uint64_t lastHeartbeat = _heartbeat; // sample atomic _heartbeat, because we could context switch away and have it updated on us
|
||||
uint64_t now = usecTimestampNow();
|
||||
auto lastHeartbeatAge = (now > lastHeartbeat) ? now - lastHeartbeat : 0;
|
||||
auto sinceLastReport = (now > _lastReport) ? now - _lastReport : 0;
|
||||
auto elapsedMovingAverage = _movingAverage.getAverage();
|
||||
|
||||
if (elapsedMovingAverage > _maxElapsedAverage) {
|
||||
qDebug() << "DEADLOCK WATCHDOG NEW maxElapsedAverage:"
|
||||
<< "lastHeartbeatAge:" << lastHeartbeatAge
|
||||
<< "elapsedMovingAverage:" << elapsedMovingAverage
|
||||
<< "maxElapsed:" << _maxElapsed
|
||||
<< "PREVIOUS maxElapsedAverage:" << _maxElapsedAverage
|
||||
<< "NEW maxElapsedAverage:" << elapsedMovingAverage
|
||||
<< "samples:" << _movingAverage.getSamples();
|
||||
_maxElapsedAverage = elapsedMovingAverage;
|
||||
}
|
||||
if (lastHeartbeatAge > _maxElapsed) {
|
||||
qDebug() << "DEADLOCK WATCHDOG NEW maxElapsed:"
|
||||
<< "lastHeartbeatAge:" << lastHeartbeatAge
|
||||
<< "elapsedMovingAverage:" << elapsedMovingAverage
|
||||
<< "PREVIOUS maxElapsed:" << _maxElapsed
|
||||
<< "NEW maxElapsed:" << lastHeartbeatAge
|
||||
<< "maxElapsedAverage:" << _maxElapsedAverage
|
||||
<< "samples:" << _movingAverage.getSamples();
|
||||
_maxElapsed = lastHeartbeatAge;
|
||||
}
|
||||
if ((sinceLastReport > HEARTBEAT_REPORT_INTERVAL_USECS) || (elapsedMovingAverage > WARNING_ELAPSED_HEARTBEAT)) {
|
||||
qDebug() << "DEADLOCK WATCHDOG STATUS -- lastHeartbeatAge:" << lastHeartbeatAge
|
||||
<< "elapsedMovingAverage:" << elapsedMovingAverage
|
||||
<< "maxElapsed:" << _maxElapsed
|
||||
<< "maxElapsedAverage:" << _maxElapsedAverage
|
||||
<< "samples:" << _movingAverage.getSamples();
|
||||
_lastReport = now;
|
||||
}
|
||||
|
||||
#ifdef NDEBUG
|
||||
auto now = usecTimestampNow();
|
||||
auto lastHeartbeatAge = now - _heartbeat;
|
||||
if (lastHeartbeatAge > MAX_HEARTBEAT_AGE_USECS) {
|
||||
qDebug() << "DEADLOCK DETECTED -- "
|
||||
<< "lastHeartbeatAge:" << lastHeartbeatAge
|
||||
<< "[ lastHeartbeat :" << lastHeartbeat
|
||||
<< "now:" << now << " ]"
|
||||
<< "elapsedMovingAverage:" << elapsedMovingAverage
|
||||
<< "maxElapsed:" << _maxElapsed
|
||||
<< "maxElapsedAverage:" << _maxElapsedAverage
|
||||
<< "samples:" << _movingAverage.getSamples();
|
||||
deadlockDetectionCrash();
|
||||
}
|
||||
#endif
|
||||
|
@ -280,10 +323,19 @@ public:
|
|||
}
|
||||
|
||||
static std::atomic<uint64_t> _heartbeat;
|
||||
static std::atomic<uint64_t> _lastReport;
|
||||
static std::atomic<uint64_t> _maxElapsed;
|
||||
static std::atomic<int> _maxElapsedAverage;
|
||||
static ThreadSafeMovingAverage<int, HEARTBEAT_SAMPLES> _movingAverage;
|
||||
|
||||
bool _quit { false };
|
||||
};
|
||||
|
||||
std::atomic<uint64_t> DeadlockWatchdogThread::_heartbeat;
|
||||
std::atomic<uint64_t> DeadlockWatchdogThread::_lastReport;
|
||||
std::atomic<uint64_t> DeadlockWatchdogThread::_maxElapsed;
|
||||
std::atomic<int> DeadlockWatchdogThread::_maxElapsedAverage;
|
||||
ThreadSafeMovingAverage<int, DeadlockWatchdogThread::HEARTBEAT_SAMPLES> DeadlockWatchdogThread::_movingAverage;
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
class MyNativeEventFilter : public QAbstractNativeEventFilter {
|
||||
|
@ -1381,6 +1433,8 @@ void Application::initializeUi() {
|
|||
|
||||
void Application::paintGL() {
|
||||
|
||||
updateHeartbeat();
|
||||
|
||||
// Some plugins process message events, potentially leading to
|
||||
// re-entering a paint event. don't allow further processing if this
|
||||
// happens
|
||||
|
@ -2502,6 +2556,8 @@ static uint32_t _renderedFrameIndex { INVALID_FRAME };
|
|||
|
||||
void Application::idle(uint64_t now) {
|
||||
|
||||
updateHeartbeat();
|
||||
|
||||
if (_aboutToQuit || _inPaint) {
|
||||
return; // bail early, nothing to do here.
|
||||
}
|
||||
|
@ -4830,13 +4886,39 @@ void Application::updateDisplayMode() {
|
|||
{
|
||||
std::unique_lock<std::mutex> lock(_displayPluginLock);
|
||||
|
||||
auto oldDisplayPlugin = _displayPlugin;
|
||||
if (_displayPlugin) {
|
||||
_displayPlugin->deactivate();
|
||||
}
|
||||
|
||||
// FIXME probably excessive and useless context switching
|
||||
_offscreenContext->makeCurrent();
|
||||
newDisplayPlugin->activate();
|
||||
|
||||
bool active = newDisplayPlugin->activate();
|
||||
|
||||
if (!active) {
|
||||
// If the new plugin fails to activate, fallback to last display
|
||||
qWarning() << "Failed to activate display: " << newDisplayPlugin->getName();
|
||||
newDisplayPlugin = oldDisplayPlugin;
|
||||
|
||||
if (newDisplayPlugin) {
|
||||
qWarning() << "Falling back to last display: " << newDisplayPlugin->getName();
|
||||
active = newDisplayPlugin->activate();
|
||||
}
|
||||
|
||||
// If there is no last display, or
|
||||
// If the last display fails to activate, fallback to desktop
|
||||
if (!active) {
|
||||
newDisplayPlugin = displayPlugins.at(0);
|
||||
qWarning() << "Falling back to display: " << newDisplayPlugin->getName();
|
||||
active = newDisplayPlugin->activate();
|
||||
}
|
||||
|
||||
if (!active) {
|
||||
qFatal("Failed to activate fallback plugin");
|
||||
}
|
||||
}
|
||||
|
||||
_offscreenContext->makeCurrent();
|
||||
offscreenUi->resize(fromGlm(newDisplayPlugin->getRecommendedUiSize()));
|
||||
_offscreenContext->makeCurrent();
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
#include "AddressManager.h"
|
||||
#include "Application.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "UserActivityLogger.h"
|
||||
#include "MainWindow.h"
|
||||
|
||||
#ifdef HAS_BUGSPLAT
|
||||
|
@ -102,11 +103,19 @@ int main(int argc, const char* argv[]) {
|
|||
// Check OpenGL version.
|
||||
// This is done separately from the main Application so that start-up and shut-down logic within the main Application is
|
||||
// not made more complicated than it already is.
|
||||
bool override = false;
|
||||
QString glVersion;
|
||||
{
|
||||
OpenGLVersionChecker openGLVersionChecker(argc, const_cast<char**>(argv));
|
||||
if (!openGLVersionChecker.isValidVersion()) {
|
||||
qCDebug(interfaceapp, "Early exit due to OpenGL version.");
|
||||
return 0;
|
||||
bool valid = true;
|
||||
glVersion = openGLVersionChecker.checkVersion(valid, override);
|
||||
if (!valid) {
|
||||
if (override) {
|
||||
qCDebug(interfaceapp, "Running on insufficient OpenGL version: %s.", glVersion.toStdString().c_str());
|
||||
} else {
|
||||
qCDebug(interfaceapp, "Early exit due to OpenGL version.");
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -134,6 +143,22 @@ int main(int argc, const char* argv[]) {
|
|||
QSettings::setDefaultFormat(QSettings::IniFormat);
|
||||
Application app(argc, const_cast<char**>(argv), startupTime);
|
||||
|
||||
// If we failed the OpenGLVersion check, log it.
|
||||
if (override) {
|
||||
auto& accountManager = AccountManager::getInstance();
|
||||
if (accountManager.isLoggedIn()) {
|
||||
UserActivityLogger::getInstance().insufficientGLVersion(glVersion);
|
||||
} else {
|
||||
QObject::connect(&AccountManager::getInstance(), &AccountManager::loginComplete, [glVersion](){
|
||||
static bool loggedInsufficientGL = false;
|
||||
if (!loggedInsufficientGL) {
|
||||
UserActivityLogger::getInstance().insufficientGLVersion(glVersion);
|
||||
loggedInsufficientGL = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Setup local server
|
||||
QLocalServer server { &app };
|
||||
|
||||
|
|
|
@ -19,9 +19,7 @@ const QString Basic2DWindowOpenGLDisplayPlugin::NAME("Desktop");
|
|||
|
||||
static const QString FULLSCREEN = "Fullscreen";
|
||||
|
||||
void Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
|
||||
Parent::internalActivate();
|
||||
|
||||
bool Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
|
||||
_framerateActions.clear();
|
||||
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), FULLSCREEN,
|
||||
[this](bool clicked) {
|
||||
|
@ -33,6 +31,8 @@ void Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
|
|||
}, true, false);
|
||||
|
||||
updateFramerate();
|
||||
|
||||
return Parent::internalActivate();
|
||||
}
|
||||
|
||||
void Basic2DWindowOpenGLDisplayPlugin::submitSceneTexture(uint32_t frameIndex, const gpu::TexturePointer& sceneTexture) {
|
||||
|
|
|
@ -22,7 +22,7 @@ public:
|
|||
|
||||
virtual float getTargetFrameRate() override { return _framerateTarget ? (float) _framerateTarget : TARGET_FRAMERATE_Basic2DWindowOpenGL; }
|
||||
|
||||
virtual void internalActivate() override;
|
||||
virtual bool internalActivate() override;
|
||||
|
||||
virtual void submitSceneTexture(uint32_t frameIndex, const gpu::TexturePointer& sceneTexture) override;
|
||||
|
||||
|
|
|
@ -219,7 +219,7 @@ void OpenGLDisplayPlugin::cleanupForSceneTexture(const gpu::TexturePointer& scen
|
|||
}
|
||||
|
||||
|
||||
void OpenGLDisplayPlugin::activate() {
|
||||
bool OpenGLDisplayPlugin::activate() {
|
||||
if (!_cursorsData.size()) {
|
||||
auto& cursorManager = Cursor::Manager::instance();
|
||||
for (const auto iconId : cursorManager.registeredIcons()) {
|
||||
|
@ -238,7 +238,9 @@ void OpenGLDisplayPlugin::activate() {
|
|||
|
||||
// Child classes may override this in order to do things like initialize
|
||||
// libraries, etc
|
||||
internalActivate();
|
||||
if (!internalActivate()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
#if THREADED_PRESENT
|
||||
|
@ -263,7 +265,8 @@ void OpenGLDisplayPlugin::activate() {
|
|||
customizeContext();
|
||||
_container->makeRenderingContextCurrent();
|
||||
#endif
|
||||
DisplayPlugin::activate();
|
||||
|
||||
return DisplayPlugin::activate();
|
||||
}
|
||||
|
||||
void OpenGLDisplayPlugin::deactivate() {
|
||||
|
|
|
@ -32,7 +32,7 @@ public:
|
|||
|
||||
// These must be final to ensure proper ordering of operations
|
||||
// between the main thread and the presentation thread
|
||||
void activate() override final;
|
||||
bool activate() override final;
|
||||
void deactivate() override final;
|
||||
|
||||
bool eventFilter(QObject* receiver, QEvent* event) override;
|
||||
|
@ -77,7 +77,8 @@ protected:
|
|||
virtual void customizeContext();
|
||||
virtual void uncustomizeContext();
|
||||
|
||||
virtual void internalActivate() {}
|
||||
// Returns true on successful activation
|
||||
virtual bool internalActivate() { return true; }
|
||||
virtual void internalDeactivate() {}
|
||||
virtual void cleanupForSceneTexture(const gpu::TexturePointer& sceneTexture);
|
||||
// Plugin specific functionality to send the composed scene to the output window or device
|
||||
|
|
|
@ -32,7 +32,7 @@ glm::uvec2 HmdDisplayPlugin::getRecommendedUiSize() const {
|
|||
return CompositorHelper::VIRTUAL_SCREEN_SIZE;
|
||||
}
|
||||
|
||||
void HmdDisplayPlugin::internalActivate() {
|
||||
bool HmdDisplayPlugin::internalActivate() {
|
||||
_monoPreview = _container->getBoolSetting("monoPreview", DEFAULT_MONO_VIEW);
|
||||
|
||||
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), MONO_PREVIEW,
|
||||
|
@ -41,7 +41,8 @@ void HmdDisplayPlugin::internalActivate() {
|
|||
_container->setBoolSetting("monoPreview", _monoPreview);
|
||||
}, true, _monoPreview);
|
||||
_container->removeMenu(FRAMERATE);
|
||||
Parent::internalActivate();
|
||||
|
||||
return Parent::internalActivate();
|
||||
}
|
||||
|
||||
void HmdDisplayPlugin::customizeContext() {
|
||||
|
|
|
@ -33,7 +33,7 @@ protected:
|
|||
virtual bool isHmdMounted() const = 0;
|
||||
virtual void postPreview() {};
|
||||
|
||||
void internalActivate() override;
|
||||
bool internalActivate() override;
|
||||
void compositeOverlay() override;
|
||||
void compositePointer() override;
|
||||
void internalPresent() override;
|
||||
|
|
|
@ -58,7 +58,7 @@ glm::mat4 StereoDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& basePr
|
|||
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
|
||||
|
||||
std::vector<QAction*> _screenActions;
|
||||
void StereoDisplayPlugin::internalActivate() {
|
||||
bool StereoDisplayPlugin::internalActivate() {
|
||||
auto screens = qApp->screens();
|
||||
_screenActions.resize(screens.size());
|
||||
for (int i = 0; i < screens.size(); ++i) {
|
||||
|
@ -77,7 +77,8 @@ void StereoDisplayPlugin::internalActivate() {
|
|||
|
||||
_screen = qApp->primaryScreen();
|
||||
_container->setFullscreen(_screen);
|
||||
Parent::internalActivate();
|
||||
|
||||
return Parent::internalActivate();
|
||||
}
|
||||
|
||||
void StereoDisplayPlugin::updateScreen() {
|
||||
|
|
|
@ -29,7 +29,7 @@ public:
|
|||
// virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
|
||||
|
||||
protected:
|
||||
virtual void internalActivate() override;
|
||||
virtual bool internalActivate() override;
|
||||
virtual void internalDeactivate() override;
|
||||
void updateScreen();
|
||||
|
||||
|
|
|
@ -333,7 +333,9 @@ void RenderableModelEntityItem::updateModelBounds() {
|
|||
bool movingOrAnimating = isMovingRelativeToParent() || isAnimatingSomething();
|
||||
if ((movingOrAnimating ||
|
||||
_needsInitialSimulation ||
|
||||
_needsJointSimulation ||
|
||||
_model->getTranslation() != getPosition() ||
|
||||
_model->getScaleToFitDimensions() != getDimensions() ||
|
||||
_model->getRotation() != getRotation() ||
|
||||
_model->getRegistrationPoint() != getRegistrationPoint())
|
||||
&& _model->isActive() && _dimensionsInitialized) {
|
||||
|
@ -349,6 +351,7 @@ void RenderableModelEntityItem::updateModelBounds() {
|
|||
}
|
||||
|
||||
_needsInitialSimulation = false;
|
||||
_needsJointSimulation = false;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -738,6 +741,7 @@ bool RenderableModelEntityItem::setAbsoluteJointRotationInObjectFrame(int index,
|
|||
_absoluteJointRotationsInObjectFrameSet[index] = true;
|
||||
_absoluteJointRotationsInObjectFrameDirty[index] = true;
|
||||
result = true;
|
||||
_needsJointSimulation = true;
|
||||
}
|
||||
});
|
||||
return result;
|
||||
|
@ -753,11 +757,33 @@ bool RenderableModelEntityItem::setAbsoluteJointTranslationInObjectFrame(int ind
|
|||
_absoluteJointTranslationsInObjectFrameSet[index] = true;
|
||||
_absoluteJointTranslationsInObjectFrameDirty[index] = true;
|
||||
result = true;
|
||||
_needsJointSimulation = true;
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
void RenderableModelEntityItem::setJointRotations(const QVector<glm::quat>& rotations) {
|
||||
ModelEntityItem::setJointRotations(rotations);
|
||||
_needsJointSimulation = true;
|
||||
}
|
||||
|
||||
void RenderableModelEntityItem::setJointRotationsSet(const QVector<bool>& rotationsSet) {
|
||||
ModelEntityItem::setJointRotationsSet(rotationsSet);
|
||||
_needsJointSimulation = true;
|
||||
}
|
||||
|
||||
void RenderableModelEntityItem::setJointTranslations(const QVector<glm::vec3>& translations) {
|
||||
ModelEntityItem::setJointTranslations(translations);
|
||||
_needsJointSimulation = true;
|
||||
}
|
||||
|
||||
void RenderableModelEntityItem::setJointTranslationsSet(const QVector<bool>& translationsSet) {
|
||||
ModelEntityItem::setJointTranslationsSet(translationsSet);
|
||||
_needsJointSimulation = true;
|
||||
}
|
||||
|
||||
|
||||
void RenderableModelEntityItem::locationChanged() {
|
||||
EntityItem::locationChanged();
|
||||
if (_model && _model->isActive()) {
|
||||
|
|
|
@ -69,6 +69,11 @@ public:
|
|||
virtual bool setAbsoluteJointRotationInObjectFrame(int index, const glm::quat& rotation) override;
|
||||
virtual bool setAbsoluteJointTranslationInObjectFrame(int index, const glm::vec3& translation) override;
|
||||
|
||||
virtual void setJointRotations(const QVector<glm::quat>& rotations) override;
|
||||
virtual void setJointRotationsSet(const QVector<bool>& rotationsSet) override;
|
||||
virtual void setJointTranslations(const QVector<glm::vec3>& translations) override;
|
||||
virtual void setJointTranslationsSet(const QVector<bool>& translationsSet) override;
|
||||
|
||||
virtual void loader() override;
|
||||
virtual void locationChanged() override;
|
||||
|
||||
|
@ -96,6 +101,8 @@ private:
|
|||
bool _showCollisionHull = false;
|
||||
|
||||
bool getAnimationFrame();
|
||||
|
||||
bool _needsJointSimulation { false };
|
||||
};
|
||||
|
||||
#endif // hifi_RenderableModelEntityItem_h
|
||||
|
|
|
@ -48,6 +48,8 @@ public:
|
|||
|
||||
virtual ~RenderablePolyVoxEntityItem();
|
||||
|
||||
void initializePolyVox();
|
||||
|
||||
virtual void somethingChangedNotification() {
|
||||
// This gets called from EnityItem::readEntityDataFromBuffer every time a packet describing
|
||||
// this entity comes from the entity-server. It gets called even if nothing has actually changed
|
||||
|
@ -114,17 +116,28 @@ public:
|
|||
virtual void setYPNeighborID(const EntityItemID& yPNeighborID);
|
||||
virtual void setZPNeighborID(const EntityItemID& zPNeighborID);
|
||||
|
||||
virtual void rebakeMesh();
|
||||
|
||||
virtual void updateRegistrationPoint(const glm::vec3& value);
|
||||
|
||||
void setVoxelsFromData(QByteArray uncompressedData, quint16 voxelXSize, quint16 voxelYSize, quint16 voxelZSize);
|
||||
void forEachVoxelValue(quint16 voxelXSize, quint16 voxelYSize, quint16 voxelZSize,
|
||||
std::function<void(int, int, int, uint8_t)> thunk);
|
||||
|
||||
void setMesh(model::MeshPointer mesh);
|
||||
void setCollisionPoints(const QVector<QVector<glm::vec3>> points, AABox box);
|
||||
PolyVox::SimpleVolume<uint8_t>* getVolData() { return _volData; }
|
||||
|
||||
uint8_t getVoxelInternal(int x, int y, int z);
|
||||
bool setVoxelInternal(int x, int y, int z, uint8_t toValue);
|
||||
|
||||
void setVolDataDirty() { withWriteLock([&] { _volDataDirty = true; }); }
|
||||
|
||||
private:
|
||||
// The PolyVoxEntityItem class has _voxelData which contains dimensions and compressed voxel data. The dimensions
|
||||
// may not match _voxelVolumeSize.
|
||||
|
||||
model::MeshPointer _mesh;
|
||||
bool _meshDirty; // does collision-shape need to be recomputed?
|
||||
mutable QReadWriteLock _meshLock{QReadWriteLock::Recursive};
|
||||
bool _meshDirty { true }; // does collision-shape need to be recomputed?
|
||||
bool _meshInitialized { false };
|
||||
|
||||
NetworkTexturePointer _xTexture;
|
||||
NetworkTexturePointer _yTexture;
|
||||
|
@ -135,44 +148,35 @@ private:
|
|||
static gpu::PipelinePointer _pipeline;
|
||||
|
||||
ShapeInfo _shapeInfo;
|
||||
mutable QReadWriteLock _shapeInfoLock;
|
||||
|
||||
PolyVox::SimpleVolume<uint8_t>* _volData = nullptr;
|
||||
mutable QReadWriteLock _volDataLock{QReadWriteLock::Recursive}; // lock for _volData
|
||||
bool _volDataDirty = false; // does getMesh need to be called?
|
||||
int _onCount; // how many non-zero voxels are in _volData
|
||||
|
||||
bool inUserBounds(const PolyVox::SimpleVolume<uint8_t>* vol, PolyVoxEntityItem::PolyVoxSurfaceStyle surfaceStyle,
|
||||
int x, int y, int z) const;
|
||||
uint8_t getVoxelInternal(int x, int y, int z);
|
||||
bool setVoxelInternal(int x, int y, int z, uint8_t toValue);
|
||||
bool _neighborsNeedUpdate { false };
|
||||
|
||||
bool updateOnCount(int x, int y, int z, uint8_t toValue);
|
||||
PolyVox::RaycastResult doRayCast(glm::vec4 originInVoxel, glm::vec4 farInVoxel, glm::vec4& result) const;
|
||||
|
||||
// these are run off the main thread
|
||||
void decompressVolumeData();
|
||||
void decompressVolumeDataAsync();
|
||||
void compressVolumeDataAndSendEditPacket();
|
||||
void compressVolumeDataAndSendEditPacketAsync();
|
||||
void getMesh();
|
||||
void getMeshAsync();
|
||||
virtual void getMesh(); // recompute mesh
|
||||
void computeShapeInfoWorker();
|
||||
void computeShapeInfoWorkerAsync();
|
||||
|
||||
QSemaphore _threadRunning{1};
|
||||
|
||||
// these are cached lookups of _xNNeighborID, _yNNeighborID, _zNNeighborID, _xPNeighborID, _yPNeighborID, _zPNeighborID
|
||||
EntityItemWeakPointer _xNNeighbor; // neighor found by going along negative X axis
|
||||
EntityItemWeakPointer _xNNeighbor; // neighbor found by going along negative X axis
|
||||
EntityItemWeakPointer _yNNeighbor;
|
||||
EntityItemWeakPointer _zNNeighbor;
|
||||
EntityItemWeakPointer _xPNeighbor; // neighor found by going along positive X axis
|
||||
EntityItemWeakPointer _xPNeighbor; // neighbor found by going along positive X axis
|
||||
EntityItemWeakPointer _yPNeighbor;
|
||||
EntityItemWeakPointer _zPNeighbor;
|
||||
void clearOutOfDateNeighbors();
|
||||
void cacheNeighbors();
|
||||
void copyUpperEdgesFromNeighbors();
|
||||
void bonkNeighbors();
|
||||
};
|
||||
|
||||
bool inUserBounds(const PolyVox::SimpleVolume<uint8_t>* vol, PolyVoxEntityItem::PolyVoxSurfaceStyle surfaceStyle,
|
||||
int x, int y, int z);
|
||||
|
||||
#endif // hifi_RenderablePolyVoxEntityItem_h
|
||||
|
|
|
@ -120,10 +120,10 @@ public:
|
|||
virtual glm::vec3 getJointPosition(int jointIndex) const { return glm::vec3(); }
|
||||
virtual glm::quat getJointRotation(int jointIndex) const { return glm::quat(); }
|
||||
|
||||
void setJointRotations(const QVector<glm::quat>& rotations);
|
||||
void setJointRotationsSet(const QVector<bool>& rotationsSet);
|
||||
void setJointTranslations(const QVector<glm::vec3>& translations);
|
||||
void setJointTranslationsSet(const QVector<bool>& translationsSet);
|
||||
virtual void setJointRotations(const QVector<glm::quat>& rotations);
|
||||
virtual void setJointRotationsSet(const QVector<bool>& rotationsSet);
|
||||
virtual void setJointTranslations(const QVector<glm::vec3>& translations);
|
||||
virtual void setJointTranslationsSet(const QVector<bool>& translationsSet);
|
||||
QVector<glm::quat> getJointRotations() const;
|
||||
QVector<bool> getJointRotationsSet() const;
|
||||
QVector<glm::vec3> getJointTranslations() const;
|
||||
|
|
|
@ -64,44 +64,47 @@ PolyVoxEntityItem::PolyVoxEntityItem(const EntityItemID& entityItemID) :
|
|||
}
|
||||
|
||||
void PolyVoxEntityItem::setVoxelVolumeSize(glm::vec3 voxelVolumeSize) {
|
||||
QWriteLocker(&this->_voxelDataLock);
|
||||
withWriteLock([&] {
|
||||
assert((int)_voxelVolumeSize.x == _voxelVolumeSize.x);
|
||||
assert((int)_voxelVolumeSize.y == _voxelVolumeSize.y);
|
||||
assert((int)_voxelVolumeSize.z == _voxelVolumeSize.z);
|
||||
|
||||
assert((int)_voxelVolumeSize.x == _voxelVolumeSize.x);
|
||||
assert((int)_voxelVolumeSize.y == _voxelVolumeSize.y);
|
||||
assert((int)_voxelVolumeSize.z == _voxelVolumeSize.z);
|
||||
_voxelVolumeSize = glm::vec3(roundf(voxelVolumeSize.x), roundf(voxelVolumeSize.y), roundf(voxelVolumeSize.z));
|
||||
if (_voxelVolumeSize.x < 1) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping x of" << _voxelVolumeSize.x << "to 1";
|
||||
_voxelVolumeSize.x = 1;
|
||||
}
|
||||
if (_voxelVolumeSize.x > MAX_VOXEL_DIMENSION) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping x of" << _voxelVolumeSize.x << "to max";
|
||||
_voxelVolumeSize.x = MAX_VOXEL_DIMENSION;
|
||||
}
|
||||
|
||||
_voxelVolumeSize = glm::vec3(roundf(voxelVolumeSize.x), roundf(voxelVolumeSize.y), roundf(voxelVolumeSize.z));
|
||||
if (_voxelVolumeSize.x < 1) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping x of" << _voxelVolumeSize.x << "to 1";
|
||||
_voxelVolumeSize.x = 1;
|
||||
}
|
||||
if (_voxelVolumeSize.x > MAX_VOXEL_DIMENSION) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping x of" << _voxelVolumeSize.x << "to max";
|
||||
_voxelVolumeSize.x = MAX_VOXEL_DIMENSION;
|
||||
}
|
||||
if (_voxelVolumeSize.y < 1) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping y of" << _voxelVolumeSize.y << "to 1";
|
||||
_voxelVolumeSize.y = 1;
|
||||
}
|
||||
if (_voxelVolumeSize.y > MAX_VOXEL_DIMENSION) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping y of" << _voxelVolumeSize.y << "to max";
|
||||
_voxelVolumeSize.y = MAX_VOXEL_DIMENSION;
|
||||
}
|
||||
|
||||
if (_voxelVolumeSize.y < 1) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping y of" << _voxelVolumeSize.y << "to 1";
|
||||
_voxelVolumeSize.y = 1;
|
||||
}
|
||||
if (_voxelVolumeSize.y > MAX_VOXEL_DIMENSION) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping y of" << _voxelVolumeSize.y << "to max";
|
||||
_voxelVolumeSize.y = MAX_VOXEL_DIMENSION;
|
||||
}
|
||||
|
||||
if (_voxelVolumeSize.z < 1) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping z of" << _voxelVolumeSize.z << "to 1";
|
||||
_voxelVolumeSize.z = 1;
|
||||
}
|
||||
if (_voxelVolumeSize.z > MAX_VOXEL_DIMENSION) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping z of" << _voxelVolumeSize.z << "to max";
|
||||
_voxelVolumeSize.z = MAX_VOXEL_DIMENSION;
|
||||
}
|
||||
if (_voxelVolumeSize.z < 1) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping z of" << _voxelVolumeSize.z << "to 1";
|
||||
_voxelVolumeSize.z = 1;
|
||||
}
|
||||
if (_voxelVolumeSize.z > MAX_VOXEL_DIMENSION) {
|
||||
qDebug() << "PolyVoxEntityItem::setVoxelVolumeSize clamping z of" << _voxelVolumeSize.z << "to max";
|
||||
_voxelVolumeSize.z = MAX_VOXEL_DIMENSION;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const glm::vec3& PolyVoxEntityItem::getVoxelVolumeSize() const {
|
||||
QWriteLocker locker(&this->_voxelDataLock);
|
||||
return _voxelVolumeSize;
|
||||
glm::vec3 PolyVoxEntityItem::getVoxelVolumeSize() const {
|
||||
glm::vec3 voxelVolumeSize;
|
||||
withReadLock([&] {
|
||||
voxelVolumeSize = _voxelVolumeSize;
|
||||
});
|
||||
return voxelVolumeSize;
|
||||
}
|
||||
|
||||
|
||||
|
@ -226,12 +229,16 @@ void PolyVoxEntityItem::debugDump() const {
|
|||
}
|
||||
|
||||
void PolyVoxEntityItem::setVoxelData(QByteArray voxelData) {
|
||||
QWriteLocker(&this->_voxelDataLock);
|
||||
_voxelData = voxelData;
|
||||
_voxelDataDirty = true;
|
||||
withWriteLock([&] {
|
||||
_voxelData = voxelData;
|
||||
_voxelDataDirty = true;
|
||||
});
|
||||
}
|
||||
|
||||
const QByteArray PolyVoxEntityItem::getVoxelData() const {
|
||||
QReadLocker(&this->_voxelDataLock);
|
||||
return _voxelData;
|
||||
QByteArray voxelDataCopy;
|
||||
withReadLock([&] {
|
||||
voxelDataCopy = _voxelData;
|
||||
});
|
||||
return voxelDataCopy;
|
||||
}
|
||||
|
|
|
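The move from the unnamed QWriteLocker(&this->_voxelDataLock) to withWriteLock/withReadLock also fixes a latent bug: a QWriteLocker temporary with no variable name locks and immediately unlocks at the end of the statement, so the old accessors were effectively unguarded. Returning a copy taken while the lock is held (rather than a const reference to the member) keeps callers safe after the lock is released. A minimal sketch of that lock-and-copy accessor pattern, using std::mutex in place of the entity item's lock helpers (the names below are illustrative, not the PolyVox classes):

    #include <cstdint>
    #include <mutex>
    #include <utility>
    #include <vector>

    class VoxelStore {
    public:
        // Copy the data out while holding the lock; the caller gets a snapshot
        // that stays valid after the lock is released.
        std::vector<uint8_t> getVoxelData() const {
            std::lock_guard<std::mutex> guard(_lock);
            return _voxelData;                    // copied under the lock
        }

        void setVoxelData(std::vector<uint8_t> data) {
            std::lock_guard<std::mutex> guard(_lock);
            _voxelData = std::move(data);
            _dirty = true;                        // dependents should re-derive their state
        }

    private:
        mutable std::mutex _lock;                 // mutable so const getters can lock
        std::vector<uint8_t> _voxelData;
        bool _dirty { false };
    };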
@ -52,7 +52,7 @@ class PolyVoxEntityItem : public EntityItem {
|
|||
virtual void debugDump() const;
|
||||
|
||||
virtual void setVoxelVolumeSize(glm::vec3 voxelVolumeSize);
|
||||
virtual const glm::vec3& getVoxelVolumeSize() const;
|
||||
virtual glm::vec3 getVoxelVolumeSize() const;
|
||||
|
||||
virtual void setVoxelData(QByteArray voxelData);
|
||||
virtual const QByteArray getVoxelData() const;
|
||||
|
@ -128,12 +128,14 @@ class PolyVoxEntityItem : public EntityItem {
|
|||
|
||||
virtual void rebakeMesh() {};
|
||||
|
||||
void setVoxelDataDirty(bool value) { withWriteLock([&] { _voxelDataDirty = value; }); }
|
||||
virtual void getMesh() {}; // recompute mesh
|
||||
|
||||
protected:
|
||||
glm::vec3 _voxelVolumeSize; // this is always 3 bytes
|
||||
|
||||
mutable QReadWriteLock _voxelDataLock;
|
||||
QByteArray _voxelData;
|
||||
bool _voxelDataDirty;
|
||||
bool _voxelDataDirty; // _voxelData has changed, things that depend on it should be updated
|
||||
|
||||
PolyVoxSurfaceStyle _voxelSurfaceStyle;
|
||||
|
||||
|
|
|
@ -50,7 +50,9 @@ void GLWidget::initializeGL() {
|
|||
// TODO: write the proper code for linux
|
||||
makeCurrent();
|
||||
#if defined(Q_OS_WIN)
|
||||
_vsyncSupported = context()->contextHandle()->hasExtension("WGL_EXT_swap_control");;
|
||||
if (isValid() && context() && context()->contextHandle()) {
|
||||
_vsyncSupported = context()->contextHandle()->hasExtension("WGL_EXT_swap_control");;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
|
|
|
@ -190,25 +190,21 @@ void OffscreenQmlRenderThread::setupFbo() {
|
|||
using namespace oglplus;
|
||||
_textures.setSize(_size);
|
||||
|
||||
// Before making any ogl calls, clear any outstanding errors
|
||||
// FIXME: Something upstream is polluting the context with a GL_INVALID_ENUM,
|
||||
// likely from glewExperimental = true
|
||||
GLenum err = glGetError();
|
||||
if (err != GL_NO_ERROR) {
|
||||
qDebug() << "Clearing outstanding GL error to set up QML FBO:" << glewGetErrorString(err);
|
||||
try {
|
||||
_depthStencil.reset(new Renderbuffer());
|
||||
Context::Bound(Renderbuffer::Target::Renderbuffer, *_depthStencil)
|
||||
.Storage(
|
||||
PixelDataInternalFormat::DepthComponent,
|
||||
_size.x, _size.y);
|
||||
|
||||
_fbo.reset(new Framebuffer());
|
||||
_fbo->Bind(Framebuffer::Target::Draw);
|
||||
_fbo->AttachRenderbuffer(Framebuffer::Target::Draw,
|
||||
FramebufferAttachment::Depth, *_depthStencil);
|
||||
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
|
||||
} catch (oglplus::Error& error) {
|
||||
qWarning() << "OpenGL error in QML render setup: " << error.what();
|
||||
}
|
||||
|
||||
_depthStencil.reset(new Renderbuffer());
|
||||
Context::Bound(Renderbuffer::Target::Renderbuffer, *_depthStencil)
|
||||
.Storage(
|
||||
PixelDataInternalFormat::DepthComponent,
|
||||
_size.x, _size.y);
|
||||
|
||||
_fbo.reset(new Framebuffer());
|
||||
_fbo->Bind(Framebuffer::Target::Draw);
|
||||
_fbo->AttachRenderbuffer(Framebuffer::Target::Draw,
|
||||
FramebufferAttachment::Depth, *_depthStencil);
|
||||
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
|
||||
}
|
||||
|
||||
void OffscreenQmlRenderThread::init() {
|
||||
|
@ -299,10 +295,21 @@ void OffscreenQmlRenderThread::render() {
|
|||
|
||||
try {
|
||||
PROFILE_RANGE("qml_render")
|
||||
TexturePtr texture = _textures.getNextTexture();
|
||||
_fbo->Bind(Framebuffer::Target::Draw);
|
||||
_fbo->AttachTexture(Framebuffer::Target::Draw, FramebufferAttachment::Color, *texture, 0);
|
||||
_fbo->Complete(Framebuffer::Target::Draw);
|
||||
|
||||
TexturePtr texture = _textures.getNextTexture();
|
||||
|
||||
try {
|
||||
_fbo->Bind(Framebuffer::Target::Draw);
|
||||
_fbo->AttachTexture(Framebuffer::Target::Draw, FramebufferAttachment::Color, *texture, 0);
|
||||
_fbo->Complete(Framebuffer::Target::Draw);
|
||||
} catch (oglplus::Error& error) {
|
||||
qWarning() << "OpenGL error in QML render: " << error.what();
|
||||
|
||||
// In case we are failing from a failed setupFbo, reset fbo before next render
|
||||
setupFbo();
|
||||
throw;
|
||||
}
|
||||
|
||||
{
|
||||
PROFILE_RANGE("qml_render->rendercontrol")
|
||||
_renderControl->render();
|
||||
|
@ -311,13 +318,14 @@ void OffscreenQmlRenderThread::render() {
|
|||
// for now just clear the errors
|
||||
glGetError();
|
||||
}
|
||||
|
||||
// FIXME probably unecessary
|
||||
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
|
||||
_quickWindow->resetOpenGLState();
|
||||
_escrow.submit(GetName(*texture));
|
||||
_lastRenderTime = usecTimestampNow();
|
||||
} catch (std::runtime_error& error) {
|
||||
qWarning() << "Failed to render QML " << error.what();
|
||||
qWarning() << "Failed to render QML: " << error.what();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
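The QML render path now wraps the oglplus framebuffer work in try/catch: on failure it logs the error, re-runs setupFbo() so the next frame starts from a clean framebuffer, and rethrows so the outer handler abandons the current frame (oglplus::Error derives from std::runtime_error, so the outer catch still sees it). A reduced, self-contained sketch of that recover-and-rethrow shape, with stub functions standing in for the oglplus calls:

    #include <iostream>
    #include <stdexcept>

    // Stand-ins for the real FBO setup and attach calls (illustrative only).
    void setupFramebuffer() { /* (re)create the FBO and depth attachment here */ }
    void attachColorTargetOrThrow(bool fail) {
        if (fail) { throw std::runtime_error("incomplete framebuffer"); }
    }

    void renderFrame(bool simulateFailure) {
        try {
            try {
                attachColorTargetOrThrow(simulateFailure);
            } catch (const std::runtime_error& error) {
                std::cerr << "render target error: " << error.what() << '\n';
                setupFramebuffer();   // rebuild state so the next frame can succeed
                throw;                // abandon this frame
            }
            // ... issue the scene draw calls here ...
            std::cout << "frame rendered\n";
        } catch (const std::runtime_error& error) {
            std::cerr << "frame skipped: " << error.what() << '\n';
        }
    }

    int main() {
        renderFrame(true);    // failure path: log, rebuild, skip the frame
        renderFrame(false);   // normal path
    }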
@ -22,11 +22,26 @@ OpenGLVersionChecker::OpenGLVersionChecker(int& argc, char** argv) :
|
|||
{
|
||||
}
|
||||
|
||||
bool OpenGLVersionChecker::isValidVersion() {
|
||||
bool valid = true;
|
||||
QString OpenGLVersionChecker::checkVersion(bool& valid, bool& override) {
|
||||
valid = true;
|
||||
override = false;
|
||||
|
||||
// Retrieve OpenGL version
|
||||
GLWidget* glWidget = new GLWidget();
|
||||
valid = glWidget->isValid();
|
||||
// Inform user if no OpenGL support
|
||||
if (!valid) {
|
||||
QMessageBox messageBox;
|
||||
messageBox.setWindowTitle("Missing OpenGL Support");
|
||||
messageBox.setIcon(QMessageBox::Warning);
|
||||
messageBox.setText(QString().sprintf("Your system does not support OpenGL, Interface cannot run."));
|
||||
messageBox.setInformativeText("Press OK to exit.");
|
||||
messageBox.setStandardButtons(QMessageBox::Ok);
|
||||
messageBox.setDefaultButton(QMessageBox::Ok);
|
||||
messageBox.exec();
|
||||
return QString();
|
||||
}
|
||||
|
||||
// Retrieve OpenGL version
|
||||
glWidget->initializeGL();
|
||||
QString glVersion = QString((const char*)glGetString(GL_VERSION));
|
||||
delete glWidget;
|
||||
|
@ -54,8 +69,8 @@ bool OpenGLVersionChecker::isValidVersion() {
|
|||
messageBox.setInformativeText("Press OK to exit; Ignore to continue.");
|
||||
messageBox.setStandardButtons(QMessageBox::Ok | QMessageBox::Ignore);
|
||||
messageBox.setDefaultButton(QMessageBox::Ok);
|
||||
valid = messageBox.exec() == QMessageBox::Ignore;
|
||||
override = messageBox.exec() == QMessageBox::Ignore;
|
||||
}
|
||||
|
||||
return valid;
|
||||
return glVersion;
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@ class OpenGLVersionChecker : public QApplication {
|
|||
public:
|
||||
OpenGLVersionChecker(int& argc, char** argv);
|
||||
|
||||
static bool isValidVersion();
|
||||
static QString checkVersion(bool& valid, bool& override);
|
||||
};
|
||||
|
||||
#endif // hifi_OpenGLVersionChecker_h
|
||||
|
|
|
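OpenGLVersionChecker now reports three things in one call: whether usable OpenGL support was found (valid), whether the user chose to continue despite a too-old version (override), and the detected version string as the return value, which pairs with the new UserActivityLogger::insufficientGLVersion() hook. A self-contained stand-in for that out-parameter shape (the threshold, decisions, and names below are invented for illustration, not the actual Interface startup logic):

    #include <iostream>
    #include <string>

    // Mimics the checkVersion(bool&, bool&) -> version-string signature.
    std::string checkVersion(bool& valid, bool& overridden) {
        std::string detected = "2.1";       // pretend this came from glGetString(GL_VERSION)
        valid = detected >= "4.1";          // assumed minimum requirement
        overridden = !valid;                // pretend the user clicked "Ignore"
        return detected;
    }

    int main() {
        bool valid = false;
        bool overridden = false;
        std::string version = checkVersion(valid, overridden);
        if (!valid && !overridden) {
            return 1;                       // exit before building the application
        }
        if (!valid && overridden) {
            std::cout << "continuing with OpenGL " << version << " at the user's request\n";
        }
        return 0;
    }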
@ -74,6 +74,11 @@ void Context::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, cons
|
|||
_backend->downloadFramebuffer(srcFramebuffer, region, destImage);
|
||||
}
|
||||
|
||||
|
||||
void Context::getStats(ContextStats& stats) const {
|
||||
_backend->getStats(stats);
|
||||
}
|
||||
|
||||
const Backend::TransformCamera& Backend::TransformCamera::recomputeDerived(const Transform& xformView) const {
|
||||
_projectionInverse = glm::inverse(_projection);
|
||||
|
||||
|
@ -102,3 +107,68 @@ Backend::TransformCamera Backend::TransformCamera::getEyeCamera(int eye, const S
|
|||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Counters for Buffer and Texture usage in GPU/Context
|
||||
std::atomic<uint32_t> Context::_bufferGPUCount{ 0 };
|
||||
std::atomic<Buffer::Size> Context::_bufferGPUMemoryUsage{ 0 };
|
||||
|
||||
std::atomic<uint32_t> Context::_textureGPUCount{ 0 };
|
||||
std::atomic<Texture::Size> Context::_textureGPUMemoryUsage{ 0 };
|
||||
|
||||
void Context::incrementBufferGPUCount() {
|
||||
_bufferGPUCount++;
|
||||
}
|
||||
void Context::decrementBufferGPUCount() {
|
||||
_bufferGPUCount--;
|
||||
}
|
||||
void Context::updateBufferGPUMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (newObjectSize > prevObjectSize) {
|
||||
_bufferGPUMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
} else {
|
||||
_bufferGPUMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
void Context::incrementTextureGPUCount() {
|
||||
_textureGPUCount++;
|
||||
}
|
||||
void Context::decrementTextureGPUCount() {
|
||||
_textureGPUCount--;
|
||||
}
|
||||
void Context::updateTextureGPUMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (newObjectSize > prevObjectSize) {
|
||||
_textureGPUMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
} else {
|
||||
_textureGPUMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t Context::getBufferGPUCount() {
|
||||
return _bufferGPUCount.load();
|
||||
}
|
||||
|
||||
Context::Size Context::getBufferGPUMemoryUsage() {
|
||||
return _bufferGPUMemoryUsage.load();
|
||||
}
|
||||
|
||||
uint32_t Context::getTextureGPUCount() {
|
||||
return _textureGPUCount.load();
|
||||
}
|
||||
|
||||
Context::Size Context::getTextureGPUMemoryUsage() {
|
||||
return _textureGPUMemoryUsage.load();
|
||||
}
|
||||
|
||||
void Backend::incrementBufferGPUCount() { Context::incrementBufferGPUCount(); }
|
||||
void Backend::decrementBufferGPUCount() { Context::decrementBufferGPUCount(); }
|
||||
void Backend::updateBufferGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize) { Context::updateBufferGPUMemoryUsage(prevObjectSize, newObjectSize); }
|
||||
void Backend::incrementTextureGPUCount() { Context::incrementTextureGPUCount(); }
|
||||
void Backend::decrementTextureGPUCount() { Context::decrementTextureGPUCount(); }
|
||||
void Backend::updateTextureGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize) { Context::updateTextureGPUMemoryUsage(prevObjectSize, newObjectSize); }
|
||||
|
||||
|
|
|
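The new Context counters are plain std::atomic globals driven from the backend: object counts tick up and down in constructors and destructors, while memory usage is adjusted by the difference between the old and new object size, so concurrent updates never lose or double-count bytes. The same bookkeeping in isolation (a sketch, not the gpu classes themselves):

    #include <atomic>
    #include <cstdint>
    #include <iostream>

    using Size = uint64_t;

    std::atomic<uint32_t> gBufferCount { 0 };
    std::atomic<Size> gBufferMemory { 0 };

    // Apply only the delta, with fetch_add/fetch_sub so the running total
    // stays consistent even when many buffers resize concurrently.
    void updateBufferMemory(Size prevSize, Size newSize) {
        if (prevSize == newSize) {
            return;
        }
        if (newSize > prevSize) {
            gBufferMemory.fetch_add(newSize - prevSize);
        } else {
            gBufferMemory.fetch_sub(prevSize - newSize);
        }
    }

    int main() {
        gBufferCount++;                  // a buffer was created
        updateBufferMemory(0, 1024);     // it allocated 1 KiB
        updateBufferMemory(1024, 4096);  // then grew to 4 KiB
        updateBufferMemory(4096, 0);     // and released everything on destruction
        gBufferCount--;
        std::cout << gBufferCount.load() << " buffers, "
                  << gBufferMemory.load() << " bytes\n";   // 0 buffers, 0 bytes
    }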
@ -27,6 +27,21 @@ class QImage;
|
|||
|
||||
namespace gpu {
|
||||
|
||||
struct ContextStats {
|
||||
public:
|
||||
int _ISNumFormatChanges = 0;
|
||||
int _ISNumInputBufferChanges = 0;
|
||||
int _ISNumIndexBufferChanges = 0;
|
||||
|
||||
int _RSNumTextureBounded = 0;
|
||||
|
||||
int _DSNumDrawcalls = 0;
|
||||
int _DSNumTriangles = 0;
|
||||
|
||||
ContextStats() {}
|
||||
ContextStats(const ContextStats& stats) = default;
|
||||
};
|
||||
|
||||
struct StereoState {
|
||||
bool _enable{ false };
|
||||
bool _skybox{ false };
|
||||
|
@ -100,13 +115,27 @@ public:
|
|||
return reinterpret_cast<T*>(object.gpuObject.getGPUObject());
|
||||
}
|
||||
|
||||
void getStats(ContextStats& stats) const { stats = _stats; }
|
||||
|
||||
|
||||
|
||||
// These should only be accessed by Backend implementation to repport the buffer and texture allocations,
|
||||
// they are NOT public calls
|
||||
static void incrementBufferGPUCount();
|
||||
static void decrementBufferGPUCount();
|
||||
static void updateBufferGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize);
|
||||
static void incrementTextureGPUCount();
|
||||
static void decrementTextureGPUCount();
|
||||
static void updateTextureGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize);
|
||||
|
||||
protected:
|
||||
StereoState _stereo;
|
||||
ContextStats _stats;
|
||||
};
|
||||
|
||||
class Context {
|
||||
public:
|
||||
using Size = Resource::Size;
|
||||
typedef Backend* (*CreateBackend)();
|
||||
typedef bool (*MakeProgram)(Shader& shader, const Shader::BindingSet& bindings);
|
||||
|
||||
|
@ -125,6 +154,7 @@ public:
|
|||
~Context();
|
||||
|
||||
void render(Batch& batch);
|
||||
|
||||
void enableStereo(bool enable = true);
|
||||
bool isStereo();
|
||||
void setStereoProjections(const mat4 eyeProjections[2]);
|
||||
|
@ -137,6 +167,16 @@ public:
|
|||
// It s here for convenience to easily capture a snapshot
|
||||
void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage);
|
||||
|
||||
// Repporting stats of the context
|
||||
void getStats(ContextStats& stats) const;
|
||||
|
||||
|
||||
static uint32_t getBufferGPUCount();
|
||||
static Size getBufferGPUMemoryUsage();
|
||||
|
||||
static uint32_t getTextureGPUCount();
|
||||
static Size getTextureGPUMemoryUsage();
|
||||
|
||||
protected:
|
||||
Context(const Context& context);
|
||||
|
||||
|
@ -153,6 +193,23 @@ protected:
|
|||
static std::once_flag _initialized;
|
||||
|
||||
friend class Shader;
|
||||
|
||||
// These should only be accessed by the Backend, they are NOT public calls
|
||||
static void incrementBufferGPUCount();
|
||||
static void decrementBufferGPUCount();
|
||||
static void updateBufferGPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
static void incrementTextureGPUCount();
|
||||
static void decrementTextureGPUCount();
|
||||
static void updateTextureGPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
|
||||
// Buffer and Texture Counters
|
||||
static std::atomic<uint32_t> _bufferGPUCount;
|
||||
static std::atomic<Size> _bufferGPUMemoryUsage;
|
||||
|
||||
static std::atomic<uint32_t> _textureGPUCount;
|
||||
static std::atomic<Size> _textureGPUMemoryUsage;
|
||||
|
||||
friend class Backend;
|
||||
};
|
||||
typedef std::shared_ptr<Context> ContextPointer;
|
||||
|
||||
|
|
|
@ -324,7 +324,10 @@ void GLBackend::do_draw(Batch& batch, size_t paramOffset) {
|
|||
uint32 numVertices = batch._params[paramOffset + 1]._uint;
|
||||
uint32 startVertex = batch._params[paramOffset + 0]._uint;
|
||||
glDrawArrays(mode, startVertex, numVertices);
|
||||
(void) CHECK_GL_ERROR();
|
||||
_stats._DSNumTriangles += numVertices / 3;
|
||||
_stats._DSNumDrawcalls++;
|
||||
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
void GLBackend::do_drawIndexed(Batch& batch, size_t paramOffset) {
|
||||
|
@ -339,6 +342,9 @@ void GLBackend::do_drawIndexed(Batch& batch, size_t paramOffset) {
|
|||
GLvoid* indexBufferByteOffset = reinterpret_cast<GLvoid*>(startIndex * typeByteSize + _input._indexBufferOffset);
|
||||
|
||||
glDrawElements(mode, numIndices, glType, indexBufferByteOffset);
|
||||
_stats._DSNumTriangles += numIndices / 3;
|
||||
_stats._DSNumDrawcalls++;
|
||||
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
|
@ -350,6 +356,9 @@ void GLBackend::do_drawInstanced(Batch& batch, size_t paramOffset) {
|
|||
uint32 startVertex = batch._params[paramOffset + 1]._uint;
|
||||
|
||||
glDrawArraysInstancedARB(mode, startVertex, numVertices, numInstances);
|
||||
_stats._DSNumTriangles += (numInstances * numVertices) / 3;
|
||||
_stats._DSNumDrawcalls += numInstances;
|
||||
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
|
@ -372,6 +381,9 @@ void GLBackend::do_drawIndexedInstanced(Batch& batch, size_t paramOffset) {
|
|||
glDrawElementsInstanced(mode, numIndices, glType, indexBufferByteOffset, numInstances);
|
||||
Q_UNUSED(startInstance);
|
||||
#endif
|
||||
_stats._DSNumTriangles += (numInstances * numIndices) / 3;
|
||||
_stats._DSNumDrawcalls += numInstances;
|
||||
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
|
@ -382,6 +394,7 @@ void GLBackend::do_multiDrawIndirect(Batch& batch, size_t paramOffset) {
|
|||
GLenum mode = _primitiveToGLmode[(Primitive)batch._params[paramOffset + 1]._uint];
|
||||
|
||||
glMultiDrawArraysIndirect(mode, reinterpret_cast<GLvoid*>(_input._indirectBufferOffset), commandCount, (GLsizei)_input._indirectBufferStride);
|
||||
_stats._DSNumDrawcalls += commandCount;
|
||||
#else
|
||||
// FIXME implement the slow path
|
||||
#endif
|
||||
|
@ -396,6 +409,8 @@ void GLBackend::do_multiDrawIndexedIndirect(Batch& batch, size_t paramOffset) {
|
|||
GLenum indexType = _elementTypeToGLType[_input._indexBufferType];
|
||||
|
||||
glMultiDrawElementsIndirect(mode, indexType, reinterpret_cast<GLvoid*>(_input._indirectBufferOffset), commandCount, (GLsizei)_input._indirectBufferStride);
|
||||
_stats._DSNumDrawcalls += commandCount;
|
||||
|
||||
#else
|
||||
// FIXME implement the slow path
|
||||
#endif
|
||||
|
|
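Each draw path above now bumps _DSNumDrawcalls and estimates triangles as numVertices / 3 (or numIndices / 3), which is exact for triangle lists but only an approximation for strips, fans, lines, or points. If a topology-aware estimate were ever wanted, it could look roughly like the sketch below; the enum is a stand-in, not the gpu::Primitive type:

    #include <cstdint>

    // Illustrative primitive set; the real batch carries gpu::Primitive.
    enum class Topology { Points, Lines, LineStrip, Triangles, TriangleStrip, TriangleFan };

    // Estimate how many primitives a draw of vertexCount vertices produces.
    uint32_t estimatePrimitives(Topology topology, uint32_t vertexCount) {
        switch (topology) {
            case Topology::Points:        return vertexCount;
            case Topology::Lines:         return vertexCount / 2;
            case Topology::LineStrip:     return vertexCount > 1 ? vertexCount - 1 : 0;
            case Topology::Triangles:     return vertexCount / 3;
            case Topology::TriangleStrip: // fall through
            case Topology::TriangleFan:   return vertexCount > 2 ? vertexCount - 2 : 0;
        }
        return 0;
    }

    int main() {
        // A 100-vertex triangle strip yields 98 triangles, not 100 / 3.
        return estimatePrimitives(Topology::TriangleStrip, 100) == 98 ? 0 : 1;
    }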
|
@ -67,6 +67,8 @@ public:
|
|||
|
||||
GLBuffer();
|
||||
~GLBuffer();
|
||||
|
||||
void setSize(GLuint size);
|
||||
};
|
||||
static GLBuffer* syncGPUObject(const Buffer& buffer);
|
||||
static GLuint getBufferID(const Buffer& buffer);
|
||||
|
@ -77,10 +79,15 @@ public:
|
|||
Stamp _contentStamp;
|
||||
GLuint _texture;
|
||||
GLenum _target;
|
||||
GLuint _size;
|
||||
|
||||
GLTexture();
|
||||
~GLTexture();
|
||||
|
||||
void setSize(GLuint size);
|
||||
GLuint size() const { return _size; }
|
||||
|
||||
private:
|
||||
GLuint _size;
|
||||
};
|
||||
static GLTexture* syncGPUObject(const Texture& texture);
|
||||
static GLuint getTextureID(const TexturePointer& texture, bool sync = true);
|
||||
|
@ -230,26 +237,11 @@ public:
|
|||
void do_setStateBlend(State::BlendFunction blendFunction);
|
||||
|
||||
void do_setStateColorWriteMask(uint32 mask);
|
||||
|
||||
// Repporting stats of the context
|
||||
class Stats {
|
||||
public:
|
||||
int _ISNumFormatChanges = 0;
|
||||
int _ISNumInputBufferChanges = 0;
|
||||
int _ISNumIndexBufferChanges = 0;
|
||||
|
||||
Stats() {}
|
||||
Stats(const Stats& stats) = default;
|
||||
};
|
||||
|
||||
void getStats(Stats& stats) const { stats = _stats; }
|
||||
|
||||
|
||||
protected:
|
||||
void renderPassTransfer(Batch& batch);
|
||||
void renderPassDraw(Batch& batch);
|
||||
|
||||
Stats _stats;
|
||||
|
||||
// Draw Stage
|
||||
void do_draw(Batch& batch, size_t paramOffset);
|
||||
void do_drawIndexed(Batch& batch, size_t paramOffset);
|
||||
|
|
|
@ -16,12 +16,21 @@ GLBackend::GLBuffer::GLBuffer() :
|
|||
_stamp(0),
|
||||
_buffer(0),
|
||||
_size(0)
|
||||
{}
|
||||
{
|
||||
Backend::incrementBufferGPUCount();
|
||||
}
|
||||
|
||||
GLBackend::GLBuffer::~GLBuffer() {
|
||||
if (_buffer != 0) {
|
||||
glDeleteBuffers(1, &_buffer);
|
||||
}
|
||||
Backend::updateBufferGPUMemoryUsage(_size, 0);
|
||||
Backend::decrementBufferGPUCount();
|
||||
}
|
||||
|
||||
void GLBackend::GLBuffer::setSize(GLuint size) {
|
||||
Backend::updateBufferGPUMemoryUsage(_size, size);
|
||||
_size = size;
|
||||
}
|
||||
|
||||
GLBackend::GLBuffer* GLBackend::syncGPUObject(const Buffer& buffer) {
|
||||
|
@ -46,7 +55,7 @@ GLBackend::GLBuffer* GLBackend::syncGPUObject(const Buffer& buffer) {
|
|||
glBufferData(GL_ARRAY_BUFFER, buffer.getSysmem().getSize(), buffer.getSysmem().readData(), GL_DYNAMIC_DRAW);
|
||||
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
object->_stamp = buffer.getSysmem().getStamp();
|
||||
object->_size = (GLuint)buffer.getSysmem().getSize();
|
||||
object->setSize((GLuint)buffer.getSysmem().getSize());
|
||||
//}
|
||||
(void) CHECK_GL_ERROR();
|
||||
|
||||
|
|
|
@ -251,6 +251,9 @@ void GLBackend::do_setResourceTexture(Batch& batch, size_t paramOffset) {
|
|||
return;
|
||||
}
|
||||
|
||||
// One more True texture bound
|
||||
_stats._RSNumTextureBounded++;
|
||||
|
||||
// Always make sure the GLObject is in sync
|
||||
GLTexture* object = GLBackend::syncGPUObject(*resourceTexture);
|
||||
if (object) {
|
||||
|
|
|
@ -19,12 +19,21 @@ GLBackend::GLTexture::GLTexture() :
|
|||
_texture(0),
|
||||
_target(GL_TEXTURE_2D),
|
||||
_size(0)
|
||||
{}
|
||||
{
|
||||
Backend::incrementTextureGPUCount();
|
||||
}
|
||||
|
||||
GLBackend::GLTexture::~GLTexture() {
|
||||
if (_texture != 0) {
|
||||
glDeleteTextures(1, &_texture);
|
||||
}
|
||||
Backend::updateTextureGPUMemoryUsage(_size, 0);
|
||||
Backend::decrementTextureGPUCount();
|
||||
}
|
||||
|
||||
void GLBackend::GLTexture::setSize(GLuint size) {
|
||||
Backend::updateTextureGPUMemoryUsage(_size, size);
|
||||
_size = size;
|
||||
}
|
||||
|
||||
class GLTexelFormat {
|
||||
|
@ -427,8 +436,8 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
if (needUpdate) {
|
||||
if (texture.isStoredMipFaceAvailable(0)) {
|
||||
Texture::PixelsPointer mip = texture.accessStoredMipFace(0);
|
||||
const GLvoid* bytes = mip->_sysmem.read<Byte>();
|
||||
Element srcFormat = mip->_format;
|
||||
const GLvoid* bytes = mip->readData();
|
||||
Element srcFormat = mip->getFormat();
|
||||
|
||||
GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(texture.getTexelFormat(), srcFormat);
|
||||
|
||||
|
@ -458,8 +467,8 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
if (texture.isStoredMipFaceAvailable(0)) {
|
||||
Texture::PixelsPointer mip = texture.accessStoredMipFace(0);
|
||||
|
||||
bytes = mip->_sysmem.read<Byte>();
|
||||
srcFormat = mip->_format;
|
||||
bytes = mip->readData();
|
||||
srcFormat = mip->getFormat();
|
||||
|
||||
object->_contentStamp = texture.getDataStamp();
|
||||
}
|
||||
|
@ -483,7 +492,7 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
|
||||
object->_storageStamp = texture.getStamp();
|
||||
object->_contentStamp = texture.getDataStamp();
|
||||
object->_size = (GLuint)texture.getSize();
|
||||
object->setSize((GLuint)texture.getSize());
|
||||
}
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, boundTex);
|
||||
|
@ -507,11 +516,11 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
for (int f = 0; f < NUM_FACES; f++) {
|
||||
if (texture.isStoredMipFaceAvailable(0, f)) {
|
||||
Texture::PixelsPointer mipFace = texture.accessStoredMipFace(0, f);
|
||||
Element srcFormat = mipFace->_format;
|
||||
Element srcFormat = mipFace->getFormat();
|
||||
GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(texture.getTexelFormat(), srcFormat);
|
||||
|
||||
glTexSubImage2D(FACE_LAYOUT[f], 0, texelFormat.internalFormat, texture.getWidth(), texture.getWidth(), 0,
|
||||
texelFormat.format, texelFormat.type, (GLvoid*) (mipFace->_sysmem.read<Byte>()));
|
||||
texelFormat.format, texelFormat.type, (GLvoid*) (mipFace->readData()));
|
||||
|
||||
// At this point the mip pixels have been loaded, we can notify
|
||||
texture.notifyMipFaceGPULoaded(0, f);
|
||||
|
@ -536,11 +545,11 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
for (int f = 0; f < NUM_FACES; f++) {
|
||||
if (texture.isStoredMipFaceAvailable(0, f)) {
|
||||
Texture::PixelsPointer mipFace = texture.accessStoredMipFace(0, f);
|
||||
Element srcFormat = mipFace->_format;
|
||||
Element srcFormat = mipFace->getFormat();
|
||||
GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(texture.getTexelFormat(), srcFormat);
|
||||
|
||||
glTexImage2D(FACE_LAYOUT[f], 0, texelFormat.internalFormat, texture.getWidth(), texture.getWidth(), 0,
|
||||
texelFormat.format, texelFormat.type, (GLvoid*) (mipFace->_sysmem.read<Byte>()));
|
||||
texelFormat.format, texelFormat.type, (GLvoid*) (mipFace->readData()));
|
||||
|
||||
// At this point the mip pixels have been loaded, we can notify
|
||||
texture.notifyMipFaceGPULoaded(0, f);
|
||||
|
@ -561,7 +570,7 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
|
||||
object->_storageStamp = texture.getStamp();
|
||||
object->_contentStamp = texture.getDataStamp();
|
||||
object->_size = (GLuint)texture.getSize();
|
||||
object->setSize((GLuint)texture.getSize());
|
||||
}
|
||||
|
||||
glBindTexture(GL_TEXTURE_CUBE_MAP, boundTex);
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
#include <NumericalConstants.h>
|
||||
#include <QDebug>
|
||||
|
||||
#include "Context.h"
|
||||
|
||||
using namespace gpu;
|
||||
|
||||
class AllocationDebugger {
|
||||
|
@ -232,19 +234,55 @@ Resource::Size Resource::Sysmem::append(Size size, const Byte* bytes) {
|
|||
return 0;
|
||||
}
|
||||
|
||||
std::atomic<uint32_t> Buffer::_bufferCPUCount{ 0 };
|
||||
std::atomic<Buffer::Size> Buffer::_bufferCPUMemoryUsage{ 0 };
|
||||
|
||||
void Buffer::updateBufferCPUMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (prevObjectSize > newObjectSize) {
|
||||
_bufferCPUMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
} else {
|
||||
_bufferCPUMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t Buffer::getBufferCPUCount() {
|
||||
return _bufferCPUCount.load();
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::getBufferCPUMemoryUsage() {
|
||||
return _bufferCPUMemoryUsage.load();
|
||||
}
|
||||
|
||||
uint32_t Buffer::getBufferGPUCount() {
|
||||
return Context::getBufferGPUCount();
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::getBufferGPUMemoryUsage() {
|
||||
return Context::getBufferGPUMemoryUsage();
|
||||
}
|
||||
|
||||
Buffer::Buffer() :
|
||||
Resource(),
|
||||
_sysmem(new Sysmem()) {
|
||||
_bufferCPUCount++;
|
||||
|
||||
}
|
||||
|
||||
Buffer::Buffer(Size size, const Byte* bytes) :
|
||||
Resource(),
|
||||
_sysmem(new Sysmem(size, bytes)) {
|
||||
_bufferCPUCount++;
|
||||
Buffer::updateBufferCPUMemoryUsage(0, _sysmem->getSize());
|
||||
}
|
||||
|
||||
Buffer::Buffer(const Buffer& buf) :
|
||||
Resource(),
|
||||
_sysmem(new Sysmem(buf.getSysmem())) {
|
||||
_bufferCPUCount++;
|
||||
Buffer::updateBufferCPUMemoryUsage(0, _sysmem->getSize());
|
||||
}
|
||||
|
||||
Buffer& Buffer::operator=(const Buffer& buf) {
|
||||
|
@ -253,18 +291,27 @@ Buffer& Buffer::operator=(const Buffer& buf) {
|
|||
}
|
||||
|
||||
Buffer::~Buffer() {
|
||||
_bufferCPUCount--;
|
||||
|
||||
if (_sysmem) {
|
||||
Buffer::updateBufferCPUMemoryUsage(_sysmem->getSize(), 0);
|
||||
delete _sysmem;
|
||||
_sysmem = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::resize(Size size) {
|
||||
return editSysmem().resize(size);
|
||||
auto prevSize = editSysmem().getSize();
|
||||
auto newSize = editSysmem().resize(size);
|
||||
Buffer::updateBufferCPUMemoryUsage(prevSize, newSize);
|
||||
return newSize;
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::setData(Size size, const Byte* data) {
|
||||
return editSysmem().setData(size, data);
|
||||
auto prevSize = editSysmem().getSize();
|
||||
auto newSize = editSysmem().setData(size, data);
|
||||
Buffer::updateBufferCPUMemoryUsage(prevSize, newSize);
|
||||
return newSize;
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::setSubData(Size offset, Size size, const Byte* data) {
|
||||
|
@ -272,6 +319,9 @@ Buffer::Size Buffer::setSubData(Size offset, Size size, const Byte* data) {
|
|||
}
|
||||
|
||||
Buffer::Size Buffer::append(Size size, const Byte* data) {
|
||||
return editSysmem().append( size, data);
|
||||
auto prevSize = editSysmem().getSize();
|
||||
auto newSize = editSysmem().append( size, data);
|
||||
Buffer::updateBufferCPUMemoryUsage(prevSize, newSize);
|
||||
return newSize;
|
||||
}
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
#include "Format.h"
|
||||
|
||||
#include <vector>
|
||||
#include <atomic>
|
||||
|
||||
#include <memory>
|
||||
#ifdef _DEBUG
|
||||
|
@ -109,7 +110,15 @@ protected:
|
|||
};
|
||||
|
||||
class Buffer : public Resource {
|
||||
static std::atomic<uint32_t> _bufferCPUCount;
|
||||
static std::atomic<Size> _bufferCPUMemoryUsage;
|
||||
static void updateBufferCPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
|
||||
public:
|
||||
static uint32_t getBufferCPUCount();
|
||||
static Size getBufferCPUMemoryUsage();
|
||||
static uint32_t getBufferGPUCount();
|
||||
static Size getBufferGPUMemoryUsage();
|
||||
|
||||
Buffer();
|
||||
Buffer(Size size, const Byte* bytes);
|
||||
|
|
|
@ -12,20 +12,77 @@
|
|||
#include "Texture.h"
|
||||
|
||||
#include <glm/gtc/constants.hpp>
|
||||
|
||||
#include <QDebug>
|
||||
#include "GPULogging.h"
|
||||
#include "Context.h"
|
||||
|
||||
using namespace gpu;
|
||||
|
||||
|
||||
std::atomic<uint32_t> Texture::_textureCPUCount{ 0 };
|
||||
std::atomic<Texture::Size> Texture::_textureCPUMemoryUsage{ 0 };
|
||||
|
||||
void Texture::updateTextureCPUMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (prevObjectSize > newObjectSize) {
|
||||
_textureCPUMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
} else {
|
||||
_textureCPUMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t Texture::getTextureCPUCount() {
|
||||
return _textureCPUCount.load();
|
||||
}
|
||||
|
||||
Texture::Size Texture::getTextureCPUMemoryUsage() {
|
||||
return _textureCPUMemoryUsage.load();
|
||||
}
|
||||
|
||||
uint32_t Texture::getTextureGPUCount() {
|
||||
return Context::getTextureGPUCount();
|
||||
}
|
||||
|
||||
Texture::Size Texture::getTextureGPUMemoryUsage() {
|
||||
return Context::getTextureGPUMemoryUsage();
|
||||
|
||||
}
|
||||
|
||||
uint8 Texture::NUM_FACES_PER_TYPE[NUM_TYPES] = {1, 1, 1, 6};
|
||||
|
||||
Texture::Pixels::Pixels(const Element& format, Size size, const Byte* bytes) :
|
||||
_sysmem(size, bytes),
|
||||
_format(format),
|
||||
_sysmem(size, bytes),
|
||||
_isGPULoaded(false) {
|
||||
Texture::updateTextureCPUMemoryUsage(0, _sysmem.getSize());
|
||||
}
|
||||
|
||||
Texture::Pixels::~Pixels() {
|
||||
Texture::updateTextureCPUMemoryUsage(_sysmem.getSize(), 0);
|
||||
}
|
||||
|
||||
Texture::Size Texture::Pixels::resize(Size pSize) {
|
||||
auto prevSize = _sysmem.getSize();
|
||||
auto newSize = _sysmem.resize(pSize);
|
||||
Texture::updateTextureCPUMemoryUsage(prevSize, newSize);
|
||||
return newSize;
|
||||
}
|
||||
|
||||
Texture::Size Texture::Pixels::setData(const Element& format, Size size, const Byte* bytes ) {
|
||||
_format = format;
|
||||
auto prevSize = _sysmem.getSize();
|
||||
auto newSize = _sysmem.setData(size, bytes);
|
||||
Texture::updateTextureCPUMemoryUsage(prevSize, newSize);
|
||||
_isGPULoaded = false;
|
||||
return newSize;
|
||||
}
|
||||
|
||||
void Texture::Pixels::notifyGPULoaded() {
|
||||
_isGPULoaded = true;
|
||||
auto prevSize = _sysmem.getSize();
|
||||
auto newSize = _sysmem.resize(0);
|
||||
Texture::updateTextureCPUMemoryUsage(prevSize, newSize);
|
||||
}
|
||||
|
||||
void Texture::Storage::assignTexture(Texture* texture) {
|
||||
|
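Texture::Pixels now keeps _sysmem private and exposes readData()/getSize()/setData()/notifyGPULoaded(), so every path that touches the pixel store also updates the CPU memory tally, and notifyGPULoaded() can release the system-memory copy once the GPU owns the data. A toy version of that encapsulation, with the accounting reduced to a single counter (illustrative names, not the gpu::Texture types):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    static uint64_t gCpuPixelBytes = 0;      // stand-in for the CPU memory counter

    class Pixels {
    public:
        explicit Pixels(const std::vector<uint8_t>& bytes) : _sysmem(bytes) {
            gCpuPixelBytes += _sysmem.size();
        }
        ~Pixels() { gCpuPixelBytes -= _sysmem.size(); }

        const uint8_t* readData() const { return _sysmem.data(); }
        std::size_t getSize() const { return _sysmem.size(); }

        // Once the GPU has its own copy, drop the system-memory copy and
        // keep the counter in sync.
        void notifyGPULoaded() {
            gCpuPixelBytes -= _sysmem.size();
            _sysmem.clear();
            _sysmem.shrink_to_fit();
            _isGPULoaded = true;
        }

    private:
        std::vector<uint8_t> _sysmem;        // private: callers go through the accessors
        bool _isGPULoaded { false };
    };

    int main() {
        Pixels mip(std::vector<uint8_t>(256, 0xff));
        mip.notifyGPULoaded();               // CPU copy released, counter back to zero
        return (gCpuPixelBytes == 0 && mip.getSize() == 0) ? 0 : 1;
    }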
@ -59,15 +116,15 @@ const Texture::PixelsPointer Texture::Storage::getMipFace(uint16 level, uint8 fa
|
|||
|
||||
void Texture::Storage::notifyMipFaceGPULoaded(uint16 level, uint8 face) const {
|
||||
PixelsPointer mipFace = getMipFace(level, face);
|
||||
if (mipFace && (_type != TEX_CUBE)) {
|
||||
mipFace->_isGPULoaded = true;
|
||||
mipFace->_sysmem.resize(0);
|
||||
// Free the mips
|
||||
if (mipFace) {
|
||||
mipFace->notifyGPULoaded();
|
||||
}
|
||||
}
|
||||
|
||||
bool Texture::Storage::isMipAvailable(uint16 level, uint8 face) const {
|
||||
PixelsPointer mipFace = getMipFace(level, face);
|
||||
return (mipFace && mipFace->_sysmem.getSize());
|
||||
return (mipFace && mipFace->getSize());
|
||||
}
|
||||
|
||||
bool Texture::Storage::allocateMip(uint16 level) {
|
||||
|
@ -103,9 +160,7 @@ bool Texture::Storage::assignMipData(uint16 level, const Element& format, Size s
|
|||
auto faceBytes = bytes;
|
||||
Size allocated = 0;
|
||||
for (auto& face : mip) {
|
||||
face->_format = format;
|
||||
allocated += face->_sysmem.setData(sizePerFace, faceBytes);
|
||||
face->_isGPULoaded = false;
|
||||
allocated += face->setData(format, sizePerFace, faceBytes);
|
||||
faceBytes += sizePerFace;
|
||||
}
|
||||
|
||||
|
@ -122,9 +177,7 @@ bool Texture::Storage::assignMipFaceData(uint16 level, const Element& format, Si
|
|||
Size allocated = 0;
|
||||
if (face < mip.size()) {
|
||||
auto mipFace = mip[face];
|
||||
mipFace->_format = format;
|
||||
allocated += mipFace->_sysmem.setData(size, bytes);
|
||||
mipFace->_isGPULoaded = false;
|
||||
allocated += mipFace->setData(format, size, bytes);
|
||||
bumpStamp();
|
||||
}
|
||||
|
||||
|
@ -171,10 +224,12 @@ Texture* Texture::createFromStorage(Storage* storage) {
|
|||
Texture::Texture():
|
||||
Resource()
|
||||
{
|
||||
_textureCPUCount++;
|
||||
}
|
||||
|
||||
Texture::~Texture()
|
||||
{
|
||||
_textureCPUCount--;
|
||||
}
|
||||
|
||||
Texture::Size Texture::resize(Type type, const Element& texelFormat, uint16 width, uint16 height, uint16 depth, uint16 numSamples, uint16 numSlices) {
|
||||
|
@ -292,7 +347,7 @@ bool Texture::assignStoredMip(uint16 level, const Element& format, Size size, co
|
|||
}
|
||||
}
|
||||
|
||||
// THen check that the mem buffer passed make sense with its format
|
||||
// THen check that the mem texture passed make sense with its format
|
||||
Size expectedSize = evalStoredMipSize(level, format);
|
||||
if (size == expectedSize) {
|
||||
_storage->assignMipData(level, format, size, bytes);
|
||||
|
@ -323,7 +378,7 @@ bool Texture::assignStoredMipFace(uint16 level, const Element& format, Size size
|
|||
}
|
||||
}
|
||||
|
||||
// THen check that the mem buffer passed make sense with its format
|
||||
// THen check that the mem texture passed make sense with its format
|
||||
Size expectedSize = evalStoredMipFaceSize(level, format);
|
||||
if (size == expectedSize) {
|
||||
_storage->assignMipFaceData(level, format, size, bytes, face);
|
||||
|
@ -364,7 +419,7 @@ uint16 Texture::autoGenerateMips(uint16 maxMip) {
|
|||
|
||||
uint16 Texture::getStoredMipWidth(uint16 level) const {
|
||||
PixelsPointer mipFace = accessStoredMipFace(level);
|
||||
if (mipFace && mipFace->_sysmem.getSize()) {
|
||||
if (mipFace && mipFace->getSize()) {
|
||||
return evalMipWidth(level);
|
||||
}
|
||||
return 0;
|
||||
|
@ -372,7 +427,7 @@ uint16 Texture::getStoredMipWidth(uint16 level) const {
|
|||
|
||||
uint16 Texture::getStoredMipHeight(uint16 level) const {
|
||||
PixelsPointer mip = accessStoredMipFace(level);
|
||||
if (mip && mip->_sysmem.getSize()) {
|
||||
if (mip && mip->getSize()) {
|
||||
return evalMipHeight(level);
|
||||
}
|
||||
return 0;
|
||||
|
@ -380,7 +435,7 @@ uint16 Texture::getStoredMipHeight(uint16 level) const {
|
|||
|
||||
uint16 Texture::getStoredMipDepth(uint16 level) const {
|
||||
PixelsPointer mipFace = accessStoredMipFace(level);
|
||||
if (mipFace && mipFace->_sysmem.getSize()) {
|
||||
if (mipFace && mipFace->getSize()) {
|
||||
return evalMipDepth(level);
|
||||
}
|
||||
return 0;
|
||||
|
@ -388,7 +443,7 @@ uint16 Texture::getStoredMipDepth(uint16 level) const {
|
|||
|
||||
uint32 Texture::getStoredMipNumTexels(uint16 level) const {
|
||||
PixelsPointer mipFace = accessStoredMipFace(level);
|
||||
if (mipFace && mipFace->_sysmem.getSize()) {
|
||||
if (mipFace && mipFace->getSize()) {
|
||||
return evalMipWidth(level) * evalMipHeight(level) * evalMipDepth(level);
|
||||
}
|
||||
return 0;
|
||||
|
@ -396,7 +451,7 @@ uint32 Texture::getStoredMipNumTexels(uint16 level) const {
|
|||
|
||||
uint32 Texture::getStoredMipSize(uint16 level) const {
|
||||
PixelsPointer mipFace = accessStoredMipFace(level);
|
||||
if (mipFace && mipFace->_sysmem.getSize()) {
|
||||
if (mipFace && mipFace->getSize()) {
|
||||
return evalMipWidth(level) * evalMipHeight(level) * evalMipDepth(level) * getTexelFormat().getSize();
|
||||
}
|
||||
return 0;
|
||||
|
@ -642,8 +697,8 @@ bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<
|
|||
// for each face of cube texture
|
||||
for(int face=0; face < gpu::Texture::NUM_CUBE_FACES; face++) {
|
||||
|
||||
auto numComponents = cubeTexture.accessStoredMipFace(0,face)->_format.getScalarCount();
|
||||
auto data = cubeTexture.accessStoredMipFace(0,face)->_sysmem.readData();
|
||||
auto numComponents = cubeTexture.accessStoredMipFace(0,face)->getFormat().getScalarCount();
|
||||
auto data = cubeTexture.accessStoredMipFace(0,face)->readData();
|
||||
if (data == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -138,7 +138,14 @@ protected:
|
|||
};
|
||||
|
||||
class Texture : public Resource {
|
||||
static std::atomic<uint32_t> _textureCPUCount;
|
||||
static std::atomic<Size> _textureCPUMemoryUsage;
|
||||
static void updateTextureCPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
public:
|
||||
static uint32_t getTextureCPUCount();
|
||||
static Size getTextureCPUMemoryUsage();
|
||||
static uint32_t getTextureGPUCount();
|
||||
static Size getTextureGPUMemoryUsage();
|
||||
|
||||
class Usage {
|
||||
public:
|
||||
|
@ -194,9 +201,21 @@ public:
|
|||
Pixels(const Element& format, Size size, const Byte* bytes);
|
||||
~Pixels();
|
||||
|
||||
Sysmem _sysmem;
|
||||
const Byte* readData() const { return _sysmem.readData(); }
|
||||
Size getSize() const { return _sysmem.getSize(); }
|
||||
Size resize(Size pSize);
|
||||
Size setData(const Element& format, Size size, const Byte* bytes );
|
||||
|
||||
const Element& getFormat() const { return _format; }
|
||||
|
||||
void notifyGPULoaded();
|
||||
|
||||
protected:
|
||||
Element _format;
|
||||
Sysmem _sysmem;
|
||||
bool _isGPULoaded;
|
||||
|
||||
friend class Texture;
|
||||
};
|
||||
typedef std::shared_ptr< Pixels > PixelsPointer;
|
||||
|
||||
|
@ -448,7 +467,7 @@ typedef std::shared_ptr<Texture> TexturePointer;
|
|||
typedef std::vector< TexturePointer > Textures;
|
||||
|
||||
|
||||
// TODO: For now TextureView works with Buffer as a place holder for the Texture.
|
||||
// TODO: For now TextureView works with Texture as a place holder for the Texture.
|
||||
// The overall logic should be about the same except that the Texture will be a real GL Texture under the hood
|
||||
class TextureView {
|
||||
public:
|
||||
|
|
|
@ -81,6 +81,8 @@ AccountManager::AccountManager() :
|
|||
|
||||
qRegisterMetaType<QHttpMultiPart*>("QHttpMultiPart*");
|
||||
|
||||
qRegisterMetaType<AccountManagerAuth::Type>();
|
||||
|
||||
connect(&_accountInfo, &DataServerAccountInfo::balanceChanged, this, &AccountManager::accountInfoBalanceChanged);
|
||||
}
|
||||
|
||||
|
@ -215,12 +217,13 @@ void AccountManager::sendRequest(const QString& path,
|
|||
if (thread() != QThread::currentThread()) {
|
||||
QMetaObject::invokeMethod(this, "sendRequest",
|
||||
Q_ARG(const QString&, path),
|
||||
Q_ARG(AccountManagerAuth::Type, AccountManagerAuth::Required),
|
||||
Q_ARG(AccountManagerAuth::Type, authType),
|
||||
Q_ARG(QNetworkAccessManager::Operation, operation),
|
||||
Q_ARG(const JSONCallbackParameters&, callbackParams),
|
||||
Q_ARG(const QByteArray&, dataByteArray),
|
||||
Q_ARG(QHttpMultiPart*, dataMultiPart),
|
||||
Q_ARG(QVariantMap, propertyMap));
|
||||
return;
|
||||
}
|
||||
|
||||
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
|
||||
|
|
|
@ -58,8 +58,6 @@ void UserActivityLogger::logAction(QString action, QJsonObject details, JSONCall
|
|||
|
||||
// if no callbacks specified, call our owns
|
||||
if (params.isEmpty()) {
|
||||
params.jsonCallbackReceiver = this;
|
||||
params.jsonCallbackMethod = "requestFinished";
|
||||
params.errorCallbackReceiver = this;
|
||||
params.errorCallbackMethod = "requestError";
|
||||
}
|
||||
|
@ -70,10 +68,6 @@ void UserActivityLogger::logAction(QString action, QJsonObject details, JSONCall
|
|||
params, NULL, multipart);
|
||||
}
|
||||
|
||||
void UserActivityLogger::requestFinished(QNetworkReply& requestReply) {
|
||||
// qCDebug(networking) << object;
|
||||
}
|
||||
|
||||
void UserActivityLogger::requestError(QNetworkReply& errorReply) {
|
||||
qCDebug(networking) << errorReply.error() << "-" << errorReply.errorString();
|
||||
}
|
||||
|
@ -91,6 +85,15 @@ void UserActivityLogger::launch(QString applicationVersion, bool previousSession
|
|||
logAction(ACTION_NAME, actionDetails);
|
||||
}
|
||||
|
||||
void UserActivityLogger::insufficientGLVersion(QString glVersion) {
|
||||
const QString ACTION_NAME = "insufficient_gl";
|
||||
QJsonObject actionDetails;
|
||||
QString GL_VERSION = "glVersion";
|
||||
actionDetails.insert(GL_VERSION, glVersion);
|
||||
|
||||
logAction(ACTION_NAME, actionDetails);
|
||||
}
|
||||
|
||||
void UserActivityLogger::changedDisplayName(QString displayName) {
|
||||
const QString ACTION_NAME = "changed_display_name";
|
||||
QJsonObject actionDetails;
|
||||
|
|
|
@ -30,6 +30,8 @@ public slots:
|
|||
void logAction(QString action, QJsonObject details = QJsonObject(), JSONCallbackParameters params = JSONCallbackParameters());
|
||||
|
||||
void launch(QString applicationVersion, bool previousSessionCrashed, int previousSessionRuntime);
|
||||
|
||||
void insufficientGLVersion(QString glVersion);
|
||||
|
||||
void changedDisplayName(QString displayName);
|
||||
void changedModel(QString typeOfModel, QString modelURL);
|
||||
|
@ -39,7 +41,6 @@ public slots:
|
|||
void wentTo(QString destinationType, QString destinationName);
|
||||
|
||||
private slots:
|
||||
void requestFinished(QNetworkReply& requestReply);
|
||||
void requestError(QNetworkReply& errorReply);
|
||||
|
||||
private:
|
||||
|
|
|
@ -15,6 +15,8 @@
|
|||
|
||||
#include <LogHandler.h>
|
||||
|
||||
#include "Socket.h"
|
||||
|
||||
using namespace udt;
|
||||
|
||||
static int packetMetaTypeId = qRegisterMetaType<Packet*>("Packet*");
|
||||
|
@ -28,15 +30,10 @@ static const std::array<Key, 4> KEYS {{
|
|||
}};
|
||||
|
||||
void xorHelper(char* start, int size, Key key) {
|
||||
const auto end = start + size;
|
||||
|
||||
auto p = start;
|
||||
for (; p + sizeof(Key) < end; p += sizeof(Key)) {
|
||||
*reinterpret_cast<Key*>(p) ^= key;
|
||||
}
|
||||
|
||||
for (int i = 0; p < end; ++p || ++i) {
|
||||
*p ^= *(reinterpret_cast<const char*>(&key) + i);
|
||||
auto current = start;
|
||||
auto xorValue = reinterpret_cast<const char*>(&key);
|
||||
for (int i = 0; i < size; ++i) {
|
||||
*(current++) ^= *(xorValue + (i % sizeof(Key)));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
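The rewritten xorHelper walks the payload one byte at a time and XORs it with the matching byte of the 64-bit key (i % sizeof(Key)). This replaces the word-at-a-time version whose tail loop used ++p || ++i as its increment: the || short-circuits after ++p, so i never advanced and every trailing byte was XORed with the first key byte. A standalone round-trip of the byte-wise version, with a fixed test key rather than an entry from the KEYS table:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    using Key = uint64_t;

    // XOR size bytes starting at start with the key, repeating the key every
    // sizeof(Key) bytes. Applying it twice restores the original data.
    void xorHelper(char* start, int size, Key key) {
        auto current = start;
        auto xorValue = reinterpret_cast<const char*>(&key);
        for (int i = 0; i < size; ++i) {
            *(current++) ^= *(xorValue + (i % sizeof(Key)));
        }
    }

    int main() {
        char message[] = "hello, obfuscated packet";
        char original[sizeof(message)];
        std::memcpy(original, message, sizeof(message));

        const Key key = 0x0123456789ABCDEFULL;          // test key, not one of the real KEYS
        xorHelper(message, sizeof(message) - 1, key);   // obfuscate the payload (skip the NUL)
        xorHelper(message, sizeof(message) - 1, key);   // round-trip back to plaintext
        assert(std::memcmp(message, original, sizeof(message)) == 0);
        return 0;
    }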
@ -51,7 +51,7 @@ PacketQueue::PacketPointer PacketQueue::takePacket() {
|
|||
--_currentIndex;
|
||||
}
|
||||
|
||||
return std::move(packet);
|
||||
return packet;
|
||||
}
|
||||
|
||||
unsigned int PacketQueue::nextIndex() {
|
||||
|
|
|
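Dropping std::move from the return statement is the right call here: returning a named local by itself allows named return value optimization, and when elision is not possible the local is still treated as an rvalue and moved, whereas return std::move(packet) disables NRVO (and trips -Wpessimizing-move on newer compilers). A minimal illustration:

    #include <memory>

    struct Packet { int sequence = 0; };
    using PacketPointer = std::unique_ptr<Packet>;

    PacketPointer takePacket() {
        PacketPointer packet = std::make_unique<Packet>();
        packet->sequence = 42;
        // Return the named local as-is: NRVO where possible, implicit move otherwise.
        return packet;           // preferred over: return std::move(packet);
    }

    int main() {
        auto packet = takePacket();
        return packet->sequence == 42 ? 0 : 1;
    }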
@ -17,6 +17,7 @@
|
|||
|
||||
#include <QtCore/QCoreApplication>
|
||||
#include <QtCore/QDateTime>
|
||||
#include <QtCore/QJsonObject>
|
||||
#include <QtCore/QThread>
|
||||
|
||||
#include <LogHandler.h>
|
||||
|
@ -27,6 +28,7 @@
|
|||
#include "ControlPacket.h"
|
||||
#include "Packet.h"
|
||||
#include "PacketList.h"
|
||||
#include "../UserActivityLogger.h"
|
||||
#include "Socket.h"
|
||||
|
||||
using namespace udt;
|
||||
|
@ -328,7 +330,39 @@ void SendQueue::run() {
|
|||
nextPacketTimestamp += std::chrono::microseconds(nextPacketDelta);
|
||||
|
||||
// sleep as long as we need until next packet send, if we can
|
||||
const auto timeToSleep = duration_cast<microseconds>(nextPacketTimestamp - p_high_resolution_clock::now());
|
||||
auto now = p_high_resolution_clock::now();
|
||||
auto timeToSleep = duration_cast<microseconds>(nextPacketTimestamp - now);
|
||||
|
||||
// we're seeing SendQueues sleep for a long period of time here,
|
||||
// which can lock the NodeList if it's attempting to clear connections
|
||||
// for now we guard this by capping the time this thread and sleep for
|
||||
|
||||
const microseconds MAX_SEND_QUEUE_SLEEP_USECS { 2000000 };
|
||||
if (timeToSleep > MAX_SEND_QUEUE_SLEEP_USECS) {
|
||||
qWarning() << "udt::SendQueue wanted to sleep for" << timeToSleep.count() << "microseconds";
|
||||
qWarning() << "Capping sleep to" << MAX_SEND_QUEUE_SLEEP_USECS.count();
|
||||
qWarning() << "PSP:" << _packetSendPeriod << "NPD:" << nextPacketDelta
|
||||
<< "NPT:" << nextPacketTimestamp.time_since_epoch().count()
|
||||
<< "NOW:" << now.time_since_epoch().count();
|
||||
|
||||
// alright, we're in a weird state
|
||||
// we want to know why this is happening so we can implement a better fix than this guard
|
||||
// send some details up to the API (if the user allows us) that indicate how we could such a large timeToSleep
|
||||
static const QString SEND_QUEUE_LONG_SLEEP_ACTION = "sendqueue-sleep";
|
||||
|
||||
// setup a json object with the details we want
|
||||
QJsonObject longSleepObject;
|
||||
longSleepObject["timeToSleep"] = qint64(timeToSleep.count());
|
||||
longSleepObject["packetSendPeriod"] = _packetSendPeriod.load();
|
||||
longSleepObject["nextPacketDelta"] = nextPacketDelta;
|
||||
longSleepObject["nextPacketTimestamp"] = qint64(nextPacketTimestamp.time_since_epoch().count());
|
||||
longSleepObject["then"] = qint64(now.time_since_epoch().count());
|
||||
|
||||
// hopefully send this event using the user activity logger
|
||||
UserActivityLogger::getInstance().logAction(SEND_QUEUE_LONG_SLEEP_ACTION, longSleepObject);
|
||||
|
||||
timeToSleep = MAX_SEND_QUEUE_SLEEP_USECS;
|
||||
}
|
||||
|
||||
std::this_thread::sleep_for(timeToSleep);
|
||||
}
|
||||
|
|
|
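The send loop now measures how long it is about to sleep and, if that exceeds a two-second ceiling, logs the pacing details (and reports them through the user activity logger) before clamping the wait, so a wedged packet-send period can no longer park the thread indefinitely. The clamp itself, reduced to a standalone sketch:

    #include <chrono>
    #include <iostream>
    #include <thread>

    int main() {
        using namespace std::chrono;

        // Pretend the pacing math produced a next-send time far in the future.
        auto nextPacketTimestamp = steady_clock::now() + seconds(10);

        auto now = steady_clock::now();
        auto timeToSleep = duration_cast<microseconds>(nextPacketTimestamp - now);

        // Guard against pathological sleeps; 2 s mirrors MAX_SEND_QUEUE_SLEEP_USECS.
        const microseconds MAX_SLEEP { 2000000 };
        if (timeToSleep > MAX_SLEEP) {
            std::cerr << "wanted to sleep for " << timeToSleep.count()
                      << " usecs, capping to " << MAX_SLEEP.count() << " usecs\n";
            timeToSleep = MAX_SLEEP;
        }

        std::this_thread::sleep_for(timeToSleep);
        return 0;
    }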
@ -4,11 +4,11 @@
|
|||
|
||||
#include "PluginContainer.h"
|
||||
|
||||
void DisplayPlugin::activate() {
|
||||
Parent::activate();
|
||||
bool DisplayPlugin::activate() {
|
||||
if (isHmd() && (getHmdScreen() >= 0)) {
|
||||
_container->showDisplayPluginsTools();
|
||||
}
|
||||
return Parent::activate();
|
||||
}
|
||||
|
||||
void DisplayPlugin::deactivate() {
|
||||
|
|
|
@ -59,7 +59,7 @@ class DisplayPlugin : public Plugin {
|
|||
Q_OBJECT
|
||||
using Parent = Plugin;
|
||||
public:
|
||||
void activate() override;
|
||||
bool activate() override;
|
||||
void deactivate() override;
|
||||
virtual bool isHmd() const { return false; }
|
||||
virtual int getHmdScreen() const { return -1; }
|
||||
|
|
|
@ -38,8 +38,10 @@ public:
|
|||
virtual void deinit();
|
||||
|
||||
/// Called when a plugin is being activated for use. May be called multiple times.
|
||||
virtual void activate() {
|
||||
/// Returns true if plugin was successfully activated.
|
||||
virtual bool activate() {
|
||||
_active = true;
|
||||
return _active;
|
||||
}
|
||||
|
||||
/// Called when a plugin is no longer being used. May be called multiple times.
|
||||
|
|
|
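With Plugin::activate() now returning bool, a subclass is expected to report whether activation actually succeeded and, as DisplayPlugin::activate() does, finish with return Parent::activate() so the base class still records the active state. A hedged sketch of a derived plugin honoring that contract (the classes below are invented for illustration, not the interface plugin types):

    #include <iostream>

    class Plugin {
    public:
        virtual ~Plugin() = default;
        /// Returns true if the plugin was successfully activated.
        virtual bool activate() {
            _active = true;
            return _active;
        }
    protected:
        bool _active { false };
    };

    class ExampleDisplayPlugin : public Plugin {
        using Parent = Plugin;
    public:
        bool activate() override {
            if (!openDevice()) {
                return false;              // report failure instead of pretending to be active
            }
            return Parent::activate();     // let the base class record the active state
        }
    private:
        bool openDevice() { return true; } // stand-in for real device setup
    };

    int main() {
        ExampleDisplayPlugin plugin;
        std::cout << (plugin.activate() ? "activated" : "failed to activate") << '\n';
        return 0;
    }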
@ -17,12 +17,14 @@
|
|||
|
||||
#include <gpu/Context.h>
|
||||
|
||||
#include "EngineStats.h"
|
||||
|
||||
using namespace render;
|
||||
|
||||
Engine::Engine() :
|
||||
_sceneContext(std::make_shared<SceneContext>()),
|
||||
_renderContext(std::make_shared<RenderContext>()) {
|
||||
addJob<EngineStats>("Stats");
|
||||
}
|
||||
|
||||
void Engine::load() {
|
||||
|
@ -57,4 +59,6 @@ void Engine::run() {
|
|||
for (auto job : _jobs) {
|
||||
job.run(_sceneContext, _renderContext);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@@ -16,37 +16,37 @@
#include "Context.h"
#include "Task.h"

namespace render {

// The render engine holds all render tasks, and is itself a render task.
// State flows through tasks to jobs via the render and scene contexts -
// the engine should not be known from its jobs.
class Engine : public Task {
public:
Engine();
~Engine() = default;
// The render engine holds all render tasks, and is itself a render task.
// State flows through tasks to jobs via the render and scene contexts -
// the engine should not be known from its jobs.
class Engine : public Task {
public:

// Load any persisted settings, and set up the presets
// This should be run after adding all jobs, and before building ui
void load();
Engine();
~Engine() = default;

// Register the scene
void registerScene(const ScenePointer& scene) { _sceneContext->_scene = scene; }
// Load any persisted settings, and set up the presets
// This should be run after adding all jobs, and before building ui
void load();

// Push a RenderContext
void setRenderContext(const RenderContext& renderContext) { (*_renderContext) = renderContext; }
RenderContextPointer getRenderContext() const { return _renderContext; }
// Register the scene
void registerScene(const ScenePointer& scene) { _sceneContext->_scene = scene; }

// Render a frame
// A frame must have a scene registered and a context set to render
void run();
// Push a RenderContext
void setRenderContext(const RenderContext& renderContext) { (*_renderContext) = renderContext; }
RenderContextPointer getRenderContext() const { return _renderContext; }

protected:
SceneContextPointer _sceneContext;
RenderContextPointer _renderContext;
};
using EnginePointer = std::shared_ptr<Engine>;
// Render a frame
// A frame must have a scene registered and a context set to render
void run();

protected:
SceneContextPointer _sceneContext;
RenderContextPointer _renderContext;
};
using EnginePointer = std::shared_ptr<Engine>;

}
libraries/render/src/render/EngineStats.cpp (new file, 49 lines)
@@ -0,0 +1,49 @@
//
// EngineStats.cpp
// render/src/render
//
// Created by Sam Gateau on 3/27/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "EngineStats.h"

#include <gpu/Texture.h>

using namespace render;

void EngineStats::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
// Tick time

quint64 msecsElapsed = _frameTimer.restart();
double frequency = 1000.0 / msecsElapsed;

// Update the stats
auto config = std::static_pointer_cast<Config>(renderContext->jobConfig);

config->bufferCPUCount = gpu::Buffer::getBufferCPUCount();
config->bufferGPUCount = gpu::Buffer::getBufferGPUCount();
config->bufferCPUMemoryUsage = gpu::Buffer::getBufferCPUMemoryUsage();
config->bufferGPUMemoryUsage = gpu::Buffer::getBufferGPUMemoryUsage();

config->textureCPUCount = gpu::Texture::getTextureCPUCount();
config->textureGPUCount = gpu::Texture::getTextureGPUCount();
config->textureCPUMemoryUsage = gpu::Texture::getTextureCPUMemoryUsage();
config->textureGPUMemoryUsage = gpu::Texture::getTextureGPUMemoryUsage();

gpu::ContextStats gpuStats(_gpuStats);
renderContext->args->_context->getStats(_gpuStats);

config->frameDrawcallCount = _gpuStats._DSNumDrawcalls - gpuStats._DSNumDrawcalls;
config->frameDrawcallRate = config->frameDrawcallCount * frequency;

config->frameTriangleCount = _gpuStats._DSNumTriangles - gpuStats._DSNumTriangles;
config->frameTriangleRate = config->frameTriangleCount * frequency;

config->frameTextureCount = _gpuStats._RSNumTextureBounded - gpuStats._RSNumTextureBounded;
config->frameTextureRate = config->frameTextureCount * frequency;

config->emitDirty();
}
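As a quick sanity check of the rate math above: if _frameTimer reports 16 ms since the last frame, frequency is 1000 / 16 = 62.5 per second, so a frameDrawcallCount of 1500 yields a frameDrawcallRate of 1500 * 62.5 ≈ 93,750 draw calls per second (the 16 ms and 1500 figures are illustrative, not measurements from this change).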
libraries/render/src/render/EngineStats.h (new file, 89 lines)
@@ -0,0 +1,89 @@
//
// EngineStats.h
// render/src/render
//
// Created by Sam Gateau on 3/27/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_render_EngineStats_h
#define hifi_render_EngineStats_h

#include <gpu/Context.h>

#include <QElapsedTimer>

#include "Engine.h"

namespace render {

// A simple job collecting global stats on the Engine / Scene / GPU
class EngineStatsConfig : public Job::Config{
Q_OBJECT

Q_PROPERTY(quint32 bufferCPUCount MEMBER bufferCPUCount NOTIFY dirty)
Q_PROPERTY(quint32 bufferGPUCount MEMBER bufferGPUCount NOTIFY dirty)
Q_PROPERTY(qint64 bufferCPUMemoryUsage MEMBER bufferCPUMemoryUsage NOTIFY dirty)
Q_PROPERTY(qint64 bufferGPUMemoryUsage MEMBER bufferGPUMemoryUsage NOTIFY dirty)

Q_PROPERTY(quint32 textureCPUCount MEMBER textureCPUCount NOTIFY dirty)
Q_PROPERTY(quint32 textureGPUCount MEMBER textureGPUCount NOTIFY dirty)
Q_PROPERTY(qint64 textureCPUMemoryUsage MEMBER textureCPUMemoryUsage NOTIFY dirty)
Q_PROPERTY(qint64 textureGPUMemoryUsage MEMBER textureGPUMemoryUsage NOTIFY dirty)

Q_PROPERTY(quint32 frameDrawcallCount MEMBER frameDrawcallCount NOTIFY dirty)
Q_PROPERTY(quint32 frameDrawcallRate MEMBER frameDrawcallRate NOTIFY dirty)

Q_PROPERTY(quint32 frameTriangleCount MEMBER frameTriangleCount NOTIFY dirty)
Q_PROPERTY(quint32 frameTriangleRate MEMBER frameTriangleRate NOTIFY dirty)

Q_PROPERTY(quint32 frameTextureCount MEMBER frameTextureCount NOTIFY dirty)
Q_PROPERTY(quint32 frameTextureRate MEMBER frameTextureRate NOTIFY dirty)

public:
EngineStatsConfig() : Job::Config(true) {}

quint32 bufferCPUCount{ 0 };
quint32 bufferGPUCount{ 0 };
qint64 bufferCPUMemoryUsage{ 0 };
qint64 bufferGPUMemoryUsage{ 0 };

quint32 textureCPUCount{ 0 };
quint32 textureGPUCount{ 0 };
qint64 textureCPUMemoryUsage{ 0 };
qint64 textureGPUMemoryUsage{ 0 };

quint32 frameDrawcallCount{ 0 };
quint32 frameDrawcallRate{ 0 };

quint32 frameTriangleCount{ 0 };
quint32 frameTriangleRate{ 0 };

quint32 frameTextureCount{ 0 };
quint32 frameTextureRate{ 0 };

void emitDirty() { emit dirty(); }

signals:
void dirty();
};

class EngineStats {
gpu::ContextStats _gpuStats;
QElapsedTimer _frameTimer;
public:
using Config = EngineStatsConfig;
using JobModel = Job::Model<EngineStats, Config>;

EngineStats() { _frameTimer.start(); }

void configure(const Config& configuration) {}
void run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext);
};
}

#endif
@@ -45,18 +45,23 @@ protected:
}

virtual bool process() {
lock();
if (!_items.size()) {
unlock();
_hasItemsMutex.lock();
_hasItems.wait(&_hasItemsMutex, getMaxWait());
_hasItemsMutex.unlock();
} else {
unlock();
}

lock();
if (!_items.size()) {
unlock();
return isStillRunning();
}

Queue processItems;
lock();
processItems.swap(_items);
unlock();
return processQueueItems(processItems);
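The reworked process() above blocks on the wait condition only when the queue is empty, re-checks the queue under the lock, and then swaps the pending items into a local queue so they are handled outside the lock. A self-contained sketch of the same wait/re-check/swap idiom using the standard library (the WorkQueue name and the 100 ms timeout are illustrative assumptions, not code from this diff):

#include <chrono>
#include <condition_variable>
#include <deque>
#include <mutex>

class WorkQueue {
public:
    void push(int item) {
        { std::lock_guard<std::mutex> guard(_mutex); _items.push_back(item); }
        _hasItems.notify_one();
    }
    // Returns the batch to process; may be empty if the wait timed out.
    std::deque<int> takeBatch() {
        std::unique_lock<std::mutex> lock(_mutex);
        if (_items.empty()) {
            _hasItems.wait_for(lock, std::chrono::milliseconds(100));
        }
        std::deque<int> batch;
        batch.swap(_items);  // callers process the batch with the lock released
        return batch;
    }
private:
    std::mutex _mutex;
    std::condition_variable _hasItems;
    std::deque<int> _items;
};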
@@ -14,6 +14,7 @@
#ifndef hifi_SimpleMovingAverage_h
#define hifi_SimpleMovingAverage_h

#include <mutex>
#include <stdint.h>

class SimpleMovingAverage {
@@ -64,4 +65,45 @@ public:
}
};

template <class T, int MAX_NUM_SAMPLES> class ThreadSafeMovingAverage {
public:
void clear() {
std::unique_lock<std::mutex> lock(_lock);
_samples = 0;
}

bool isAverageValid() const {
std::unique_lock<std::mutex> lock(_lock);
return (_samples > 0);
}

void addSample(T sample) {
std::unique_lock<std::mutex> lock(_lock);
if (_samples > 0) {
_average = (sample * WEIGHTING) + (_average * ONE_MINUS_WEIGHTING);
} else {
_average = sample;
}
_samples++;
}

T getAverage() const {
std::unique_lock<std::mutex> lock(_lock);
return _average;
}

size_t getSamples() const {
std::unique_lock<std::mutex> lock(_lock);
return _samples;
}

private:
const float WEIGHTING = 1.0f / (float)MAX_NUM_SAMPLES;
const float ONE_MINUS_WEIGHTING = 1.0f - WEIGHTING;
size_t _samples { 0 };
T _average;
mutable std::mutex _lock;
};

#endif // hifi_SimpleMovingAverage_h
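A short usage sketch for the ThreadSafeMovingAverage template added above; it keeps an exponentially weighted average with a weight of 1/MAX_NUM_SAMPLES per new sample (the frame-time numbers below are made up for illustration):

#include <iostream>

int main() {
    ThreadSafeMovingAverage<float, 10> frameTimes;
    frameTimes.addSample(16.0f);  // first sample seeds the average
    frameTimes.addSample(18.0f);  // later samples are blended in at 1/10 weight
    if (frameTimes.isAverageValid()) {
        std::cout << "average frame time: " << frameTimes.getAverage() << " ms\n";
    }
    return 0;
}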
@@ -8,35 +8,44 @@
#include "QmlWebWindowClass.h"

#include <QtCore/QUrl>
#include <QtCore/QUrlQuery>
#include <QtCore/QThread>

#include <QtQml/QQmlContext>

#include <QtScript/QScriptContext>
#include <QtScript/QScriptEngine>

#include <QtQuick/QQuickItem>

#include <AbstractUriHandler.h>
#include <AccountManager.h>
#include <AddressManager.h>
#include <DependencyManager.h>

#include "OffscreenUi.h"

static const char* const URL_PROPERTY = "source";

// Method called by Qt scripts to create a new web window in the overlay
QScriptValue QmlWebWindowClass::constructor(QScriptContext* context, QScriptEngine* engine) {
return QmlWindowClass::internalConstructor("QmlWebWindow.qml", context, engine,
[&](QObject* object) { return new QmlWebWindowClass(object); });
auto properties = parseArguments(context);
QmlWebWindowClass* retVal { nullptr };
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->executeOnUiThread([&] {
retVal = new QmlWebWindowClass();
retVal->initQml(properties);
}, true);
Q_ASSERT(retVal);
connect(engine, &QScriptEngine::destroyed, retVal, &QmlWindowClass::deleteLater);
return engine->newQObject(retVal);
}

QmlWebWindowClass::QmlWebWindowClass(QObject* qmlWindow) : QmlWindowClass(qmlWindow) {
void QmlWebWindowClass::emitScriptEvent(const QVariant& scriptMessage) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "emitScriptEvent", Qt::QueuedConnection, Q_ARG(QVariant, scriptMessage));
} else {
emit scriptEventReceived(scriptMessage);
}
}

void QmlWebWindowClass::emitWebEvent(const QVariant& webMessage) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "emitWebEvent", Qt::QueuedConnection, Q_ARG(QVariant, webMessage));
} else {
emit webEventReceived(webMessage);
}
}

QString QmlWebWindowClass::getURL() const {
QVariant result = DependencyManager::get<OffscreenUi>()->returnFromUiThread([&]()->QVariant {
@@ -18,14 +18,21 @@ class QmlWebWindowClass : public QmlWindowClass {
public:
static QScriptValue constructor(QScriptContext* context, QScriptEngine* engine);
QmlWebWindowClass(QObject* qmlWindow);

public slots:
public slots:
QString getURL() const;
void setURL(const QString& url);

void emitScriptEvent(const QVariant& scriptMessage);
void emitWebEvent(const QVariant& webMessage);

signals:
void urlChanged();
void scriptEventReceived(const QVariant& message);
void webEventReceived(const QVariant& message);

protected:
QString qmlSource() const override { return "QmlWebWindow.qml"; }
};

#endif
@@ -20,203 +20,113 @@
#include <QtWebSockets/QWebSocketServer>
#include <QtWebSockets/QWebSocket>
#include <QtWebChannel/QWebChannel>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>

#include "OffscreenUi.h"

QWebSocketServer* QmlWindowClass::_webChannelServer { nullptr };
static QWebChannel webChannel;
static const uint16_t WEB_CHANNEL_PORT = 51016;
static std::atomic<int> nextWindowId;
static const char* const SOURCE_PROPERTY = "source";
static const char* const TITLE_PROPERTY = "title";
static const char* const EVENT_BRIDGE_PROPERTY = "eventBridge";
static const char* const WIDTH_PROPERTY = "width";
static const char* const HEIGHT_PROPERTY = "height";
static const char* const VISIBILE_PROPERTY = "visible";
static const char* const TOOLWINDOW_PROPERTY = "toolWindow";
static const uvec2 MAX_QML_WINDOW_SIZE { 1280, 720 };
static const uvec2 MIN_QML_WINDOW_SIZE { 120, 80 };

void QmlScriptEventBridge::emitWebEvent(const QString& data) {
QMetaObject::invokeMethod(this, "webEventReceived", Qt::QueuedConnection, Q_ARG(QString, data));
}

void QmlScriptEventBridge::emitScriptEvent(const QString& data) {
QMetaObject::invokeMethod(this, "scriptEventReceived", Qt::QueuedConnection,
Q_ARG(int, _webWindow->getWindowId()), Q_ARG(QString, data));
}

class QmlWebTransport : public QWebChannelAbstractTransport {
Q_OBJECT
public:
QmlWebTransport(QWebSocket* webSocket) : _webSocket(webSocket) {
// Translate from the websocket layer to the webchannel layer
connect(webSocket, &QWebSocket::textMessageReceived, [this](const QString& message) {
QJsonParseError error;
QJsonDocument document = QJsonDocument::fromJson(message.toUtf8(), &error);
if (error.error || !document.isObject()) {
qWarning() << "Unable to parse incoming JSON message" << message;
return;
}
emit messageReceived(document.object(), this);
});
}

virtual void sendMessage(const QJsonObject &message) override {
// Translate from the webchannel layer to the websocket layer
_webSocket->sendTextMessage(QJsonDocument(message).toJson(QJsonDocument::Compact));
}

private:
QWebSocket* const _webSocket;
};

void QmlWindowClass::setupServer() {
if (!_webChannelServer) {
_webChannelServer = new QWebSocketServer("EventBridge Server", QWebSocketServer::NonSecureMode);
if (!_webChannelServer->listen(QHostAddress::LocalHost, WEB_CHANNEL_PORT)) {
qFatal("Failed to open web socket server.");
}

QObject::connect(_webChannelServer, &QWebSocketServer::newConnection, [] {
webChannel.connectTo(new QmlWebTransport(_webChannelServer->nextPendingConnection()));
});
}
}

QScriptValue QmlWindowClass::internalConstructor(const QString& qmlSource,
QScriptContext* context, QScriptEngine* engine,
std::function<QmlWindowClass*(QObject*)> builder)
{
QVariantMap QmlWindowClass::parseArguments(QScriptContext* context) {
const auto argumentCount = context->argumentCount();
QString url;
QString title;
int width = -1, height = -1;
bool visible = true;
bool toolWindow = false;
QVariantMap properties;
if (argumentCount > 1) {

if (!context->argument(0).isUndefined()) {
title = context->argument(0).toString();
properties[TITLE_PROPERTY] = context->argument(0).toString();
}
if (!context->argument(1).isUndefined()) {
url = context->argument(1).toString();
properties[SOURCE_PROPERTY] = context->argument(1).toString();
}
if (context->argument(2).isNumber()) {
width = context->argument(2).toInt32();
properties[WIDTH_PROPERTY] = context->argument(2).toInt32();
}
if (context->argument(3).isNumber()) {
height = context->argument(3).toInt32();
properties[HEIGHT_PROPERTY] = context->argument(3).toInt32();
}
if (context->argument(4).isBool()) {
toolWindow = context->argument(4).toBool();
properties[TOOLWINDOW_PROPERTY] = context->argument(4).toBool();
}
} else {
auto argumentObject = context->argument(0);
if (!argumentObject.property(TITLE_PROPERTY).isUndefined()) {
title = argumentObject.property(TITLE_PROPERTY).toString();
}
if (!argumentObject.property(SOURCE_PROPERTY).isUndefined()) {
url = argumentObject.property(SOURCE_PROPERTY).toString();
}
if (argumentObject.property(WIDTH_PROPERTY).isNumber()) {
width = argumentObject.property(WIDTH_PROPERTY).toInt32();
}
if (argumentObject.property(HEIGHT_PROPERTY).isNumber()) {
height = argumentObject.property(HEIGHT_PROPERTY).toInt32();
}
if (argumentObject.property(VISIBILE_PROPERTY).isBool()) {
visible = argumentObject.property(VISIBILE_PROPERTY).toBool();
}
if (argumentObject.property(TOOLWINDOW_PROPERTY).isBool()) {
toolWindow = argumentObject.property(TOOLWINDOW_PROPERTY).toBool();
}
properties = context->argument(0).toVariant().toMap();
}

QString url = properties[SOURCE_PROPERTY].toString();
if (!url.startsWith("http") && !url.startsWith("file://") && !url.startsWith("about:")) {
url = QUrl::fromLocalFile(url).toString();
properties[SOURCE_PROPERTY] = QUrl::fromLocalFile(url).toString();
}

if (width != -1 || height != -1) {
width = std::max(100, std::min(1280, width));
height = std::max(100, std::min(720, height));
}

QmlWindowClass* retVal{ nullptr };
auto offscreenUi = DependencyManager::get<OffscreenUi>();

if (toolWindow) {
auto toolWindow = offscreenUi->getToolWindow();
QVariantMap properties;
properties.insert(TITLE_PROPERTY, title);
properties.insert(SOURCE_PROPERTY, url);
if (width != -1 && height != -1) {
properties.insert(WIDTH_PROPERTY, width);
properties.insert(HEIGHT_PROPERTY, height);
}

// Build the event bridge and wrapper on the main thread
QVariant newTabVar;
bool invokeResult = QMetaObject::invokeMethod(toolWindow, "addWebTab", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(QVariant, newTabVar),
Q_ARG(QVariant, QVariant::fromValue(properties)));

QQuickItem* newTab = qvariant_cast<QQuickItem*>(newTabVar);
if (!invokeResult || !newTab) {
return QScriptValue();
}

offscreenUi->returnFromUiThread([&] {
setupServer();
retVal = builder(newTab);
retVal->_toolWindow = true;
registerObject(url.toLower(), retVal);
return QVariant();
});
} else {
// Build the event bridge and wrapper on the main thread
QMetaObject::invokeMethod(offscreenUi.data(), "load", Qt::BlockingQueuedConnection,
Q_ARG(const QString&, qmlSource),
Q_ARG(std::function<void(QQmlContext*, QObject*)>, [&](QQmlContext* context, QObject* object) {
setupServer();
retVal = builder(object);
context->engine()->setObjectOwnership(retVal->_qmlWindow, QQmlEngine::CppOwnership);
registerObject(url.toLower(), retVal);
if (!title.isEmpty()) {
retVal->setTitle(title);
}
if (width != -1 && height != -1) {
retVal->setSize(width, height);
}
object->setProperty(SOURCE_PROPERTY, url);
if (visible) {
object->setProperty("visible", true);
}
}));
}

retVal->_source = url;
connect(engine, &QScriptEngine::destroyed, retVal, &QmlWindowClass::deleteLater);
return engine->newQObject(retVal);
return properties;
}

// Method called by Qt scripts to create a new web window in the overlay
QScriptValue QmlWindowClass::constructor(QScriptContext* context, QScriptEngine* engine) {
return internalConstructor("QmlWindow.qml", context, engine, [&](QObject* object){
return new QmlWindowClass(object);
});
auto properties = parseArguments(context);
QmlWindowClass* retVal { nullptr };
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->executeOnUiThread([&] {
retVal = new QmlWindowClass();
retVal->initQml(properties);
}, true);
Q_ASSERT(retVal);
connect(engine, &QScriptEngine::destroyed, retVal, &QmlWindowClass::deleteLater);
return engine->newQObject(retVal);
}

QmlWindowClass::QmlWindowClass(QObject* qmlWindow)
: _windowId(++nextWindowId), _qmlWindow(qmlWindow)
{
qDebug() << "Created window with ID " << _windowId;
QmlWindowClass::QmlWindowClass() {

}

void QmlWindowClass::initQml(QVariantMap properties) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
_toolWindow = properties.contains(TOOLWINDOW_PROPERTY) && properties[TOOLWINDOW_PROPERTY].toBool();
_source = properties[SOURCE_PROPERTY].toString();

if (_toolWindow) {
// Build the event bridge and wrapper on the main thread
_qmlWindow = offscreenUi->getToolWindow();
properties[EVENT_BRIDGE_PROPERTY] = QVariant::fromValue(this);
QVariant newTabVar;
bool invokeResult = QMetaObject::invokeMethod(_qmlWindow, "addWebTab", Qt::DirectConnection,
Q_RETURN_ARG(QVariant, newTabVar),
Q_ARG(QVariant, QVariant::fromValue(properties)));
Q_ASSERT(invokeResult);
} else {
// Build the event bridge and wrapper on the main thread
offscreenUi->load(qmlSource(), [&](QQmlContext* context, QObject* object) {
_qmlWindow = object;
_qmlWindow->setProperty("eventBridge", QVariant::fromValue(this));
context->engine()->setObjectOwnership(this, QQmlEngine::CppOwnership);
context->engine()->setObjectOwnership(object, QQmlEngine::CppOwnership);
if (properties.contains(TITLE_PROPERTY)) {
object->setProperty(TITLE_PROPERTY, properties[TITLE_PROPERTY].toString());
}
if (properties.contains(HEIGHT_PROPERTY) && properties.contains(WIDTH_PROPERTY)) {
uvec2 requestedSize { properties[WIDTH_PROPERTY].toUInt(), properties[HEIGHT_PROPERTY].toUInt() };
requestedSize = glm::clamp(requestedSize, MIN_QML_WINDOW_SIZE, MAX_QML_WINDOW_SIZE);
asQuickItem()->setSize(QSize(requestedSize.x, requestedSize.y));
}

bool visible = !properties.contains(VISIBILE_PROPERTY) || properties[VISIBILE_PROPERTY].toBool();
object->setProperty(VISIBILE_PROPERTY, visible);
object->setProperty(SOURCE_PROPERTY, _source);

// Forward messages received from QML on to the script
connect(_qmlWindow, SIGNAL(sendToScript(QVariant)), this, SIGNAL(fromQml(const QVariant&)), Qt::QueuedConnection);
});
}
Q_ASSERT(_qmlWindow);
Q_ASSERT(dynamic_cast<const QQuickItem*>(_qmlWindow.data()));
// Forward messages received from QML on to the script
connect(_qmlWindow, SIGNAL(sendToScript(QVariant)), this, SIGNAL(fromQml(const QVariant&)), Qt::QueuedConnection);
}

void QmlWindowClass::sendToQml(const QVariant& message) {
@@ -228,14 +138,6 @@ QmlWindowClass::~QmlWindowClass() {
close();
}

void QmlWindowClass::registerObject(const QString& name, QObject* object) {
webChannel.registerObject(name, object);
}

void QmlWindowClass::deregisterObject(QObject* object) {
webChannel.deregisterObject(object);
}

QQuickItem* QmlWindowClass::asQuickItem() const {
if (_toolWindow) {
return DependencyManager::get<OffscreenUi>()->getToolWindow();
@@ -248,7 +150,6 @@ void QmlWindowClass::setVisible(bool visible) {
if (_toolWindow) {
// For tool window tabs we special case visibility as a function call on the tab parent
// The tool window itself has special logic based on whether any tabs are visible
auto offscreenUi = DependencyManager::get<OffscreenUi>();
QMetaObject::invokeMethod(targetWindow, "showTabForUrl", Qt::QueuedConnection, Q_ARG(QVariant, _source), Q_ARG(QVariant, visible));
} else {
DependencyManager::get<OffscreenUi>()->executeOnUiThread([=] {
@@ -359,5 +260,3 @@ void QmlWindowClass::raise() {
}
});
}

#include "QmlWindowClass.moc"
@@ -13,45 +13,22 @@
#include <QtCore/QPointer>
#include <QtScript/QScriptValue>
#include <QtQuick/QQuickItem>
#include <QtWebChannel/QWebChannelAbstractTransport>

#include <GLMHelpers.h>

class QScriptEngine;
class QScriptContext;
class QmlWindowClass;
class QWebSocketServer;
class QWebSocket;

class QmlScriptEventBridge : public QObject {
Q_OBJECT
public:
QmlScriptEventBridge(const QmlWindowClass* webWindow) : _webWindow(webWindow) {}

public slots :
void emitWebEvent(const QString& data);
void emitScriptEvent(const QString& data);

signals:
void webEventReceived(const QString& data);
void scriptEventReceived(int windowId, const QString& data);

private:
const QmlWindowClass* _webWindow { nullptr };
QWebSocket *_socket { nullptr };
};
// FIXME refactor this class to be a QQuickItem derived type and eliminate the needless wrapping
class QmlWindowClass : public QObject {
Q_OBJECT
Q_PROPERTY(QObject* eventBridge READ getEventBridge CONSTANT)
Q_PROPERTY(int windowId READ getWindowId CONSTANT)
Q_PROPERTY(glm::vec2 position READ getPosition WRITE setPosition NOTIFY positionChanged)
Q_PROPERTY(glm::vec2 size READ getSize WRITE setSize NOTIFY sizeChanged)
Q_PROPERTY(bool visible READ isVisible WRITE setVisible NOTIFY visibilityChanged)

public:
static QScriptValue constructor(QScriptContext* context, QScriptEngine* engine);
QmlWindowClass(QObject* qmlWindow);
QmlWindowClass();
~QmlWindowClass();

public slots:
@@ -69,8 +46,7 @@ public slots:
Q_INVOKABLE void raise();
Q_INVOKABLE void close();
Q_INVOKABLE int getWindowId() const { return _windowId; };
Q_INVOKABLE QmlScriptEventBridge* getEventBridge() const { return _eventBridge; };
Q_INVOKABLE QObject* getEventBridge() { return this; };

// Scripts can use this to send a message to the QML object
void sendToQml(const QVariant& message);
@@ -89,21 +65,18 @@ protected slots:
void hasClosed();

protected:
static QScriptValue internalConstructor(const QString& qmlSource,
QScriptContext* context, QScriptEngine* engine,
std::function<QmlWindowClass*(QObject*)> function);
static void setupServer();
static void registerObject(const QString& name, QObject* object);
static void deregisterObject(QObject* object);
static QWebSocketServer* _webChannelServer;
static QVariantMap parseArguments(QScriptContext* context);
static QScriptValue internalConstructor(QScriptContext* context, QScriptEngine* engine,
std::function<QmlWindowClass*(QVariantMap)> function);

virtual QString qmlSource() const { return "QmlWindow.qml"; }

virtual void initQml(QVariantMap properties);
QQuickItem* asQuickItem() const;
QmlScriptEventBridge* const _eventBridge { new QmlScriptEventBridge(this) };

// FIXME needs to be initialized in the ctor once we have support
// for tool window panes in QML
bool _toolWindow { false };
const int _windowId;
QPointer<QObject> _qmlWindow;
QString _source;
};
@@ -451,10 +451,10 @@ bool NeuronPlugin::isSupported() const {
#endif
}

void NeuronPlugin::activate() {
#ifdef HAVE_NEURON
bool NeuronPlugin::activate() {
InputPlugin::activate();

#ifdef HAVE_NEURON
// register with userInputMapper
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->registerDevice(_inputDevice);
@@ -473,11 +473,15 @@ void NeuronPlugin::activate() {
if (!_socketRef) {
// error
qCCritical(inputplugins) << "NeuronPlugin: error connecting to " << _serverAddress.c_str() << ":" << _serverPort << ", error = " << BRGetLastErrorMessage();
return false;
} else {
qCDebug(inputplugins) << "NeuronPlugin: success connecting to " << _serverAddress.c_str() << ":" << _serverPort;

BRRegisterAutoSyncParmeter(_socketRef, Cmd_CombinationMode);
return true;
}
#else
return false;
#endif
}
@@ -31,7 +31,7 @@ public:
virtual const QString& getName() const override { return NAME; }
const QString& getID() const override { return NEURON_ID_STRING; }

virtual void activate() override;
virtual bool activate() override;
virtual void deactivate() override;

virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
@@ -99,15 +99,19 @@ void SDL2Manager::deinit() {
#endif
}

void SDL2Manager::activate() {
bool SDL2Manager::activate() {
InputPlugin::activate();

#ifdef HAVE_SDL2
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
for (auto joystick : _openJoysticks) {
userInputMapper->registerDevice(joystick);
emit joystickAdded(joystick.get());
}
return true;
#else
return false;
#endif
InputPlugin::activate();
}

void SDL2Manager::deactivate() {
@@ -35,7 +35,7 @@ public:
virtual void deinit() override;

/// Called when a plugin is being activated for use. May be called multiple times.
virtual void activate() override;
virtual bool activate() override;
/// Called when a plugin is no longer being used. May be called multiple times.
virtual void deactivate() override;
@@ -79,7 +79,7 @@ bool SixenseManager::isSupported() const {
#endif
}

void SixenseManager::activate() {
bool SixenseManager::activate() {
InputPlugin::activate();

#ifdef HAVE_SIXENSE
@@ -101,6 +101,9 @@ void SixenseManager::activate() {
loadSettings();
_sixenseLoaded = (sixenseInit() == SIXENSE_SUCCESS);
return _sixenseLoaded;
#else
return false;
#endif
}
@@ -32,7 +32,7 @@ public:
virtual const QString& getName() const override { return NAME; }
virtual const QString& getID() const override { return HYDRA_ID_STRING; }

virtual void activate() override;
virtual bool activate() override;
virtual void deactivate() override;

virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
@@ -34,8 +34,11 @@ void OculusBaseDisplayPlugin::customizeContext() {
Parent::customizeContext();
}

void OculusBaseDisplayPlugin::internalActivate() {
bool OculusBaseDisplayPlugin::internalActivate() {
_session = acquireOculusSession();
if (!_session) {
return false;
}

_hmdDesc = ovr_GetHmdDesc(_session);
@@ -65,7 +68,7 @@ void OculusBaseDisplayPlugin::internalActivate() {
if (!OVR_SUCCESS(ovr_ConfigureTracking(_session,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
qWarning() << "Could not attach to sensor device";
logWarning("Failed to attach to sensor device");
}

// Parent class relies on our _session intialization, so it must come after that.
@@ -81,7 +84,7 @@ void OculusBaseDisplayPlugin::internalActivate() {
// This must come after the initialization, so that the values calculated
// above are available during the customizeContext call (when not running
// in threaded present mode)
Parent::internalActivate();
return Parent::internalActivate();
}

void OculusBaseDisplayPlugin::internalDeactivate() {
@@ -24,7 +24,7 @@ public:

protected:
void customizeContext() override;
void internalActivate() override;
bool internalActivate() override;
void internalDeactivate() override;

protected:
@@ -67,7 +67,7 @@ void OculusDisplayPlugin::hmdPresent() {
ovrLayerHeader* layers = &_sceneLayer.Header;
ovrResult result = ovr_SubmitFrame(_session, _currentRenderFrameIndex, &_viewScaleDesc, &layers, 1);
if (!OVR_SUCCESS(result)) {
qDebug() << result;
logWarning("Failed to present");
}
}
_sceneFbo->Increment();
@@ -10,16 +10,34 @@
#include <atomic>
#include <QtCore/QLoggingCategory>

using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;

Q_DECLARE_LOGGING_CATEGORY(oculus)
Q_LOGGING_CATEGORY(oculus, "hifi.plugins.oculus")

static std::atomic<uint32_t> refCount { 0 };
static ovrSession session { nullptr };

inline ovrErrorInfo getError() {
ovrErrorInfo error;
ovr_GetLastErrorInfo(&error);
return error;
}

void logWarning(const char* what) {
qWarning(oculus) << what << ":" << getError().ErrorString;
}

void logFatal(const char* what) {
std::string error("[oculus] ");
error += what;
error += ": ";
error += getError().ErrorString;
qFatal(error.c_str());
}

bool oculusAvailable() {
ovrDetectResult detect = ovr_Detect(0);
return (detect.IsOculusServiceRunning && detect.IsOculusHMDConnected);
@@ -37,14 +55,14 @@ ovrSession acquireOculusSession() {
init.ConnectionTimeoutMS = 0;
init.LogCallback = nullptr;
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
qCWarning(oculus) << "Failed to initialize Oculus SDK";
logWarning("Failed to initialize Oculus SDK");
return session;
}

Q_ASSERT(0 == refCount);
ovrGraphicsLuid luid;
if (!OVR_SUCCESS(ovr_Create(&session, &luid))) {
qCWarning(oculus) << "Failed to acquire Oculus session";
logWarning("Failed to acquire Oculus session");
return session;
}
}
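The refCount atomic added near the top of OculusHelpers.cpp implies acquire/release pairing around the single shared ovrSession; the matching releaseOculusSession() body is not shown in this hunk. Purely as an assumption about the intended pattern, a generic ref-counted acquire/release sketch might look like this (SharedHandle is illustrative, not the actual implementation):

#include <cstdint>
#include <mutex>

template <typename Handle>
class SharedHandle {
public:
    Handle acquire(Handle (*create)()) {
        std::lock_guard<std::mutex> guard(_mutex);
        if (_refCount++ == 0) {
            _handle = create();  // first user creates the underlying handle
        }
        return _handle;
    }
    void release(void (*destroy)(Handle)) {
        std::lock_guard<std::mutex> guard(_mutex);
        if (_refCount > 0 && --_refCount == 0 && _handle) {
            destroy(_handle);    // last user tears it down
            _handle = Handle{};
        }
    }
private:
    std::mutex _mutex;
    uint32_t _refCount { 0 };
    Handle _handle {};
};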
@@ -105,7 +123,7 @@ void SwapFramebufferWrapper::initColor() {
destroyColor();

if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(_session, GL_SRGB8_ALPHA8, size.x, size.y, &color))) {
qFatal("Unable to create swap textures");
logFatal("Failed to create swap textures");
}

for (int i = 0; i < color->TextureCount; ++i) {
@@ -14,6 +14,8 @@
#include <gl/OglplusHelpers.h>

void logWarning(const char* what);
void logFatal(const char* what);
bool oculusAvailable();
ovrSession acquireOculusSession();
void releaseOculusSession();
@@ -68,18 +68,20 @@ bool OculusLegacyDisplayPlugin::isSupported() const {
return result;
}

void OculusLegacyDisplayPlugin::internalActivate() {
bool OculusLegacyDisplayPlugin::internalActivate() {
Parent::internalActivate();

if (!(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
qFatal("Failed to Initialize SDK");
return false;
}

_hswDismissed = false;
_hmd = ovrHmd_Create(0);
if (!_hmd) {
qFatal("Failed to acquire HMD");
return false;
}

_ipd = ovrHmd_GetFloat(_hmd, OVR_KEY_IPD, _ipd);
@@ -107,6 +109,8 @@ void OculusLegacyDisplayPlugin::internalActivate() {
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
qFatal("Could not attach to sensor device");
}

return true;
}

void OculusLegacyDisplayPlugin::internalDeactivate() {
@@ -31,7 +31,7 @@ public:
virtual float getTargetFrameRate() override;

protected:
virtual void internalActivate() override;
virtual bool internalActivate() override;
virtual void internalDeactivate() override;

virtual void customizeContext() override;
@@ -41,14 +41,18 @@ bool OpenVrDisplayPlugin::isSupported() const {
return !isOculusPresent() && vr::VR_IsHmdPresent();
}

void OpenVrDisplayPlugin::internalActivate() {
bool OpenVrDisplayPlugin::internalActivate() {
Parent::internalActivate();

_container->setIsOptionChecked(StandingHMDSensorMode, true);

if (!_system) {
_system = acquireOpenVrSystem();
}
Q_ASSERT(_system);
if (!_system) {
qWarning() << "Failed to initialize OpenVR";
return false;
}

_system->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
// Recommended render target size is per-eye, so double the X size for
@@ -86,6 +90,8 @@ void OpenVrDisplayPlugin::internalActivate() {
} else {
qDebug() << "OpenVR: error could not get chaperone pointer";
}

return true;
}

void OpenVrDisplayPlugin::internalDeactivate() {
@@ -30,7 +30,7 @@ public:
virtual void updateHeadPose(uint32_t frameIndex) override;

protected:
void internalActivate() override;
bool internalActivate() override;
void internalDeactivate() override;

void hmdPresent() override;
@@ -53,8 +53,9 @@ bool ViveControllerManager::isSupported() const {
return !isOculusPresent() && vr::VR_IsHmdPresent();
}

void ViveControllerManager::activate() {
bool ViveControllerManager::activate() {
InputPlugin::activate();

_container->addMenu(MENU_PATH);
_container->addMenuItem(PluginType::INPUT_PLUGIN, MENU_PATH, RENDER_CONTROLLERS,
[this] (bool clicked) { this->setRenderControllers(clicked); },
@@ -122,6 +123,8 @@ void ViveControllerManager::activate() {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->registerDevice(_inputDevice);
_registeredWithInputMapper = true;

return true;
}

void ViveControllerManager::deactivate() {
@@ -37,7 +37,7 @@ public:
virtual bool isJointController() const override { return true; }
const QString& getName() const override { return NAME; }

virtual void activate() override;
virtual bool activate() override;
virtual void deactivate() override;

virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
@@ -111,8 +111,6 @@ private:
std::shared_ptr<InputDevice> _inputDevice { std::make_shared<InputDevice>(_system) };

static const QString NAME;

};

#endif // hifi__ViveControllerManager
Image diff: Before Size: 61 KiB → After Size: 40 KiB
Image diff: Before Size: 320 KiB → After Size: 110 KiB
Image diff: Before Size: 65 KiB → After Size: 43 KiB