Merge pull request #16 from ericrius1/atpHomeReset

added portkeys to poster and adjusted clock chime time

commit 6bb78fbecf
97 changed files with 2342 additions and 1680 deletions
@@ -1484,10 +1484,10 @@ PropertiesTool = function(opts) {
selections.push(entity);
}
data.selections = selections;
webView.eventBridge.emitScriptEvent(JSON.stringify(data));
webView.emitScriptEvent(JSON.stringify(data));
});

webView.eventBridge.webEventReceived.connect(function(data) {
webView.webEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.type == "print") {
if (data.message) {

@@ -1802,7 +1802,7 @@ var showMenuItem = propertyMenu.addMenuItem("Show in Marketplace");
propertiesTool = PropertiesTool();
var particleExplorerTool = ParticleExplorerTool();
var selectedParticleEntity = 0;
entityListTool.webView.eventBridge.webEventReceived.connect(function(data) {
entityListTool.webView.webEventReceived.connect(function(data) {
var data = JSON.parse(data);
if (data.type == "selectionUpdate") {
var ids = data.entityIds;

@@ -1823,10 +1823,10 @@ entityListTool.webView.eventBridge.webEventReceived.connect(function(data) {
selectedParticleEntity = ids[0];
particleExplorerTool.setActiveParticleEntity(ids[0]);

particleExplorerTool.webView.eventBridge.webEventReceived.connect(function(data) {
particleExplorerTool.webView.webEventReceived.connect(function(data) {
var data = JSON.parse(data);
if (data.messageType === "page_loaded") {
particleExplorerTool.webView.eventBridge.emitScriptEvent(JSON.stringify(particleData));
particleExplorerTool.webView.emitScriptEvent(JSON.stringify(particleData));
}
});
} else {
@@ -9,51 +9,11 @@
//

var EventBridge;

EventBridgeConnectionProxy = function(parent) {
this.parent = parent;
this.realSignal = this.parent.realBridge.scriptEventReceived
this.webWindowId = this.parent.webWindow.windowId;
}

EventBridgeConnectionProxy.prototype.connect = function(callback) {
var that = this;
this.realSignal.connect(function(id, message) {
if (id === that.webWindowId) { callback(message); }
});
}

EventBridgeProxy = function(webWindow) {
this.webWindow = webWindow;
this.realBridge = this.webWindow.eventBridge;
this.scriptEventReceived = new EventBridgeConnectionProxy(this);
}

EventBridgeProxy.prototype.emitWebEvent = function(data) {
this.realBridge.emitWebEvent(data);
}
var WebChannel;

openEventBridge = function(callback) {
EVENT_BRIDGE_URI = "ws://localhost:51016";
socket = new WebSocket(this.EVENT_BRIDGE_URI);

socket.onclose = function() {
console.error("web channel closed");
};

socket.onerror = function(error) {
console.error("web channel error: " + error);
};

socket.onopen = function() {
channel = new QWebChannel(socket, function(channel) {
console.log("Document url is " + document.URL);
var webWindow = channel.objects[document.URL.toLowerCase()];
console.log("WebWindow is " + webWindow)
eventBridgeProxy = new EventBridgeProxy(webWindow);
EventBridge = eventBridgeProxy;
if (callback) { callback(eventBridgeProxy); }
});
}
WebChannel = new QWebChannel(qt.webChannelTransport, function (channel) {
EventBridge = WebChannel.objects.eventBridgeWrapper.eventBridge;
callback(EventBridge);
});
}
@@ -4,21 +4,17 @@
<script type="text/javascript" src="jquery-2.1.4.min.js"></script>
<script type="text/javascript" src="qrc:///qtwebchannel/qwebchannel.js"></script>
<script type="text/javascript" src="eventBridgeLoader.js"></script>

<script>
var myBridge;

window.onload = function() {
openEventBridge(function(eventBridge) {
myBridge = eventBridge;
myBridge.scriptEventReceived.connect(function(message) {
openEventBridge(function() {
EventBridge.scriptEventReceived.connect(function(message) {
console.log("HTML side received message: " + message);
});
});
}

testClick = function() {
myBridge.emitWebEvent("HTML side sending message - button click");
EventBridge.emitWebEvent(["Foo", "Bar", { "baz": 1} ]);
}
</script>
</head>
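The HTML change above drops the page-local myBridge variable: the page now waits for openEventBridge() to populate the global EventBridge and uses it directly. A minimal sketch of that pattern, using only the names that appear in the diff above:

// Minimal sketch of the updated HTML-side pattern from this commit.
window.onload = function() {
    openEventBridge(function() {
        // receive messages sent from the script side
        EventBridge.scriptEventReceived.connect(function(message) {
            console.log("HTML side received message: " + message);
        });
        // send a structured message back to the script side
        EventBridge.emitWebEvent(["Foo", "Bar", { "baz": 1 }]);
    });
};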
@@ -38,14 +38,14 @@ EntityListTool = function(opts) {
type: 'selectionUpdate',
selectedIDs: selectedIDs,
};
webView.eventBridge.emitScriptEvent(JSON.stringify(data));
webView.emitScriptEvent(JSON.stringify(data));
});

that.clearEntityList = function () {
var data = {
type: 'clearEntityList'
}
webView.eventBridge.emitScriptEvent(JSON.stringify(data));
webView.emitScriptEvent(JSON.stringify(data));
};

that.sendUpdate = function() {

@@ -72,11 +72,11 @@ EntityListTool = function(opts) {
entities: entities,
selectedIDs: selectedIDs,
};
webView.eventBridge.emitScriptEvent(JSON.stringify(data));
webView.emitScriptEvent(JSON.stringify(data));
}

webView.eventBridge.webEventReceived.connect(function(data) {
webView.webEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.type == "selectionUpdate") {
var ids = data.entityIds;
@@ -234,11 +234,11 @@ GridTool = function(opts) {
});

horizontalGrid.addListener(function(data) {
webView.eventBridge.emitScriptEvent(JSON.stringify(data));
webView.emitScriptEvent(JSON.stringify(data));
selectionDisplay.updateHandles();
});

webView.eventBridge.webEventReceived.connect(function(data) {
webView.webEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.type == "init") {
horizontalGrid.emitUpdate();
@@ -26,7 +26,7 @@ ParticleExplorerTool = function() {
});

that.webView.setVisible(true);
that.webView.eventBridge.webEventReceived.connect(that.webEventReceived);
that.webView.webEventReceived.connect(that.webEventReceived);
}
@@ -2,32 +2,18 @@ print("Launching web window");

var htmlUrl = Script.resolvePath("..//html/qmlWebTest.html")
webWindow = new OverlayWebWindow('Test Event Bridge', htmlUrl, 320, 240, false);
print("JS Side window: " + webWindow);
print("JS Side bridge: " + webWindow.eventBridge);
webWindow.eventBridge.webEventReceived.connect(function(data) {
webWindow.webEventReceived.connect(function(data) {
print("JS Side event received: " + data);
});

var titles = ["A", "B", "C"];
var titleIndex = 0;

Script.setInterval(function() {
webWindow.eventBridge.emitScriptEvent("JS Event sent");
var size = webWindow.size;
var position = webWindow.position;
print("Window url: " + webWindow.url)
print("Window visible: " + webWindow.visible)
print("Window size: " + size.x + "x" + size.y)
print("Window pos: " + position.x + "x" + position.y)
webWindow.setVisible(!webWindow.visible);
webWindow.setTitle(titles[titleIndex]);
webWindow.setSize(320 + Math.random() * 100, 240 + Math.random() * 100);
titleIndex += 1;
titleIndex %= titles.length;
}, 2 * 1000);
var message = [ Math.random(), Math.random() ];
print("JS Side sending: " + message);
webWindow.emitScriptEvent(message);
}, 5 * 1000);

Script.setTimeout(function() {
print("Closing script");
Script.scriptEnding.connect(function(){
webWindow.close();
Script.stop();
}, 15 * 1000)
webWindow.deleteLater();
});
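Across the script-side diffs above, the ".eventBridge" indirection is removed: the web window and web view objects now expose emitScriptEvent and webEventReceived directly. A minimal sketch of the new script-side usage, assembled only from calls shown in the diffs in this commit:

// Minimal sketch of the post-change script-side pattern (names taken from the diffs above).
var htmlUrl = Script.resolvePath("../html/qmlWebTest.html");
var webWindow = new OverlayWebWindow('Test Event Bridge', htmlUrl, 320, 240, false);

// messages coming from the HTML side arrive directly on the window object
webWindow.webEventReceived.connect(function(data) {
    print("JS side received: " + data);
});

// messages to the HTML side are sent directly on the window object
webWindow.emitScriptEvent("JS side sending a message");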
examples/utilities/tools/render/plotperf/PlotPerf.qml (new file, 186 lines)

@@ -0,0 +1,186 @@
//
// PlotPerf.qml
// examples/utilities/tools/render
//
// Created by Sam Gateau on 3//2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4

Item {
id: root
width: parent.width
height: 100
property string title
property var config
property string parameters

// THis is my hack to get the name of the first property and assign it to a trigger var in order to get
// a signal called whenever the value changed
property var trigger: config[parameters.split(":")[3].split("-")[0]]

property var inputs: parameters.split(":")
property var valueScale: +inputs[0]
property var valueUnit: inputs[1]
property var valueNumDigits: inputs[2]
property var input_VALUE_OFFSET: 3
property var valueMax : 1

property var _values : new Array()
property var tick : 0

function createValues() {
if (inputs.length > input_VALUE_OFFSET) {
for (var i = input_VALUE_OFFSET; i < inputs.length; i++) {
var varProps = inputs[i].split("-")
_values.push( {
value: varProps[0],
valueMax: 1,
numSamplesConstantMax: 0,
valueHistory: new Array(),
label: varProps[1],
color: varProps[2],
scale: (varProps.length > 3 ? varProps[3] : 1),
unit: (varProps.length > 4 ? varProps[4] : valueUnit)
})
}
}
print("in creator" + JSON.stringify(_values));

}

Component.onCompleted: {
createValues();
print(JSON.stringify(_values));

}

function pullFreshValues() {
//print("pullFreshValues");
var VALUE_HISTORY_SIZE = 100;
var UPDATE_CANVAS_RATE = 20;
tick++;

var currentValueMax = 0
for (var i = 0; i < _values.length; i++) {

var currentVal = config[_values[i].value] * _values[i].scale;
_values[i].valueHistory.push(currentVal)
_values[i].numSamplesConstantMax++;

if (_values[i].valueHistory.length > VALUE_HISTORY_SIZE) {
var lostValue = _values[i].valueHistory.shift();
if (lostValue >= _values[i].valueMax) {
_values[i].valueMax *= 0.99
_values[i].numSamplesConstantMax = 0
}
}

if (_values[i].valueMax < currentVal) {
_values[i].valueMax = currentVal;
_values[i].numSamplesConstantMax = 0
}

if (_values[i].numSamplesConstantMax > VALUE_HISTORY_SIZE) {
_values[i].numSamplesConstantMax = 0
_values[i].valueMax *= 0.95 // lower slowly the current max if no new above max since a while
}

if (currentValueMax < _values[i].valueMax) {
currentValueMax = _values[i].valueMax
}
}

if ((valueMax < currentValueMax) || (tick % VALUE_HISTORY_SIZE == 0)) {
valueMax = currentValueMax;
}

if (tick % UPDATE_CANVAS_RATE == 0) {
mycanvas.requestPaint()
}
}
onTriggerChanged: pullFreshValues()

Canvas {
id: mycanvas
anchors.fill:parent
onPaint: {
var lineHeight = 12;

function displayValue(val, unit) {
return (val / root.valueScale).toFixed(root.valueNumDigits) + " " + unit
}

function pixelFromVal(val, valScale) {
return lineHeight + (height - lineHeight) * (1 - (0.9) * val / valueMax);
}
function valueFromPixel(pixY) {
return ((pixY - lineHeight) / (height - lineHeight) - 1) * valueMax / (-0.9);
}
function plotValueHistory(ctx, valHistory, color) {
var widthStep= width / (valHistory.length - 1);

ctx.beginPath();
ctx.strokeStyle= color; // Green path
ctx.lineWidth="2";
ctx.moveTo(0, pixelFromVal(valHistory[0]));

for (var i = 1; i < valHistory.length; i++) {
ctx.lineTo(i * widthStep, pixelFromVal(valHistory[i]));
}

ctx.stroke();
}
function displayValueLegend(ctx, val, num) {
ctx.fillStyle = val.color;
var bestValue = val.valueHistory[val.valueHistory.length -1];
ctx.textAlign = "right";
ctx.fillText(displayValue(bestValue, val.unit), width, (num + 2) * lineHeight * 1.5);
ctx.textAlign = "left";
ctx.fillText(val.label, 0, (num + 2) * lineHeight * 1.5);
}

function displayTitle(ctx, text, maxVal) {
ctx.fillStyle = "grey";
ctx.textAlign = "right";
ctx.fillText(displayValue(valueFromPixel(lineHeight), root.valueUnit), width, lineHeight);

ctx.fillStyle = "white";
ctx.textAlign = "left";
ctx.fillText(text, 0, lineHeight);
}
function displayBackground(ctx) {
ctx.fillStyle = Qt.rgba(0, 0, 0, 0.6);
ctx.fillRect(0, 0, width, height);

ctx.strokeStyle= "grey";
ctx.lineWidth="2";

ctx.beginPath();
ctx.moveTo(0, lineHeight + 1);
ctx.lineTo(width, lineHeight + 1);
ctx.moveTo(0, height);
ctx.lineTo(width, height);
ctx.stroke();
}

var ctx = getContext("2d");
ctx.clearRect(0, 0, width, height);
ctx.font="12px Verdana";

displayBackground(ctx);

for (var i = 0; i < _values.length; i++) {
plotValueHistory(ctx, _values[i].valueHistory, _values[i].color)
displayValueLegend(ctx, _values[i], i)
}

displayTitle(ctx, title, valueMax)
}
}
}
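PlotPerf packs its configuration into the colon-separated "parameters" string: overall value scale, display unit, digit count, then one dash-separated descriptor per plotted curve. A small sketch of how that string decomposes, mirroring the split logic in createValues() above and using one of the strings from stats.qml later in this commit (the print() call assumes the scripting environment used elsewhere in this diff):

// Sketch: decomposing a PlotPerf "parameters" string.
var parameters = "1048576:Mb:1:bufferCPUMemoryUsage-CPU-#00B4EF:bufferGPUMemoryUsage-GPU-#1AC567";
var inputs = parameters.split(":");
var valueScale = +inputs[0];     // 1048576: raw byte counts are divided by 1 MB for display
var valueUnit = inputs[1];       // "Mb": unit shown in the legend
var valueNumDigits = inputs[2];  // "1": decimal digits shown in the legend
// Remaining entries each describe one curve: configProperty-label-color[-scale[-unit]]
for (var i = 3; i < inputs.length; i++) {
    var varProps = inputs[i].split("-");
    print(varProps[0] + " plotted as '" + varProps[1] + "' in color " + varProps[2]);
}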
examples/utilities/tools/render/plotperf/qmldir (new file, 1 line)

@@ -0,0 +1 @@
PlotPerf 1.0 PlotPerf.qml
examples/utilities/tools/render/renderStats.js (new file, 21 lines)

@@ -0,0 +1,21 @@
//
// renderStats.js
// examples/utilities/tools/render
//
// Sam Gateau, created on 3/22/2016.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// Set up the qml ui
var qml = Script.resolvePath('stats.qml');
var window = new OverlayWindow({
title: 'Render Stats',
source: qml,
width: 300,
height: 200
});
window.setPosition(500, 50);
window.closed.connect(function() { Script.stop(); });
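A hedged variation of renderStats.js above that also tears the window down when the script itself ends, not only when the user closes the window. This assumes OverlayWindow exposes close() the way OverlayWebWindow does in the qmlWebTest.js diff earlier in this commit; everything else uses only calls shown above.

// Sketch: open the render-stats window and clean up on script shutdown (close() is assumed).
var qml = Script.resolvePath('stats.qml');
var window = new OverlayWindow({ title: 'Render Stats', source: qml, width: 300, height: 200 });
window.setPosition(500, 50);
window.closed.connect(function() { Script.stop(); });
Script.scriptEnding.connect(function() {
    window.close();
});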
examples/utilities/tools/render/stats.qml (new file, 69 lines)

@@ -0,0 +1,69 @@
//
// stats.qml
// examples/utilities/tools/render
//
// Created by Zach Pomerantz on 2/8/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "plotperf"

Item {
id: statsUI
anchors.fill:parent

Column {
id: stats
spacing: 8
anchors.fill:parent

property var config: Render.getConfig("Stats")

function evalEvenHeight() {
// Why do we have to do that manually ? cannot seem to find a qml / anchor / layout mode that does that ?
return (height - spacing * (children.length - 1)) / children.length
}

PlotPerf {
title: "Num Buffers"
config: stats.config
height: parent.evalEvenHeight()
parameters: "1::0:bufferCPUCount-CPU-#00B4EF:bufferGPUCount-GPU-#1AC567"
}
PlotPerf {
title: "gpu::Buffer Memory"
config: stats.config
height: parent.evalEvenHeight()
parameters: "1048576:Mb:1:bufferCPUMemoryUsage-CPU-#00B4EF:bufferGPUMemoryUsage-GPU-#1AC567"
}

PlotPerf {
title: "Num Textures"
config: stats.config
height: parent.evalEvenHeight()
parameters: "1::0:textureCPUCount-CPU-#00B4EF:textureGPUCount-GPU-#1AC567:frameTextureCount-Frame-#E2334D"
}
PlotPerf {
title: "gpu::Texture Memory"
config: stats.config
height: parent.evalEvenHeight()
parameters: "1048576:Mb:1:textureCPUMemoryUsage-CPU-#00B4EF:textureGPUMemoryUsage-GPU-#1AC567"
}
PlotPerf {
title: "Drawcalls"
config: stats.config
height: parent.evalEvenHeight()
parameters: "1::0:frameDrawcallCount-frame-#E2334D:frameDrawcallRate-rate-#1AC567-0.001-K/s"
}
PlotPerf {
title: "Triangles"
config: stats.config
height: parent.evalEvenHeight()
parameters: "1000:K:0:frameTriangleCount-frame-#E2334D:frameTriangleRate-rate-#1AC567-0.001-MT/s"
}
}
}
@@ -11,7 +11,6 @@

#include "IceServer.h"

#include <openssl/rsa.h>
#include <openssl/x509.h>

#include <QtCore/QJsonDocument>

@@ -68,7 +67,9 @@ bool IceServer::packetVersionMatch(const udt::Packet& packet) {
}

void IceServer::processPacket(std::unique_ptr<udt::Packet> packet) {

_lastPacketTimestamp = QDateTime::currentMSecsSinceEpoch();

auto nlPacket = NLPacket::fromBase(std::move(packet));

// make sure that this packet at least looks like something we can read

@@ -161,15 +162,12 @@ SharedNetworkPeer IceServer::addOrUpdateHeartbeatingPeer(NLPacket& packet) {
}

bool IceServer::isVerifiedHeartbeat(const QUuid& domainID, const QByteArray& plaintext, const QByteArray& signature) {
// check if we have a private key for this domain ID - if we do not then fire off the request for it
// check if we have a public key for this domain ID - if we do not then fire off the request for it
auto it = _domainPublicKeys.find(domainID);
if (it != _domainPublicKeys.end()) {

// attempt to verify the signature for this heartbeat
const unsigned char* publicKeyData = reinterpret_cast<const unsigned char*>(it->second.constData());

// first load up the public key into an RSA struct
RSA* rsaPublicKey = d2i_RSA_PUBKEY(NULL, &publicKeyData, it->second.size());
const auto rsaPublicKey = it->second.get();

if (rsaPublicKey) {
auto hashedPlaintext = QCryptographicHash::hash(plaintext, QCryptographicHash::Sha256);

@@ -180,9 +178,6 @@ bool IceServer::isVerifiedHeartbeat(const QUuid& domainID, const QByteArray& pla
signature.size(),
rsaPublicKey);

// free up the public key and remove connection token before we return
RSA_free(rsaPublicKey);

if (verificationResult == 1) {
// this is the only success case - we return true here to indicate that the heartbeat is verified
return true;

@@ -192,7 +187,7 @@ bool IceServer::isVerifiedHeartbeat(const QUuid& domainID, const QByteArray& pla

} else {
// we can't let this user in since we couldn't convert their public key to an RSA key we could use
qWarning() << "Could not convert in-memory public key for" << domainID << "to usable RSA public key.";
qWarning() << "Public key for" << domainID << "is not a usable RSA* public key.";
qWarning() << "Re-requesting public key from API";
}
}

@@ -240,7 +235,22 @@ void IceServer::publicKeyReplyFinished(QNetworkReply* reply) {
if (responseObject[STATUS_KEY].toString() == SUCCESS_VALUE) {
auto dataObject = responseObject[DATA_KEY].toObject();
if (dataObject.contains(PUBLIC_KEY_KEY)) {
_domainPublicKeys[domainID] = QByteArray::fromBase64(dataObject[PUBLIC_KEY_KEY].toString().toUtf8());

// grab the base 64 public key from the API response
auto apiPublicKey = QByteArray::fromBase64(dataObject[PUBLIC_KEY_KEY].toString().toUtf8());

// convert the downloaded public key to an RSA struct, if possible
const unsigned char* publicKeyData = reinterpret_cast<const unsigned char*>(apiPublicKey.constData());

RSA* rsaPublicKey = d2i_RSA_PUBKEY(NULL, &publicKeyData, apiPublicKey.size());

if (rsaPublicKey) {
_domainPublicKeys[domainID] = { rsaPublicKey, RSA_free };
} else {
qWarning() << "Could not convert in-memory public key for" << domainID << "to usable RSA public key.";
qWarning() << "Public key will be re-requested on next heartbeat.";
}

} else {
qWarning() << "There was no public key present in response for domain with ID" << domainID;
}

@@ -254,6 +264,8 @@ void IceServer::publicKeyReplyFinished(QNetworkReply* reply) {

qWarning() << "Error retreiving public key for domain with ID" << domainID << "-" << reply->errorString();
}

reply->deleteLater();
}

void IceServer::sendPeerInformationPacket(const NetworkPeer& peer, const HifiSockAddr* destinationSockAddr) {

@@ -274,6 +286,11 @@ void IceServer::clearInactivePeers() {

if ((usecTimestampNow() - peer->getLastHeardMicrostamp()) > (PEER_SILENCE_THRESHOLD_MSECS * 1000)) {
qDebug() << "Removing peer from memory for inactivity -" << *peer;

// if we had a public key for this domain, remove it now
_domainPublicKeys.erase(peer->getUUID());

// remove the peer object
peerItem = _activePeers.erase(peerItem);
} else {
// we didn't kill this peer, push the iterator forwards

@@ -288,7 +305,14 @@ bool IceServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url, b

if (connection->requestOperation() == QNetworkAccessManager::GetOperation) {
if (url.path() == "/status") {
connection->respond(HTTPConnection::StatusCode200, QByteArray::number(_activePeers.size()));
// figure out if we respond with 0 (we're good) or 1 (we think we're in trouble)

const quint64 MAX_PACKET_GAP_MS_FOR_STUCK_SOCKET = 10 * 1000;

int statusNumber = (QDateTime::currentMSecsSinceEpoch() - _lastPacketTimestamp > MAX_PACKET_GAP_MS_FOR_STUCK_SOCKET)
? 1 : 0;

connection->respond(HTTPConnection::StatusCode200, QByteArray::number(statusNumber));
}
}
return true;
@@ -16,6 +16,8 @@
#include <QtCore/QSharedPointer>
#include <QUdpSocket>

#include <openssl/rsa.h>

#include <UUIDHasher.h>

#include <NetworkPeer.h>

@@ -52,8 +54,11 @@ private:

HTTPManager _httpManager;

using DomainPublicKeyHash = std::unordered_map<QUuid, QByteArray>;
using RSAUniquePtr = std::unique_ptr<RSA, std::function<void(RSA*)>>;
using DomainPublicKeyHash = std::unordered_map<QUuid, RSAUniquePtr>;
DomainPublicKeyHash _domainPublicKeys;

quint64 _lastPacketTimestamp;
};

#endif // hifi_IceServer_h
@@ -1,6 +1,7 @@
import QtQuick 2.3
import QtQuick.Controls 1.2
import QtWebEngine 1.1
import QtWebChannel 1.0

import "windows" as Windows
import "controls" as Controls

@@ -15,11 +16,24 @@ Windows.Window {
// Don't destroy on close... otherwise the JS/C++ will have a dangling pointer
destroyOnCloseButton: false
property alias source: webview.url
property alias eventBridge: eventBridgeWrapper.eventBridge;

QtObject {
id: eventBridgeWrapper
WebChannel.id: "eventBridgeWrapper"
property var eventBridge;
}

// This is for JS/QML communication, which is unused in a WebWindow,
// but not having this here results in spurious warnings about a
// missing signal
signal sendToScript(var message);

Controls.WebView {
id: webview
url: "about:blank"
anchors.fill: parent
focus: true
webChannel.registeredObjects: [eventBridgeWrapper]
}
} // dialog
@@ -20,6 +20,7 @@ Windows.Window {
// Don't destroy on close... otherwise the JS/C++ will have a dangling pointer
destroyOnCloseButton: false
property var source;
property var eventBridge;
property var component;
property var dynamicContent;
onSourceChanged: {
@@ -1,7 +1,7 @@
import QtQuick 2.5
import QtQuick.Controls 1.4
import QtWebEngine 1.1

import QtWebChannel 1.0
import Qt.labs.settings 1.0

import "windows" as Windows

@@ -37,14 +37,26 @@ Windows.Window {
Repeater {
model: 4
Tab {
// Force loading of the content even if the tab is not visible
// (required for letting the C++ code access the webview)
active: true
enabled: false;
// we need to store the original url here for future identification
enabled: false
property string originalUrl: "";
onEnabledChanged: toolWindow.updateVisiblity();

Controls.WebView {
id: webView;
anchors.fill: parent
enabled: false
property alias eventBridgeWrapper: eventBridgeWrapper

QtObject {
id: eventBridgeWrapper
WebChannel.id: "eventBridgeWrapper"
property var eventBridge;
}

webChannel.registeredObjects: [eventBridgeWrapper]
onEnabledChanged: toolWindow.updateVisiblity();
}
}
}

@@ -113,20 +125,23 @@ Windows.Window {

var tab = tabView.getTab(index);
tab.title = "";
tab.originalUrl = "";
tab.enabled = false;
tab.originalUrl = "";
tab.item.url = "about:blank";
tab.item.enabled = false;
}

function addWebTab(properties) {
if (!properties.source) {
console.warn("Attempted to open Web Tool Pane without URL")
console.warn("Attempted to open Web Tool Pane without URL");
return;
}

var existingTabIndex = findIndexForUrl(properties.source);
if (existingTabIndex >= 0) {
console.log("Existing tab " + existingTabIndex + " found with URL " + properties.source)
return tabView.getTab(existingTabIndex);
console.log("Existing tab " + existingTabIndex + " found with URL " + properties.source);
var tab = tabView.getTab(existingTabIndex);
return tab.item;
}

var freeTabIndex = findFreeTab();

@@ -135,25 +150,28 @@ Windows.Window {
return;
}

var newTab = tabView.getTab(freeTabIndex);
newTab.title = properties.title || "Unknown";
newTab.originalUrl = properties.source;
newTab.item.url = properties.source;
newTab.active = true;

if (properties.width) {
tabView.width = Math.min(Math.max(tabView.width, properties.width),
toolWindow.maxSize.x);
tabView.width = Math.min(Math.max(tabView.width, properties.width), toolWindow.maxSize.x);
}

if (properties.height) {
tabView.height = Math.min(Math.max(tabView.height, properties.height),
toolWindow.maxSize.y);
tabView.height = Math.min(Math.max(tabView.height, properties.height), toolWindow.maxSize.y);
}

console.log("Updating visibility based on child tab added");
newTab.enabledChanged.connect(updateVisiblity)
updateVisiblity();
return newTab
var tab = tabView.getTab(freeTabIndex);
tab.title = properties.title || "Unknown";
tab.enabled = true;
console.log("New tab URL: " + properties.source)
tab.originalUrl = properties.source;

var eventBridge = properties.eventBridge;
console.log("Event bridge: " + eventBridge);

var result = tab.item;
result.enabled = true;
console.log("Setting event bridge: " + eventBridge);
result.eventBridgeWrapper.eventBridge = eventBridge;
result.url = properties.source;
return result;
}
}
@@ -59,6 +59,7 @@ WebEngineView {
request.openIn(newWindow.webView)
}

profile: desktop.browserProfile
// This breaks the webchannel used for passing messages. Fixed in Qt 5.6
// See https://bugreports.qt.io/browse/QTBUG-49521
//profile: desktop.browserProfile
}
@@ -2893,14 +2893,7 @@ void Application::saveSettings() {
bool Application::importEntities(const QString& urlOrFilename) {
_entityClipboard->eraseAllOctreeElements();

QUrl url(urlOrFilename);

// if the URL appears to be invalid or relative, then it is probably a local file
if (!url.isValid() || url.isRelative()) {
url = QUrl::fromLocalFile(urlOrFilename);
}

bool success = _entityClipboard->readFromURL(url.toString());
bool success = _entityClipboard->readFromURL(urlOrFilename);
if (success) {
_entityClipboard->remapIDs();
_entityClipboard->reaverageOctreeElements();

@@ -4886,13 +4879,39 @@ void Application::updateDisplayMode() {
{
std::unique_lock<std::mutex> lock(_displayPluginLock);

auto oldDisplayPlugin = _displayPlugin;
if (_displayPlugin) {
_displayPlugin->deactivate();
}

// FIXME probably excessive and useless context switching
_offscreenContext->makeCurrent();
newDisplayPlugin->activate();

bool active = newDisplayPlugin->activate();

if (!active) {
// If the new plugin fails to activate, fallback to last display
qWarning() << "Failed to activate display: " << newDisplayPlugin->getName();
newDisplayPlugin = oldDisplayPlugin;

if (newDisplayPlugin) {
qWarning() << "Falling back to last display: " << newDisplayPlugin->getName();
active = newDisplayPlugin->activate();
}

// If there is no last display, or
// If the last display fails to activate, fallback to desktop
if (!active) {
newDisplayPlugin = displayPlugins.at(0);
qWarning() << "Falling back to display: " << newDisplayPlugin->getName();
active = newDisplayPlugin->activate();
}

if (!active) {
qFatal("Failed to activate fallback plugin");
}
}

_offscreenContext->makeCurrent();
offscreenUi->resize(fromGlm(newDisplayPlugin->getRecommendedUiSize()));
_offscreenContext->makeCurrent();
@@ -466,8 +466,8 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
* (1.0f - ((float)(now - getHead()->getLookingAtMeStarted()))
/ (LOOKING_AT_ME_DURATION * (float)USECS_PER_SECOND));
if (alpha > 0.0f) {
QSharedPointer<NetworkGeometry> geometry = _skeletonModel->getGeometry();
if (geometry && geometry->isLoaded()) {
if (_skeletonModel->isLoaded()) {
const auto& geometry = _skeletonModel->getFBXGeometry();
const float DEFAULT_EYE_DIAMETER = 0.048f; // Typical human eye
const float RADIUS_INCREMENT = 0.005f;
batch.setModelTransform(Transform());

@@ -475,7 +475,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
glm::vec3 position = getHead()->getLeftEyePosition();
Transform transform;
transform.setTranslation(position);
float eyeDiameter = geometry->getFBXGeometry().leftEyeSize;
float eyeDiameter = geometry.leftEyeSize;
if (eyeDiameter == 0.0f) {
eyeDiameter = DEFAULT_EYE_DIAMETER;
}

@@ -486,7 +486,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {

position = getHead()->getRightEyePosition();
transform.setTranslation(position);
eyeDiameter = geometry->getFBXGeometry().rightEyeSize;
eyeDiameter = geometry.rightEyeSize;
if (eyeDiameter == 0.0f) {
eyeDiameter = DEFAULT_EYE_DIAMETER;
}

@@ -815,7 +815,7 @@ int Avatar::getJointIndex(const QString& name) const {
Q_RETURN_ARG(int, result), Q_ARG(const QString&, name));
return result;
}
return _skeletonModel->isActive() ? _skeletonModel->getGeometry()->getFBXGeometry().getJointIndex(name) : -1;
return _skeletonModel->isActive() ? _skeletonModel->getFBXGeometry().getJointIndex(name) : -1;
}

QStringList Avatar::getJointNames() const {

@@ -825,7 +825,7 @@ QStringList Avatar::getJointNames() const {
Q_RETURN_ARG(QStringList, result));
return result;
}
return _skeletonModel->isActive() ? _skeletonModel->getGeometry()->getFBXGeometry().getJointNames() : QStringList();
return _skeletonModel->isActive() ? _skeletonModel->getFBXGeometry().getJointNames() : QStringList();
}

glm::vec3 Avatar::getJointPosition(int index) const {
@@ -1272,8 +1272,8 @@ void MyAvatar::setVisibleInSceneIfReady(Model* model, render::ScenePointer scene

void MyAvatar::initHeadBones() {
int neckJointIndex = -1;
if (_skeletonModel->getGeometry()) {
neckJointIndex = _skeletonModel->getGeometry()->getFBXGeometry().neckJointIndex;
if (_skeletonModel->isLoaded()) {
neckJointIndex = _skeletonModel->getFBXGeometry().neckJointIndex;
}
if (neckJointIndex == -1) {
return;
@@ -39,12 +39,12 @@ SkeletonModel::~SkeletonModel() {
}

void SkeletonModel::initJointStates() {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const FBXGeometry& geometry = getFBXGeometry();
glm::mat4 modelOffset = glm::scale(_scale) * glm::translate(_offset);
_rig->initJointStates(geometry, modelOffset);

// Determine the default eye position for avatar scale = 1.0
int headJointIndex = _geometry->getFBXGeometry().headJointIndex;
int headJointIndex = geometry.headJointIndex;
if (0 > headJointIndex || headJointIndex >= _rig->getJointStateCount()) {
qCWarning(interfaceapp) << "Bad head joint! Got:" << headJointIndex << "jointCount:" << _rig->getJointStateCount();
}

@@ -52,7 +52,7 @@ void SkeletonModel::initJointStates() {
getEyeModelPositions(leftEyePosition, rightEyePosition);
glm::vec3 midEyePosition = (leftEyePosition + rightEyePosition) / 2.0f;

int rootJointIndex = _geometry->getFBXGeometry().rootJointIndex;
int rootJointIndex = geometry.rootJointIndex;
glm::vec3 rootModelPosition;
getJointPosition(rootJointIndex, rootModelPosition);

@@ -87,10 +87,12 @@ Rig::CharacterControllerState convertCharacterControllerState(CharacterControlle

// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
const FBXGeometry& geometry = getFBXGeometry();

Head* head = _owningAvatar->getHead();

if (_owningAvatar->isMyAvatar()) {
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
const FBXGeometry& geometry = _geometry->getFBXGeometry();

Rig::HeadParameters headParams;
headParams.enableLean = qApp->isHMDMode();

@@ -183,7 +185,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
// Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
// However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
// We will revisit that as priorities allow, and particularly after the new rig/animation/joints.
const FBXGeometry& geometry = _geometry->getFBXGeometry();

// If the head is not positioned, updateEyeJoints won't get the math right
glm::quat headOrientation;

@@ -329,22 +330,23 @@ float SkeletonModel::getRightArmLength() const {
}

bool SkeletonModel::getHeadPosition(glm::vec3& headPosition) const {
return isActive() && getJointPositionInWorldFrame(_geometry->getFBXGeometry().headJointIndex, headPosition);
return isActive() && getJointPositionInWorldFrame(getFBXGeometry().headJointIndex, headPosition);
}

bool SkeletonModel::getNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPositionInWorldFrame(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
return isActive() && getJointPositionInWorldFrame(getFBXGeometry().neckJointIndex, neckPosition);
}

bool SkeletonModel::getLocalNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPosition(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
return isActive() && getJointPosition(getFBXGeometry().neckJointIndex, neckPosition);
}

bool SkeletonModel::getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {
if (!isActive()) {
return false;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const FBXGeometry& geometry = getFBXGeometry();

if (getJointPosition(geometry.leftEyeJointIndex, firstEyePosition) &&
getJointPosition(geometry.rightEyeJointIndex, secondEyePosition)) {
return true;

@@ -386,11 +388,11 @@ float VERY_BIG_MASS = 1.0e6f;

// virtual
void SkeletonModel::computeBoundingShape() {
if (_geometry == NULL || _rig->jointStatesEmpty()) {
if (!isLoaded() || _rig->jointStatesEmpty()) {
return;
}

const FBXGeometry& geometry = _geometry->getFBXGeometry();
const FBXGeometry& geometry = getFBXGeometry();
if (geometry.joints.isEmpty() || geometry.rootJointIndex == -1) {
// rootJointIndex == -1 if the avatar model has no skeleton
return;

@@ -429,7 +431,7 @@ void SkeletonModel::renderBoundingCollisionShapes(gpu::Batch& batch, float scale
}

bool SkeletonModel::hasSkeleton() {
return isActive() ? _geometry->getFBXGeometry().rootJointIndex != -1 : false;
return isActive() ? getFBXGeometry().rootJointIndex != -1 : false;
}

void SkeletonModel::onInvalidate() {
@@ -38,10 +38,10 @@ public:
void updateAttitude();

/// Returns the index of the left hand joint, or -1 if not found.
int getLeftHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().leftHandJointIndex : -1; }
int getLeftHandJointIndex() const { return isActive() ? getFBXGeometry().leftHandJointIndex : -1; }

/// Returns the index of the right hand joint, or -1 if not found.
int getRightHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().rightHandJointIndex : -1; }
int getRightHandJointIndex() const { return isActive() ? getFBXGeometry().rightHandJointIndex : -1; }

bool getLeftGrabPosition(glm::vec3& position) const;
bool getRightGrabPosition(glm::vec3& position) const;
@@ -43,7 +43,7 @@ void SoftAttachmentModel::updateClusterMatrices(glm::vec3 modelPosition, glm::qu
}
_needsUpdateClusterMatrices = false;

const FBXGeometry& geometry = _geometry->getFBXGeometry();
const FBXGeometry& geometry = getFBXGeometry();

glm::mat4 modelToWorld = glm::mat4_cast(modelOrientation);
for (int i = 0; i < _meshStates.size(); i++) {
@@ -19,9 +19,7 @@ const QString Basic2DWindowOpenGLDisplayPlugin::NAME("Desktop");

static const QString FULLSCREEN = "Fullscreen";

void Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
Parent::internalActivate();

bool Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
_framerateActions.clear();
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), FULLSCREEN,
[this](bool clicked) {

@@ -33,6 +31,8 @@ void Basic2DWindowOpenGLDisplayPlugin::internalActivate() {
}, true, false);

updateFramerate();

return Parent::internalActivate();
}

void Basic2DWindowOpenGLDisplayPlugin::submitSceneTexture(uint32_t frameIndex, const gpu::TexturePointer& sceneTexture) {
@@ -22,7 +22,7 @@ public:

virtual float getTargetFrameRate() override { return _framerateTarget ? (float) _framerateTarget : TARGET_FRAMERATE_Basic2DWindowOpenGL; }

virtual void internalActivate() override;
virtual bool internalActivate() override;

virtual void submitSceneTexture(uint32_t frameIndex, const gpu::TexturePointer& sceneTexture) override;
@@ -219,7 +219,7 @@ void OpenGLDisplayPlugin::cleanupForSceneTexture(const gpu::TexturePointer& scen
}

void OpenGLDisplayPlugin::activate() {
bool OpenGLDisplayPlugin::activate() {
if (!_cursorsData.size()) {
auto& cursorManager = Cursor::Manager::instance();
for (const auto iconId : cursorManager.registeredIcons()) {

@@ -238,7 +238,9 @@ void OpenGLDisplayPlugin::activate() {

// Child classes may override this in order to do things like initialize
// libraries, etc
internalActivate();
if (!internalActivate()) {
return false;
}

#if THREADED_PRESENT

@@ -263,7 +265,8 @@ void OpenGLDisplayPlugin::activate() {
customizeContext();
_container->makeRenderingContextCurrent();
#endif
DisplayPlugin::activate();

return DisplayPlugin::activate();
}

void OpenGLDisplayPlugin::deactivate() {
@@ -32,7 +32,7 @@ public:

// These must be final to ensure proper ordering of operations
// between the main thread and the presentation thread
void activate() override final;
bool activate() override final;
void deactivate() override final;

bool eventFilter(QObject* receiver, QEvent* event) override;

@@ -77,7 +77,8 @@ protected:
virtual void customizeContext();
virtual void uncustomizeContext();

virtual void internalActivate() {}
// Returns true on successful activation
virtual bool internalActivate() { return true; }
virtual void internalDeactivate() {}
virtual void cleanupForSceneTexture(const gpu::TexturePointer& sceneTexture);
// Plugin specific functionality to send the composed scene to the output window or device
@@ -32,7 +32,7 @@ glm::uvec2 HmdDisplayPlugin::getRecommendedUiSize() const {
return CompositorHelper::VIRTUAL_SCREEN_SIZE;
}

void HmdDisplayPlugin::internalActivate() {
bool HmdDisplayPlugin::internalActivate() {
_monoPreview = _container->getBoolSetting("monoPreview", DEFAULT_MONO_VIEW);

_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), MONO_PREVIEW,

@@ -41,7 +41,8 @@ void HmdDisplayPlugin::internalActivate() {
_container->setBoolSetting("monoPreview", _monoPreview);
}, true, _monoPreview);
_container->removeMenu(FRAMERATE);
Parent::internalActivate();

return Parent::internalActivate();
}

void HmdDisplayPlugin::customizeContext() {
@@ -33,7 +33,7 @@ protected:
virtual bool isHmdMounted() const = 0;
virtual void postPreview() {};

void internalActivate() override;
bool internalActivate() override;
void compositeOverlay() override;
void compositePointer() override;
void internalPresent() override;
@@ -58,7 +58,7 @@ glm::mat4 StereoDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& basePr
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";

std::vector<QAction*> _screenActions;
void StereoDisplayPlugin::internalActivate() {
bool StereoDisplayPlugin::internalActivate() {
auto screens = qApp->screens();
_screenActions.resize(screens.size());
for (int i = 0; i < screens.size(); ++i) {

@@ -77,7 +77,8 @@ void StereoDisplayPlugin::internalActivate() {

_screen = qApp->primaryScreen();
_container->setFullscreen(_screen);
Parent::internalActivate();

return Parent::internalActivate();
}

void StereoDisplayPlugin::updateScreen() {
@@ -29,7 +29,7 @@ public:
// virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;

protected:
virtual void internalActivate() override;
virtual bool internalActivate() override;
virtual void internalDeactivate() override;
void updateScreen();
@@ -421,8 +421,8 @@ const FBXGeometry* EntityTreeRenderer::getGeometryForEntity(EntityItemPointer en
std::dynamic_pointer_cast<RenderableModelEntityItem>(entityItem);
assert(modelEntityItem); // we need this!!!
ModelPointer model = modelEntityItem->getModel(this);
if (model) {
result = &model->getGeometry()->getFBXGeometry();
if (model && model->isLoaded()) {
result = &model->getFBXGeometry();
}
}
return result;

@@ -446,11 +446,8 @@ const FBXGeometry* EntityTreeRenderer::getCollisionGeometryForEntity(EntityItemP
std::dynamic_pointer_cast<RenderableModelEntityItem>(entityItem);
if (modelEntityItem->hasCompoundShapeURL()) {
ModelPointer model = modelEntityItem->getModel(this);
if (model) {
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = model->getCollisionGeometry();
if (collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) {
result = &collisionNetworkGeometry->getFBXGeometry();
}
if (model && model->isCollisionLoaded()) {
result = &model->getCollisionFBXGeometry();
}
}
}

@@ -463,14 +460,17 @@ void EntityTreeRenderer::processEraseMessage(ReceivedMessage& message, const Sha

ModelPointer EntityTreeRenderer::allocateModel(const QString& url, const QString& collisionUrl) {
ModelPointer model = nullptr;
// Make sure we only create and delete models on the thread that owns the EntityTreeRenderer

// Only create and delete models on the thread that owns the EntityTreeRenderer
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "allocateModel", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(ModelPointer, model),
Q_ARG(const QString&, url));
Q_ARG(const QString&, url),
Q_ARG(const QString&, collisionUrl));

return model;
}

model = std::make_shared<Model>(std::make_shared<Rig>());
model->init();
model->setURL(QUrl(url));

@@ -478,37 +478,20 @@ ModelPointer EntityTreeRenderer::allocateModel(const QString& url, const QString
return model;
}

ModelPointer EntityTreeRenderer::updateModel(ModelPointer original, const QString& newUrl, const QString& collisionUrl) {
ModelPointer model = nullptr;

// The caller shouldn't call us if the URL doesn't need to change. But if they
// do, we just return their original back to them.
if (!original || (QUrl(newUrl) == original->getURL())) {
return original;
}

// Before we do any creating or deleting, make sure we're on our renderer thread
ModelPointer EntityTreeRenderer::updateModel(ModelPointer model, const QString& newUrl, const QString& collisionUrl) {
// Only create and delete models on the thread that owns the EntityTreeRenderer
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "updateModel", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(ModelPointer, model),
Q_ARG(ModelPointer, original),
Q_ARG(const QString&, newUrl));
Q_ARG(ModelPointer, model),
Q_ARG(const QString&, newUrl),
Q_ARG(const QString&, collisionUrl));

return model;
}

// at this point we know we need to replace the model, and we know we're on the
// correct thread, so we can do all our work.
if (original) {
original.reset(); // delete the old model...
}

// create the model and correctly initialize it with the new url
model = std::make_shared<Model>(std::make_shared<Rig>());
model->init();
model->setURL(QUrl(newUrl));
model->setCollisionModelURL(QUrl(collisionUrl));

return model;
}
@@ -69,14 +69,10 @@ void RenderableModelEntityItem::loader() {
_needsModelReload = true;
EntityTreeRenderer* renderer = DependencyManager::get<EntityTreeRenderer>().data();
assert(renderer);
if (!_model || _needsModelReload) {
{
PerformanceTimer perfTimer("getModel");
getModel(renderer);
}
if (_model) {
_model->setURL(getParsedModelURL());
_model->setCollisionModelURL(QUrl(getCompoundShapeURL()));
}
}

void RenderableModelEntityItem::setDimensions(const glm::vec3& value) {

@@ -110,7 +106,7 @@ int RenderableModelEntityItem::readEntitySubclassDataFromBuffer(const unsigned c
QVariantMap RenderableModelEntityItem::parseTexturesToMap(QString textures) {
// If textures are unset, revert to original textures
if (textures == "") {
return _originalTexturesMap;
return _originalTextures;
}

// Legacy: a ,\n-delimited list of filename:"texturepath"

@@ -122,7 +118,7 @@ QVariantMap RenderableModelEntityItem::parseTexturesToMap(QString textures) {
QJsonDocument texturesJson = QJsonDocument::fromJson(textures.toUtf8(), &error);
if (error.error != QJsonParseError::NoError) {
qCWarning(entitiesrenderer) << "Could not evaluate textures property value:" << _textures;
return _originalTexturesMap;
return _originalTextures;
}
return texturesJson.object().toVariantMap();
}

@@ -135,44 +131,23 @@ void RenderableModelEntityItem::remapTextures() {
if (!_model->isLoaded()) {
return; // nothing to do if the model has not yet loaded
}

auto& geometry = _model->getGeometry()->getGeometry();

if (!_originalTexturesRead) {
const QSharedPointer<NetworkGeometry>& networkGeometry = _model->getGeometry();
if (networkGeometry) {
_originalTextures = networkGeometry->getTextureNames();
_originalTexturesMap = parseTexturesToMap(_originalTextures.join(",\n"));
_originalTexturesRead = true;
}
}

if (_currentTextures == _textures) {
return; // nothing to do if our recently mapped textures match our desired textures
}

// since we're changing here, we need to run through our current texture map
// and any textures in the recently mapped texture, that is not in our desired
// textures, we need to "unset"
QVariantMap currentTextureMap = parseTexturesToMap(_currentTextures);
QVariantMap textureMap = parseTexturesToMap(_textures);
_originalTextures = geometry->getTextures();
_originalTexturesRead = true;

foreach(const QString& key, currentTextureMap.keys()) {
// if the desired texture map (what we're setting the textures to) doesn't
// contain this texture, then remove it by setting the URL to null
if (!textureMap.contains(key)) {
QUrl noURL;
qCDebug(entitiesrenderer) << "Removing texture named" << key << "by replacing it with no URL";
_model->setTextureWithNameToURL(key, noURL);
}
// Default to _originalTextures to avoid remapping immediately and lagging on load
_currentTextures = _originalTextures;
}

// here's where we remap any textures if needed...
foreach(const QString& key, textureMap.keys()) {
QUrl newTextureURL = textureMap[key].toUrl();
qCDebug(entitiesrenderer) << "Updating texture named" << key << "to texture at URL" << newTextureURL;
_model->setTextureWithNameToURL(key, newTextureURL);
auto textures = parseTexturesToMap(_textures);

if (textures != _currentTextures) {
geometry->setTextures(textures);
_currentTextures = textures;
}

_currentTextures = _textures;
}

// TODO: we need a solution for changes to the postion/rotation/etc of a model...

@@ -385,13 +360,6 @@ void RenderableModelEntityItem::render(RenderArgs* args) {

if (hasModel()) {
if (_model) {
// check if the URL has changed
auto& currentURL = getParsedModelURL();
if (currentURL != _model->getURL()) {
qCDebug(entitiesrenderer).noquote() << "Updating model URL: " << currentURL.toDisplayString();
_model->setURL(currentURL);
}

render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();

// check to see if when we added our models to the scene they were ready, if they were not ready, then

@@ -456,6 +424,15 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
}
});
updateModelBounds();

// Check if the URL has changed
// Do this last as the getModel is queued for the next frame,
// and we need to keep state directing the model to reinitialize
auto& currentURL = getParsedModelURL();
if (currentURL != _model->getURL()) {
// Defer setting the url to the render thread
getModel(_myRenderer);
}
}
}
} else {

@@ -471,10 +448,8 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
}

ModelPointer RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {
ModelPointer result = nullptr;

if (!renderer) {
return result;
return nullptr;
}

// make sure our renderer is setup

@@ -489,21 +464,22 @@ ModelPointer RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {

_needsModelReload = false; // this is the reload

// if we have a URL, then we will want to end up returning a model...
// If we have a URL, then we will want to end up returning a model...
if (!getModelURL().isEmpty()) {

// if we have a previously allocated model, but its URL doesn't match
// then we need to let our renderer update our model for us.
if (_model && (QUrl(getModelURL()) != _model->getURL() ||
QUrl(getCompoundShapeURL()) != _model->getCollisionURL())) {
result = _model = _myRenderer->updateModel(_model, getModelURL(), getCompoundShapeURL());
// If we don't have a model, allocate one *immediately*
if (!_model) {
_model = _myRenderer->allocateModel(getModelURL(), getCompoundShapeURL());
_needsInitialSimulation = true;
} else if (!_model) { // if we don't yet have a model, then we want our renderer to allocate one
result = _model = _myRenderer->allocateModel(getModelURL(), getCompoundShapeURL());
// If we need to change URLs, update it *after rendering* (to avoid access violations)
} else if ((QUrl(getModelURL()) != _model->getURL() || QUrl(getCompoundShapeURL()) != _model->getCollisionURL())) {
QMetaObject::invokeMethod(_myRenderer, "updateModel", Qt::QueuedConnection,
Q_ARG(ModelPointer, _model),
Q_ARG(const QString&, getModelURL()),
Q_ARG(const QString&, getCompoundShapeURL()));
_needsInitialSimulation = true;
} else { // we already have the model we want...
result = _model;
}
// Else we can just return the _model
// If we have no URL, then we can delete any model we do have...
} else if (_model) {
// remove from scene
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();

@@ -513,11 +489,11 @@ ModelPointer RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {

// release interest
_myRenderer->releaseModel(_model);
result = _model = nullptr;
_model = nullptr;
_needsInitialSimulation = true;
}

return result;
return _model;
}

bool RenderableModelEntityItem::needsToCallUpdate() const {

@@ -526,8 +502,7 @@ bool RenderableModelEntityItem::needsToCallUpdate() const {

void RenderableModelEntityItem::update(const quint64& now) {
if (!_dimensionsInitialized && _model && _model->isActive()) {
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();
if (renderNetworkGeometry && renderNetworkGeometry->isLoaded()) {
if (_model->isLoaded()) {
EntityItemProperties properties;
auto extents = _model->getMeshExtents();
properties.setDimensions(extents.maximum - extents.minimum);

@@ -593,13 +568,8 @@ bool RenderableModelEntityItem::isReadyToComputeShape() {
return false;
}

const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = _model->getCollisionGeometry();
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();

if ((collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) &&
(renderNetworkGeometry && renderNetworkGeometry->isLoaded())) {
if (_model->isLoaded() && _model->isCollisionLoaded()) {
// we have both URLs AND both geometries AND they are both fully loaded.

if (_needsInitialSimulation) {
// the _model's offset will be wrong until _needsInitialSimulation is false
PerformanceTimer perfTimer("_model->simulate");

@@ -624,15 +594,12 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
adjustShapeInfoByRegistration(info);
} else {
updateModelBounds();
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = _model->getCollisionGeometry();

// should never fall in here when collision model not fully loaded
// hence we assert collisionNetworkGeometry is not NULL
assert(collisionNetworkGeometry);

const FBXGeometry& collisionGeometry = collisionNetworkGeometry->getFBXGeometry();
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();
const FBXGeometry& renderGeometry = renderNetworkGeometry->getFBXGeometry();
// hence we assert that all geometries exist and are loaded
assert(_model->isLoaded() && _model->isCollisionLoaded());
const FBXGeometry& renderGeometry = _model->getFBXGeometry();
const FBXGeometry& collisionGeometry = _model->getCollisionFBXGeometry();

_points.clear();
unsigned int i = 0;

@@ -734,10 +701,8 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
@ -734,10 +701,8 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
|
|||
}
|
||||
|
||||
bool RenderableModelEntityItem::contains(const glm::vec3& point) const {
|
||||
if (EntityItem::contains(point) && _model && _model->getCollisionGeometry()) {
|
||||
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = _model->getCollisionGeometry();
|
||||
const FBXGeometry& collisionGeometry = collisionNetworkGeometry->getFBXGeometry();
|
||||
return collisionGeometry.convexHullContains(worldToEntity(point));
|
||||
if (EntityItem::contains(point) && _model && _model->isCollisionLoaded()) {
|
||||
return _model->getCollisionFBXGeometry().convexHullContains(worldToEntity(point));
|
||||
}
|
||||
|
||||
return false;
|
||||
|
|
|
@@ -90,9 +90,8 @@ private:
bool _needsInitialSimulation = true;
bool _needsModelReload = true;
EntityTreeRenderer* _myRenderer = nullptr;
QString _currentTextures;
QStringList _originalTextures;
QVariantMap _originalTexturesMap;
QVariantMap _currentTextures;
QVariantMap _originalTextures;
bool _originalTexturesRead = false;
QVector<QVector<glm::vec3>> _points;
bool _dimensionsInitialized = true;
|
|
|
@@ -529,9 +529,9 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER_NO_SKIP(boundingBox, boundingBox); // gettable, but not settable
}

QString textureNamesList = _textureNames.join(",\n");
QString textureNamesStr = QJsonDocument::fromVariant(_textureNames).toJson();
if (!skipDefaults) {
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER_NO_SKIP(originalTextures, textureNamesList); // gettable, but not settable
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER_NO_SKIP(originalTextures, textureNamesStr); // gettable, but not settable
}

COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_PARENT_ID, parentID);
|
|
@ -245,8 +245,8 @@ public:
|
|||
const glm::vec3& getNaturalPosition() const { return _naturalPosition; }
|
||||
void calculateNaturalPosition(const glm::vec3& min, const glm::vec3& max);
|
||||
|
||||
const QStringList& getTextureNames() const { return _textureNames; }
|
||||
void setTextureNames(const QStringList& value) { _textureNames = value; }
|
||||
const QVariantMap& getTextureNames() const { return _textureNames; }
|
||||
void setTextureNames(const QVariantMap& value) { _textureNames = value; }
|
||||
|
||||
QString getSimulatorIDAsString() const { return _simulationOwner.getID().toString().mid(1,36).toUpper(); }
|
||||
|
||||
|
@ -297,7 +297,7 @@ private:
|
|||
// NOTE: The following are pseudo client-only properties. They are only used in clients that can access
|
||||
// properties of model geometry. But these properties are not serialized like other properties.
|
||||
QVector<SittingPoint> _sittingPoints;
|
||||
QStringList _textureNames;
|
||||
QVariantMap _textureNames;
|
||||
glm::vec3 _naturalDimensions;
|
||||
glm::vec3 _naturalPosition;
|
||||
|
||||
|
|
|
@ -190,25 +190,21 @@ void OffscreenQmlRenderThread::setupFbo() {
|
|||
using namespace oglplus;
|
||||
_textures.setSize(_size);
|
||||
|
||||
// Before making any ogl calls, clear any outstanding errors
|
||||
// FIXME: Something upstream is polluting the context with a GL_INVALID_ENUM,
|
||||
// likely from glewExperimental = true
|
||||
GLenum err = glGetError();
|
||||
if (err != GL_NO_ERROR) {
|
||||
qDebug() << "Clearing outstanding GL error to set up QML FBO:" << glewGetErrorString(err);
|
||||
try {
|
||||
_depthStencil.reset(new Renderbuffer());
|
||||
Context::Bound(Renderbuffer::Target::Renderbuffer, *_depthStencil)
|
||||
.Storage(
|
||||
PixelDataInternalFormat::DepthComponent,
|
||||
_size.x, _size.y);
|
||||
|
||||
_fbo.reset(new Framebuffer());
|
||||
_fbo->Bind(Framebuffer::Target::Draw);
|
||||
_fbo->AttachRenderbuffer(Framebuffer::Target::Draw,
|
||||
FramebufferAttachment::Depth, *_depthStencil);
|
||||
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
|
||||
} catch (oglplus::Error& error) {
|
||||
qWarning() << "OpenGL error in QML render setup: " << error.what();
|
||||
}
|
||||
|
||||
_depthStencil.reset(new Renderbuffer());
|
||||
Context::Bound(Renderbuffer::Target::Renderbuffer, *_depthStencil)
|
||||
.Storage(
|
||||
PixelDataInternalFormat::DepthComponent,
|
||||
_size.x, _size.y);
|
||||
|
||||
_fbo.reset(new Framebuffer());
|
||||
_fbo->Bind(Framebuffer::Target::Draw);
|
||||
_fbo->AttachRenderbuffer(Framebuffer::Target::Draw,
|
||||
FramebufferAttachment::Depth, *_depthStencil);
|
||||
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
|
||||
}
|
||||
|
||||
void OffscreenQmlRenderThread::init() {
|
||||
|
@ -299,10 +295,21 @@ void OffscreenQmlRenderThread::render() {
|
|||
|
||||
try {
|
||||
PROFILE_RANGE("qml_render")
|
||||
TexturePtr texture = _textures.getNextTexture();
|
||||
_fbo->Bind(Framebuffer::Target::Draw);
|
||||
_fbo->AttachTexture(Framebuffer::Target::Draw, FramebufferAttachment::Color, *texture, 0);
|
||||
_fbo->Complete(Framebuffer::Target::Draw);
|
||||
|
||||
TexturePtr texture = _textures.getNextTexture();
|
||||
|
||||
try {
|
||||
_fbo->Bind(Framebuffer::Target::Draw);
|
||||
_fbo->AttachTexture(Framebuffer::Target::Draw, FramebufferAttachment::Color, *texture, 0);
|
||||
_fbo->Complete(Framebuffer::Target::Draw);
|
||||
} catch (oglplus::Error& error) {
|
||||
qWarning() << "OpenGL error in QML render: " << error.what();
|
||||
|
||||
// In case we are failing from a failed setupFbo, reset fbo before next render
|
||||
setupFbo();
|
||||
throw;
|
||||
}
|
||||
|
||||
{
|
||||
PROFILE_RANGE("qml_render->rendercontrol")
|
||||
_renderControl->render();
|
||||
|
@ -311,13 +318,14 @@ void OffscreenQmlRenderThread::render() {
|
|||
// for now just clear the errors
|
||||
glGetError();
|
||||
}
|
||||
|
||||
// FIXME probably unnecessary
|
||||
DefaultFramebuffer().Bind(Framebuffer::Target::Draw);
|
||||
_quickWindow->resetOpenGLState();
|
||||
_escrow.submit(GetName(*texture));
|
||||
_lastRenderTime = usecTimestampNow();
|
||||
} catch (std::runtime_error& error) {
|
||||
qWarning() << "Failed to render QML " << error.what();
|
||||
qWarning() << "Failed to render QML: " << error.what();
|
||||
}
|
||||
}
|
||||
|
||||
|
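The two QML render hunks above wrap the oglplus FBO setup and the per-frame attach in try/catch so a GL failure rebuilds the FBO instead of killing the render thread. A minimal sketch of that recovery pattern, reusing the names from the diff (_fbo, setupFbo, texture) but otherwise illustrative, not part of the change:

try {
    _fbo->Bind(oglplus::Framebuffer::Target::Draw);
    _fbo->AttachTexture(oglplus::Framebuffer::Target::Draw, oglplus::FramebufferAttachment::Color, *texture, 0);
    _fbo->Complete(oglplus::Framebuffer::Target::Draw);   // throws oglplus::Error if the FBO is incomplete
} catch (oglplus::Error& error) {
    qWarning() << "OpenGL error in QML render: " << error.what();
    setupFbo();   // in case an earlier setupFbo failed, rebuild before the next frame
    throw;        // propagate so the outer handler skips this frame
}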
|
|
@ -74,6 +74,11 @@ void Context::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, cons
|
|||
_backend->downloadFramebuffer(srcFramebuffer, region, destImage);
|
||||
}
|
||||
|
||||
|
||||
void Context::getStats(ContextStats& stats) const {
|
||||
_backend->getStats(stats);
|
||||
}
|
||||
|
||||
const Backend::TransformCamera& Backend::TransformCamera::recomputeDerived(const Transform& xformView) const {
|
||||
_projectionInverse = glm::inverse(_projection);
|
||||
|
||||
|
@ -102,3 +107,68 @@ Backend::TransformCamera Backend::TransformCamera::getEyeCamera(int eye, const S
|
|||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Counters for Buffer and Texture usage in GPU/Context
|
||||
std::atomic<uint32_t> Context::_bufferGPUCount{ 0 };
|
||||
std::atomic<Buffer::Size> Context::_bufferGPUMemoryUsage{ 0 };
|
||||
|
||||
std::atomic<uint32_t> Context::_textureGPUCount{ 0 };
|
||||
std::atomic<Texture::Size> Context::_textureGPUMemoryUsage{ 0 };
|
||||
|
||||
void Context::incrementBufferGPUCount() {
|
||||
_bufferGPUCount++;
|
||||
}
|
||||
void Context::decrementBufferGPUCount() {
|
||||
_bufferGPUCount--;
|
||||
}
|
||||
void Context::updateBufferGPUMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (newObjectSize > prevObjectSize) {
|
||||
_bufferGPUMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
} else {
|
||||
_bufferGPUMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
void Context::incrementTextureGPUCount() {
|
||||
_textureGPUCount++;
|
||||
}
|
||||
void Context::decrementTextureGPUCount() {
|
||||
_textureGPUCount--;
|
||||
}
|
||||
void Context::updateTextureGPUMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (newObjectSize > prevObjectSize) {
|
||||
_textureGPUMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
} else {
|
||||
_textureGPUMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t Context::getBufferGPUCount() {
|
||||
return _bufferGPUCount.load();
|
||||
}
|
||||
|
||||
Context::Size Context::getBufferGPUMemoryUsage() {
|
||||
return _bufferGPUMemoryUsage.load();
|
||||
}
|
||||
|
||||
uint32_t Context::getTextureGPUCount() {
|
||||
return _textureGPUCount.load();
|
||||
}
|
||||
|
||||
Context::Size Context::getTextureGPUMemoryUsage() {
|
||||
return _textureGPUMemoryUsage.load();
|
||||
}
|
||||
|
||||
void Backend::incrementBufferGPUCount() { Context::incrementBufferGPUCount(); }
|
||||
void Backend::decrementBufferGPUCount() { Context::decrementBufferGPUCount(); }
|
||||
void Backend::updateBufferGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize) { Context::updateBufferGPUMemoryUsage(prevObjectSize, newObjectSize); }
|
||||
void Backend::incrementTextureGPUCount() { Context::incrementTextureGPUCount(); }
|
||||
void Backend::decrementTextureGPUCount() { Context::decrementTextureGPUCount(); }
|
||||
void Backend::updateTextureGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize) { Context::updateTextureGPUMemoryUsage(prevObjectSize, newObjectSize); }
|
||||
|
||||
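The counter hunk above tracks buffer and texture allocations with process-wide atomics that are adjusted by the size delta rather than recomputed. A standalone sketch of that update pattern (the names trackedBytes and updateTrackedBytes are hypothetical, not from the diff):

#include <atomic>
#include <cstdint>

static std::atomic<uint64_t> trackedBytes{ 0 };

void updateTrackedBytes(uint64_t prevSize, uint64_t newSize) {
    if (prevSize == newSize) {
        return;                                      // nothing changed, skip the atomic entirely
    }
    if (newSize > prevSize) {
        trackedBytes.fetch_add(newSize - prevSize);  // allocation grew
    } else {
        trackedBytes.fetch_sub(prevSize - newSize);  // allocation shrank, or was freed with newSize == 0
    }
}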
|
|
|
@ -27,6 +27,21 @@ class QImage;
|
|||
|
||||
namespace gpu {
|
||||
|
||||
struct ContextStats {
|
||||
public:
|
||||
int _ISNumFormatChanges = 0;
|
||||
int _ISNumInputBufferChanges = 0;
|
||||
int _ISNumIndexBufferChanges = 0;
|
||||
|
||||
int _RSNumTextureBounded = 0;
|
||||
|
||||
int _DSNumDrawcalls = 0;
|
||||
int _DSNumTriangles = 0;
|
||||
|
||||
ContextStats() {}
|
||||
ContextStats(const ContextStats& stats) = default;
|
||||
};
|
||||
|
||||
struct StereoState {
|
||||
bool _enable{ false };
|
||||
bool _skybox{ false };
|
||||
|
@ -100,13 +115,27 @@ public:
|
|||
return reinterpret_cast<T*>(object.gpuObject.getGPUObject());
|
||||
}
|
||||
|
||||
void getStats(ContextStats& stats) const { stats = _stats; }
|
||||
|
||||
|
||||
|
||||
// These should only be accessed by the Backend implementation to report the buffer and texture allocations,
|
||||
// they are NOT public calls
|
||||
static void incrementBufferGPUCount();
|
||||
static void decrementBufferGPUCount();
|
||||
static void updateBufferGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize);
|
||||
static void incrementTextureGPUCount();
|
||||
static void decrementTextureGPUCount();
|
||||
static void updateTextureGPUMemoryUsage(Resource::Size prevObjectSize, Resource::Size newObjectSize);
|
||||
|
||||
protected:
|
||||
StereoState _stereo;
|
||||
ContextStats _stats;
|
||||
};
|
||||
|
||||
class Context {
|
||||
public:
|
||||
using Size = Resource::Size;
|
||||
typedef Backend* (*CreateBackend)();
|
||||
typedef bool (*MakeProgram)(Shader& shader, const Shader::BindingSet& bindings);
|
||||
|
||||
|
@ -125,6 +154,7 @@ public:
|
|||
~Context();
|
||||
|
||||
void render(Batch& batch);
|
||||
|
||||
void enableStereo(bool enable = true);
|
||||
bool isStereo();
|
||||
void setStereoProjections(const mat4 eyeProjections[2]);
|
||||
|
@ -137,6 +167,16 @@ public:
|
|||
// It is here for convenience to easily capture a snapshot
|
||||
void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage);
|
||||
|
||||
// Reporting stats of the context
|
||||
void getStats(ContextStats& stats) const;
|
||||
|
||||
|
||||
static uint32_t getBufferGPUCount();
|
||||
static Size getBufferGPUMemoryUsage();
|
||||
|
||||
static uint32_t getTextureGPUCount();
|
||||
static Size getTextureGPUMemoryUsage();
|
||||
|
||||
protected:
|
||||
Context(const Context& context);
|
||||
|
||||
|
@ -153,6 +193,23 @@ protected:
|
|||
static std::once_flag _initialized;
|
||||
|
||||
friend class Shader;
|
||||
|
||||
// These should only be accessed by the Backend, they are NOT public calls
|
||||
static void incrementBufferGPUCount();
|
||||
static void decrementBufferGPUCount();
|
||||
static void updateBufferGPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
static void incrementTextureGPUCount();
|
||||
static void decrementTextureGPUCount();
|
||||
static void updateTextureGPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
|
||||
// Buffer and Texture Counters
|
||||
static std::atomic<uint32_t> _bufferGPUCount;
|
||||
static std::atomic<Size> _bufferGPUMemoryUsage;
|
||||
|
||||
static std::atomic<uint32_t> _textureGPUCount;
|
||||
static std::atomic<Size> _textureGPUMemoryUsage;
|
||||
|
||||
friend class Backend;
|
||||
};
|
||||
typedef std::shared_ptr<Context> ContextPointer;
|
||||
|
||||
|
|
|
@@ -324,7 +324,10 @@ void GLBackend::do_draw(Batch& batch, size_t paramOffset) {
uint32 numVertices = batch._params[paramOffset + 1]._uint;
uint32 startVertex = batch._params[paramOffset + 0]._uint;
glDrawArrays(mode, startVertex, numVertices);
(void) CHECK_GL_ERROR();
_stats._DSNumTriangles += numVertices / 3;
_stats._DSNumDrawcalls++;

(void)CHECK_GL_ERROR();
}

void GLBackend::do_drawIndexed(Batch& batch, size_t paramOffset) {
|
||||
|
@ -339,6 +342,9 @@ void GLBackend::do_drawIndexed(Batch& batch, size_t paramOffset) {
|
|||
GLvoid* indexBufferByteOffset = reinterpret_cast<GLvoid*>(startIndex * typeByteSize + _input._indexBufferOffset);
|
||||
|
||||
glDrawElements(mode, numIndices, glType, indexBufferByteOffset);
|
||||
_stats._DSNumTriangles += numIndices / 3;
|
||||
_stats._DSNumDrawcalls++;
|
||||
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
|
@ -350,6 +356,9 @@ void GLBackend::do_drawInstanced(Batch& batch, size_t paramOffset) {
|
|||
uint32 startVertex = batch._params[paramOffset + 1]._uint;
|
||||
|
||||
glDrawArraysInstancedARB(mode, startVertex, numVertices, numInstances);
|
||||
_stats._DSNumTriangles += (numInstances * numVertices) / 3;
|
||||
_stats._DSNumDrawcalls += numInstances;
|
||||
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
|
@ -372,6 +381,9 @@ void GLBackend::do_drawIndexedInstanced(Batch& batch, size_t paramOffset) {
|
|||
glDrawElementsInstanced(mode, numIndices, glType, indexBufferByteOffset, numInstances);
|
||||
Q_UNUSED(startInstance);
|
||||
#endif
|
||||
_stats._DSNumTriangles += (numInstances * numIndices) / 3;
|
||||
_stats._DSNumDrawcalls += numInstances;
|
||||
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
|
@ -382,6 +394,7 @@ void GLBackend::do_multiDrawIndirect(Batch& batch, size_t paramOffset) {
|
|||
GLenum mode = _primitiveToGLmode[(Primitive)batch._params[paramOffset + 1]._uint];
|
||||
|
||||
glMultiDrawArraysIndirect(mode, reinterpret_cast<GLvoid*>(_input._indirectBufferOffset), commandCount, (GLsizei)_input._indirectBufferStride);
|
||||
_stats._DSNumDrawcalls += commandCount;
|
||||
#else
|
||||
// FIXME implement the slow path
|
||||
#endif
|
||||
|
@ -396,6 +409,8 @@ void GLBackend::do_multiDrawIndexedIndirect(Batch& batch, size_t paramOffset) {
|
|||
GLenum indexType = _elementTypeToGLType[_input._indexBufferType];
|
||||
|
||||
glMultiDrawElementsIndirect(mode, indexType, reinterpret_cast<GLvoid*>(_input._indirectBufferOffset), commandCount, (GLsizei)_input._indirectBufferStride);
|
||||
_stats._DSNumDrawcalls += commandCount;
|
||||
|
||||
#else
|
||||
// FIXME implement the slow path
|
||||
#endif
|
||||
|
|
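With the per-draw counters added above, the accumulated numbers surface through gpu::Context::getStats() and the new ContextStats struct. A hedged fragment showing how a caller might sample them once per frame (gpuContext stands for whatever gpu::ContextPointer the application owns; that variable and the surrounding frame loop are assumptions):

gpu::ContextStats stats;
gpuContext->getStats(stats);   // copies the backend's running counters
qDebug() << "draw calls:" << stats._DSNumDrawcalls
         << "triangles:" << stats._DSNumTriangles
         << "textures bound:" << stats._RSNumTextureBounded;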
|
@ -67,6 +67,8 @@ public:
|
|||
|
||||
GLBuffer();
|
||||
~GLBuffer();
|
||||
|
||||
void setSize(GLuint size);
|
||||
};
|
||||
static GLBuffer* syncGPUObject(const Buffer& buffer);
|
||||
static GLuint getBufferID(const Buffer& buffer);
|
||||
|
@ -77,10 +79,15 @@ public:
|
|||
Stamp _contentStamp;
|
||||
GLuint _texture;
|
||||
GLenum _target;
|
||||
GLuint _size;
|
||||
|
||||
GLTexture();
|
||||
~GLTexture();
|
||||
|
||||
void setSize(GLuint size);
|
||||
GLuint size() const { return _size; }
|
||||
|
||||
private:
|
||||
GLuint _size;
|
||||
};
|
||||
static GLTexture* syncGPUObject(const Texture& texture);
|
||||
static GLuint getTextureID(const TexturePointer& texture, bool sync = true);
|
||||
|
@ -230,26 +237,11 @@ public:
|
|||
void do_setStateBlend(State::BlendFunction blendFunction);
|
||||
|
||||
void do_setStateColorWriteMask(uint32 mask);
|
||||
|
||||
// Reporting stats of the context
|
||||
class Stats {
|
||||
public:
|
||||
int _ISNumFormatChanges = 0;
|
||||
int _ISNumInputBufferChanges = 0;
|
||||
int _ISNumIndexBufferChanges = 0;
|
||||
|
||||
Stats() {}
|
||||
Stats(const Stats& stats) = default;
|
||||
};
|
||||
|
||||
void getStats(Stats& stats) const { stats = _stats; }
|
||||
|
||||
|
||||
protected:
|
||||
void renderPassTransfer(Batch& batch);
|
||||
void renderPassDraw(Batch& batch);
|
||||
|
||||
Stats _stats;
|
||||
|
||||
// Draw Stage
|
||||
void do_draw(Batch& batch, size_t paramOffset);
|
||||
void do_drawIndexed(Batch& batch, size_t paramOffset);
|
||||
|
|
|
@ -16,12 +16,21 @@ GLBackend::GLBuffer::GLBuffer() :
|
|||
_stamp(0),
|
||||
_buffer(0),
|
||||
_size(0)
|
||||
{}
|
||||
{
|
||||
Backend::incrementBufferGPUCount();
|
||||
}
|
||||
|
||||
GLBackend::GLBuffer::~GLBuffer() {
|
||||
if (_buffer != 0) {
|
||||
glDeleteBuffers(1, &_buffer);
|
||||
}
|
||||
Backend::updateBufferGPUMemoryUsage(_size, 0);
|
||||
Backend::decrementBufferGPUCount();
|
||||
}
|
||||
|
||||
void GLBackend::GLBuffer::setSize(GLuint size) {
|
||||
Backend::updateBufferGPUMemoryUsage(_size, size);
|
||||
_size = size;
|
||||
}
|
||||
|
||||
GLBackend::GLBuffer* GLBackend::syncGPUObject(const Buffer& buffer) {
|
||||
|
@ -46,7 +55,7 @@ GLBackend::GLBuffer* GLBackend::syncGPUObject(const Buffer& buffer) {
|
|||
glBufferData(GL_ARRAY_BUFFER, buffer.getSysmem().getSize(), buffer.getSysmem().readData(), GL_DYNAMIC_DRAW);
|
||||
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
object->_stamp = buffer.getSysmem().getStamp();
|
||||
object->_size = (GLuint)buffer.getSysmem().getSize();
|
||||
object->setSize((GLuint)buffer.getSysmem().getSize());
|
||||
//}
|
||||
(void) CHECK_GL_ERROR();
|
||||
|
||||
|
|
|
@ -251,6 +251,9 @@ void GLBackend::do_setResourceTexture(Batch& batch, size_t paramOffset) {
|
|||
return;
|
||||
}
|
||||
|
||||
// One more True texture bound
|
||||
_stats._RSNumTextureBounded++;
|
||||
|
||||
// Always make sure the GLObject is in sync
|
||||
GLTexture* object = GLBackend::syncGPUObject(*resourceTexture);
|
||||
if (object) {
|
||||
|
|
|
@ -19,12 +19,21 @@ GLBackend::GLTexture::GLTexture() :
|
|||
_texture(0),
|
||||
_target(GL_TEXTURE_2D),
|
||||
_size(0)
|
||||
{}
|
||||
{
|
||||
Backend::incrementTextureGPUCount();
|
||||
}
|
||||
|
||||
GLBackend::GLTexture::~GLTexture() {
|
||||
if (_texture != 0) {
|
||||
glDeleteTextures(1, &_texture);
|
||||
}
|
||||
Backend::updateTextureGPUMemoryUsage(_size, 0);
|
||||
Backend::decrementTextureGPUCount();
|
||||
}
|
||||
|
||||
void GLBackend::GLTexture::setSize(GLuint size) {
|
||||
Backend::updateTextureGPUMemoryUsage(_size, size);
|
||||
_size = size;
|
||||
}
|
||||
|
||||
class GLTexelFormat {
|
||||
|
@ -427,8 +436,8 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
if (needUpdate) {
|
||||
if (texture.isStoredMipFaceAvailable(0)) {
|
||||
Texture::PixelsPointer mip = texture.accessStoredMipFace(0);
|
||||
const GLvoid* bytes = mip->_sysmem.read<Byte>();
|
||||
Element srcFormat = mip->_format;
|
||||
const GLvoid* bytes = mip->readData();
|
||||
Element srcFormat = mip->getFormat();
|
||||
|
||||
GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(texture.getTexelFormat(), srcFormat);
|
||||
|
||||
|
@ -458,8 +467,8 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
if (texture.isStoredMipFaceAvailable(0)) {
|
||||
Texture::PixelsPointer mip = texture.accessStoredMipFace(0);
|
||||
|
||||
bytes = mip->_sysmem.read<Byte>();
|
||||
srcFormat = mip->_format;
|
||||
bytes = mip->readData();
|
||||
srcFormat = mip->getFormat();
|
||||
|
||||
object->_contentStamp = texture.getDataStamp();
|
||||
}
|
||||
|
@ -483,7 +492,7 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
|
||||
object->_storageStamp = texture.getStamp();
|
||||
object->_contentStamp = texture.getDataStamp();
|
||||
object->_size = (GLuint)texture.getSize();
|
||||
object->setSize((GLuint)texture.getSize());
|
||||
}
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, boundTex);
|
||||
|
@ -507,11 +516,11 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
for (int f = 0; f < NUM_FACES; f++) {
|
||||
if (texture.isStoredMipFaceAvailable(0, f)) {
|
||||
Texture::PixelsPointer mipFace = texture.accessStoredMipFace(0, f);
|
||||
Element srcFormat = mipFace->_format;
|
||||
Element srcFormat = mipFace->getFormat();
|
||||
GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(texture.getTexelFormat(), srcFormat);
|
||||
|
||||
glTexSubImage2D(FACE_LAYOUT[f], 0, texelFormat.internalFormat, texture.getWidth(), texture.getWidth(), 0,
|
||||
texelFormat.format, texelFormat.type, (GLvoid*) (mipFace->_sysmem.read<Byte>()));
|
||||
texelFormat.format, texelFormat.type, (GLvoid*) (mipFace->readData()));
|
||||
|
||||
// At this point the mip pixels have been loaded, we can notify
|
||||
texture.notifyMipFaceGPULoaded(0, f);
|
||||
|
@ -536,11 +545,11 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
for (int f = 0; f < NUM_FACES; f++) {
|
||||
if (texture.isStoredMipFaceAvailable(0, f)) {
|
||||
Texture::PixelsPointer mipFace = texture.accessStoredMipFace(0, f);
|
||||
Element srcFormat = mipFace->_format;
|
||||
Element srcFormat = mipFace->getFormat();
|
||||
GLTexelFormat texelFormat = GLTexelFormat::evalGLTexelFormat(texture.getTexelFormat(), srcFormat);
|
||||
|
||||
glTexImage2D(FACE_LAYOUT[f], 0, texelFormat.internalFormat, texture.getWidth(), texture.getWidth(), 0,
|
||||
texelFormat.format, texelFormat.type, (GLvoid*) (mipFace->_sysmem.read<Byte>()));
|
||||
texelFormat.format, texelFormat.type, (GLvoid*) (mipFace->readData()));
|
||||
|
||||
// At this point the mip pixels have been loaded, we can notify
|
||||
texture.notifyMipFaceGPULoaded(0, f);
|
||||
|
@ -561,7 +570,7 @@ GLBackend::GLTexture* GLBackend::syncGPUObject(const Texture& texture) {
|
|||
|
||||
object->_storageStamp = texture.getStamp();
|
||||
object->_contentStamp = texture.getDataStamp();
|
||||
object->_size = (GLuint)texture.getSize();
|
||||
object->setSize((GLuint)texture.getSize());
|
||||
}
|
||||
|
||||
glBindTexture(GL_TEXTURE_CUBE_MAP, boundTex);
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
#include <NumericalConstants.h>
|
||||
#include <QDebug>
|
||||
|
||||
#include "Context.h"
|
||||
|
||||
using namespace gpu;
|
||||
|
||||
class AllocationDebugger {
|
||||
|
@ -232,19 +234,55 @@ Resource::Size Resource::Sysmem::append(Size size, const Byte* bytes) {
|
|||
return 0;
|
||||
}
|
||||
|
||||
std::atomic<uint32_t> Buffer::_bufferCPUCount{ 0 };
|
||||
std::atomic<Buffer::Size> Buffer::_bufferCPUMemoryUsage{ 0 };
|
||||
|
||||
void Buffer::updateBufferCPUMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (prevObjectSize > newObjectSize) {
|
||||
_bufferCPUMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
} else {
|
||||
_bufferCPUMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t Buffer::getBufferCPUCount() {
|
||||
return _bufferCPUCount.load();
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::getBufferCPUMemoryUsage() {
|
||||
return _bufferCPUMemoryUsage.load();
|
||||
}
|
||||
|
||||
uint32_t Buffer::getBufferGPUCount() {
|
||||
return Context::getBufferGPUCount();
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::getBufferGPUMemoryUsage() {
|
||||
return Context::getBufferGPUMemoryUsage();
|
||||
}
|
||||
|
||||
Buffer::Buffer() :
|
||||
Resource(),
|
||||
_sysmem(new Sysmem()) {
|
||||
_bufferCPUCount++;
|
||||
|
||||
}
|
||||
|
||||
Buffer::Buffer(Size size, const Byte* bytes) :
|
||||
Resource(),
|
||||
_sysmem(new Sysmem(size, bytes)) {
|
||||
_bufferCPUCount++;
|
||||
Buffer::updateBufferCPUMemoryUsage(0, _sysmem->getSize());
|
||||
}
|
||||
|
||||
Buffer::Buffer(const Buffer& buf) :
|
||||
Resource(),
|
||||
_sysmem(new Sysmem(buf.getSysmem())) {
|
||||
_bufferCPUCount++;
|
||||
Buffer::updateBufferCPUMemoryUsage(0, _sysmem->getSize());
|
||||
}
|
||||
|
||||
Buffer& Buffer::operator=(const Buffer& buf) {
|
||||
|
@ -253,18 +291,27 @@ Buffer& Buffer::operator=(const Buffer& buf) {
|
|||
}
|
||||
|
||||
Buffer::~Buffer() {
|
||||
_bufferCPUCount--;
|
||||
|
||||
if (_sysmem) {
|
||||
Buffer::updateBufferCPUMemoryUsage(_sysmem->getSize(), 0);
|
||||
delete _sysmem;
|
||||
_sysmem = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::resize(Size size) {
|
||||
return editSysmem().resize(size);
|
||||
auto prevSize = editSysmem().getSize();
|
||||
auto newSize = editSysmem().resize(size);
|
||||
Buffer::updateBufferCPUMemoryUsage(prevSize, newSize);
|
||||
return newSize;
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::setData(Size size, const Byte* data) {
|
||||
return editSysmem().setData(size, data);
|
||||
auto prevSize = editSysmem().getSize();
|
||||
auto newSize = editSysmem().setData(size, data);
|
||||
Buffer::updateBufferCPUMemoryUsage(prevSize, newSize);
|
||||
return newSize;
|
||||
}
|
||||
|
||||
Buffer::Size Buffer::setSubData(Size offset, Size size, const Byte* data) {
|
||||
|
@ -272,6 +319,9 @@ Buffer::Size Buffer::setSubData(Size offset, Size size, const Byte* data) {
|
|||
}
|
||||
|
||||
Buffer::Size Buffer::append(Size size, const Byte* data) {
|
||||
return editSysmem().append( size, data);
|
||||
auto prevSize = editSysmem().getSize();
|
||||
auto newSize = editSysmem().append( size, data);
|
||||
Buffer::updateBufferCPUMemoryUsage(prevSize, newSize);
|
||||
return newSize;
|
||||
}
|
||||
|
||||
|
|
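Each Buffer mutation above now records the old and new Sysmem size, so the CPU-side byte count stays exact. A short fragment showing how the accounting responds (sketch only; the static getters are the ones declared in the hunks above, the byte values are placeholders):

gpu::Buffer buffer;                                  // increments _bufferCPUCount
buffer.resize(4096);                                 // grows the tracked CPU usage by the size delta (0 -> 4096)
buffer.resize(1024);                                 // shrinks it by the delta (4096 -> 1024)
auto liveBuffers = gpu::Buffer::getBufferCPUCount();
auto liveBytes   = gpu::Buffer::getBufferCPUMemoryUsage();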
|
@ -16,6 +16,7 @@
|
|||
#include "Format.h"
|
||||
|
||||
#include <vector>
|
||||
#include <atomic>
|
||||
|
||||
#include <memory>
|
||||
#ifdef _DEBUG
|
||||
|
@ -109,7 +110,15 @@ protected:
|
|||
};
|
||||
|
||||
class Buffer : public Resource {
|
||||
static std::atomic<uint32_t> _bufferCPUCount;
|
||||
static std::atomic<Size> _bufferCPUMemoryUsage;
|
||||
static void updateBufferCPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
|
||||
public:
|
||||
static uint32_t getBufferCPUCount();
|
||||
static Size getBufferCPUMemoryUsage();
|
||||
static uint32_t getBufferGPUCount();
|
||||
static Size getBufferGPUMemoryUsage();
|
||||
|
||||
Buffer();
|
||||
Buffer(Size size, const Byte* bytes);
|
||||
|
|
|
@ -12,20 +12,77 @@
|
|||
#include "Texture.h"
|
||||
|
||||
#include <glm/gtc/constants.hpp>
|
||||
|
||||
#include <QDebug>
|
||||
#include "GPULogging.h"
|
||||
#include "Context.h"
|
||||
|
||||
using namespace gpu;
|
||||
|
||||
|
||||
std::atomic<uint32_t> Texture::_textureCPUCount{ 0 };
|
||||
std::atomic<Texture::Size> Texture::_textureCPUMemoryUsage{ 0 };
|
||||
|
||||
void Texture::updateTextureCPUMemoryUsage(Size prevObjectSize, Size newObjectSize) {
|
||||
if (prevObjectSize == newObjectSize) {
|
||||
return;
|
||||
}
|
||||
if (prevObjectSize > newObjectSize) {
|
||||
_textureCPUMemoryUsage.fetch_sub(prevObjectSize - newObjectSize);
|
||||
} else {
|
||||
_textureCPUMemoryUsage.fetch_add(newObjectSize - prevObjectSize);
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t Texture::getTextureCPUCount() {
|
||||
return _textureCPUCount.load();
|
||||
}
|
||||
|
||||
Texture::Size Texture::getTextureCPUMemoryUsage() {
|
||||
return _textureCPUMemoryUsage.load();
|
||||
}
|
||||
|
||||
uint32_t Texture::getTextureGPUCount() {
|
||||
return Context::getTextureGPUCount();
|
||||
}
|
||||
|
||||
Texture::Size Texture::getTextureGPUMemoryUsage() {
|
||||
return Context::getTextureGPUMemoryUsage();
|
||||
|
||||
}
|
||||
|
||||
uint8 Texture::NUM_FACES_PER_TYPE[NUM_TYPES] = {1, 1, 1, 6};
|
||||
|
||||
Texture::Pixels::Pixels(const Element& format, Size size, const Byte* bytes) :
|
||||
_sysmem(size, bytes),
|
||||
_format(format),
|
||||
_sysmem(size, bytes),
|
||||
_isGPULoaded(false) {
|
||||
Texture::updateTextureCPUMemoryUsage(0, _sysmem.getSize());
|
||||
}
|
||||
|
||||
Texture::Pixels::~Pixels() {
|
||||
Texture::updateTextureCPUMemoryUsage(_sysmem.getSize(), 0);
|
||||
}
|
||||
|
||||
Texture::Size Texture::Pixels::resize(Size pSize) {
|
||||
auto prevSize = _sysmem.getSize();
|
||||
auto newSize = _sysmem.resize(pSize);
|
||||
Texture::updateTextureCPUMemoryUsage(prevSize, newSize);
|
||||
return newSize;
|
||||
}
|
||||
|
||||
Texture::Size Texture::Pixels::setData(const Element& format, Size size, const Byte* bytes ) {
|
||||
_format = format;
|
||||
auto prevSize = _sysmem.getSize();
|
||||
auto newSize = _sysmem.setData(size, bytes);
|
||||
Texture::updateTextureCPUMemoryUsage(prevSize, newSize);
|
||||
_isGPULoaded = false;
|
||||
return newSize;
|
||||
}
|
||||
|
||||
void Texture::Pixels::notifyGPULoaded() {
|
||||
_isGPULoaded = true;
|
||||
auto prevSize = _sysmem.getSize();
|
||||
auto newSize = _sysmem.resize(0);
|
||||
Texture::updateTextureCPUMemoryUsage(prevSize, newSize);
|
||||
}
|
||||
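Pixels now owns every Sysmem mutation, so storing or releasing mip data always passes through Texture::updateTextureCPUMemoryUsage. A minimal sketch of the new accessor flow (format, byteCount, and bytes are placeholders, not values from the diff):

auto pixels = std::make_shared<gpu::Texture::Pixels>(format, byteCount, bytes); // counts the bytes on creation
pixels->setData(format, byteCount, bytes);  // re-assignment adjusts the CPU usage by the size delta
pixels->notifyGPULoaded();                  // frees the sysmem copy and subtracts it from the counter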
|
||||
void Texture::Storage::assignTexture(Texture* texture) {
|
||||
|
@ -59,15 +116,15 @@ const Texture::PixelsPointer Texture::Storage::getMipFace(uint16 level, uint8 fa
|
|||
|
||||
void Texture::Storage::notifyMipFaceGPULoaded(uint16 level, uint8 face) const {
|
||||
PixelsPointer mipFace = getMipFace(level, face);
|
||||
if (mipFace && (_type != TEX_CUBE)) {
|
||||
mipFace->_isGPULoaded = true;
|
||||
mipFace->_sysmem.resize(0);
|
||||
// Free the mips
|
||||
if (mipFace) {
|
||||
mipFace->notifyGPULoaded();
|
||||
}
|
||||
}
|
||||
|
||||
bool Texture::Storage::isMipAvailable(uint16 level, uint8 face) const {
|
||||
PixelsPointer mipFace = getMipFace(level, face);
|
||||
return (mipFace && mipFace->_sysmem.getSize());
|
||||
return (mipFace && mipFace->getSize());
|
||||
}
|
||||
|
||||
bool Texture::Storage::allocateMip(uint16 level) {
|
||||
|
@ -103,9 +160,7 @@ bool Texture::Storage::assignMipData(uint16 level, const Element& format, Size s
|
|||
auto faceBytes = bytes;
|
||||
Size allocated = 0;
|
||||
for (auto& face : mip) {
|
||||
face->_format = format;
|
||||
allocated += face->_sysmem.setData(sizePerFace, faceBytes);
|
||||
face->_isGPULoaded = false;
|
||||
allocated += face->setData(format, sizePerFace, faceBytes);
|
||||
faceBytes += sizePerFace;
|
||||
}
|
||||
|
||||
|
@ -122,9 +177,7 @@ bool Texture::Storage::assignMipFaceData(uint16 level, const Element& format, Si
|
|||
Size allocated = 0;
|
||||
if (face < mip.size()) {
|
||||
auto mipFace = mip[face];
|
||||
mipFace->_format = format;
|
||||
allocated += mipFace->_sysmem.setData(size, bytes);
|
||||
mipFace->_isGPULoaded = false;
|
||||
allocated += mipFace->setData(format, size, bytes);
|
||||
bumpStamp();
|
||||
}
|
||||
|
||||
|
@ -171,10 +224,12 @@ Texture* Texture::createFromStorage(Storage* storage) {
|
|||
Texture::Texture():
|
||||
Resource()
|
||||
{
|
||||
_textureCPUCount++;
|
||||
}
|
||||
|
||||
Texture::~Texture()
|
||||
{
|
||||
_textureCPUCount--;
|
||||
}
|
||||
|
||||
Texture::Size Texture::resize(Type type, const Element& texelFormat, uint16 width, uint16 height, uint16 depth, uint16 numSamples, uint16 numSlices) {
|
||||
|
@ -292,7 +347,7 @@ bool Texture::assignStoredMip(uint16 level, const Element& format, Size size, co
|
|||
}
|
||||
}
|
||||
|
||||
// THen check that the mem buffer passed make sense with its format
|
||||
// Then check that the mem texture passed makes sense with its format
|
||||
Size expectedSize = evalStoredMipSize(level, format);
|
||||
if (size == expectedSize) {
|
||||
_storage->assignMipData(level, format, size, bytes);
|
||||
|
@ -323,7 +378,7 @@ bool Texture::assignStoredMipFace(uint16 level, const Element& format, Size size
|
|||
}
|
||||
}
|
||||
|
||||
// THen check that the mem buffer passed make sense with its format
|
||||
// Then check that the mem texture passed makes sense with its format
|
||||
Size expectedSize = evalStoredMipFaceSize(level, format);
|
||||
if (size == expectedSize) {
|
||||
_storage->assignMipFaceData(level, format, size, bytes, face);
|
||||
|
@ -364,7 +419,7 @@ uint16 Texture::autoGenerateMips(uint16 maxMip) {
|
|||
|
||||
uint16 Texture::getStoredMipWidth(uint16 level) const {
|
||||
PixelsPointer mipFace = accessStoredMipFace(level);
|
||||
if (mipFace && mipFace->_sysmem.getSize()) {
|
||||
if (mipFace && mipFace->getSize()) {
|
||||
return evalMipWidth(level);
|
||||
}
|
||||
return 0;
|
||||
|
@ -372,7 +427,7 @@ uint16 Texture::getStoredMipWidth(uint16 level) const {
|
|||
|
||||
uint16 Texture::getStoredMipHeight(uint16 level) const {
|
||||
PixelsPointer mip = accessStoredMipFace(level);
|
||||
if (mip && mip->_sysmem.getSize()) {
|
||||
if (mip && mip->getSize()) {
|
||||
return evalMipHeight(level);
|
||||
}
|
||||
return 0;
|
||||
|
@ -380,7 +435,7 @@ uint16 Texture::getStoredMipHeight(uint16 level) const {
|
|||
|
||||
uint16 Texture::getStoredMipDepth(uint16 level) const {
|
||||
PixelsPointer mipFace = accessStoredMipFace(level);
|
||||
if (mipFace && mipFace->_sysmem.getSize()) {
|
||||
if (mipFace && mipFace->getSize()) {
|
||||
return evalMipDepth(level);
|
||||
}
|
||||
return 0;
|
||||
|
@ -388,7 +443,7 @@ uint16 Texture::getStoredMipDepth(uint16 level) const {
|
|||
|
||||
uint32 Texture::getStoredMipNumTexels(uint16 level) const {
|
||||
PixelsPointer mipFace = accessStoredMipFace(level);
|
||||
if (mipFace && mipFace->_sysmem.getSize()) {
|
||||
if (mipFace && mipFace->getSize()) {
|
||||
return evalMipWidth(level) * evalMipHeight(level) * evalMipDepth(level);
|
||||
}
|
||||
return 0;
|
||||
|
@ -396,7 +451,7 @@ uint32 Texture::getStoredMipNumTexels(uint16 level) const {
|
|||
|
||||
uint32 Texture::getStoredMipSize(uint16 level) const {
|
||||
PixelsPointer mipFace = accessStoredMipFace(level);
|
||||
if (mipFace && mipFace->_sysmem.getSize()) {
|
||||
if (mipFace && mipFace->getSize()) {
|
||||
return evalMipWidth(level) * evalMipHeight(level) * evalMipDepth(level) * getTexelFormat().getSize();
|
||||
}
|
||||
return 0;
|
||||
|
@ -642,8 +697,8 @@ bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<
|
|||
// for each face of cube texture
|
||||
for(int face=0; face < gpu::Texture::NUM_CUBE_FACES; face++) {
|
||||
|
||||
auto numComponents = cubeTexture.accessStoredMipFace(0,face)->_format.getScalarCount();
|
||||
auto data = cubeTexture.accessStoredMipFace(0,face)->_sysmem.readData();
|
||||
auto numComponents = cubeTexture.accessStoredMipFace(0,face)->getFormat().getScalarCount();
|
||||
auto data = cubeTexture.accessStoredMipFace(0,face)->readData();
|
||||
if (data == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -138,7 +138,14 @@ protected:
|
|||
};
|
||||
|
||||
class Texture : public Resource {
|
||||
static std::atomic<uint32_t> _textureCPUCount;
|
||||
static std::atomic<Size> _textureCPUMemoryUsage;
|
||||
static void updateTextureCPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
|
||||
public:
|
||||
static uint32_t getTextureCPUCount();
|
||||
static Size getTextureCPUMemoryUsage();
|
||||
static uint32_t getTextureGPUCount();
|
||||
static Size getTextureGPUMemoryUsage();
|
||||
|
||||
class Usage {
|
||||
public:
|
||||
|
@ -194,9 +201,21 @@ public:
|
|||
Pixels(const Element& format, Size size, const Byte* bytes);
|
||||
~Pixels();
|
||||
|
||||
Sysmem _sysmem;
|
||||
const Byte* readData() const { return _sysmem.readData(); }
|
||||
Size getSize() const { return _sysmem.getSize(); }
|
||||
Size resize(Size pSize);
|
||||
Size setData(const Element& format, Size size, const Byte* bytes );
|
||||
|
||||
const Element& getFormat() const { return _format; }
|
||||
|
||||
void notifyGPULoaded();
|
||||
|
||||
protected:
|
||||
Element _format;
|
||||
Sysmem _sysmem;
|
||||
bool _isGPULoaded;
|
||||
|
||||
friend class Texture;
|
||||
};
|
||||
typedef std::shared_ptr< Pixels > PixelsPointer;
|
||||
|
||||
|
@ -448,7 +467,7 @@ typedef std::shared_ptr<Texture> TexturePointer;
|
|||
typedef std::vector< TexturePointer > Textures;
|
||||
|
||||
|
||||
// TODO: For now TextureView works with Buffer as a place holder for the Texture.
|
||||
// TODO: For now TextureView works with Texture as a place holder for the Texture.
|
||||
// The overall logic should be about the same except that the Texture will be a real GL Texture under the hood
|
||||
class TextureView {
|
||||
public:
|
||||
|
|
|
@ -1,53 +1,109 @@
|
|||
//
|
||||
// ModelCache.cpp
|
||||
// interface/src/renderer
|
||||
// libraries/model-networking
|
||||
//
|
||||
// Created by Andrzej Kapolka on 6/21/13.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
// Created by Zach Pomerantz on 3/15/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "ModelCache.h"
|
||||
#include <FSTReader.h>
|
||||
#include "FBXReader.h"
|
||||
#include "OBJReader.h"
|
||||
|
||||
#include <cmath>
|
||||
#include <gpu/Batch.h>
|
||||
#include <gpu/Stream.h>
|
||||
|
||||
#include <QNetworkReply>
|
||||
#include <QThreadPool>
|
||||
|
||||
#include <FSTReader.h>
|
||||
#include <NumericalConstants.h>
|
||||
|
||||
#include "TextureCache.h"
|
||||
#include "ModelNetworkingLogging.h"
|
||||
|
||||
#include "model/TextureMap.h"
|
||||
class GeometryReader;
|
||||
|
||||
//#define WANT_DEBUG
|
||||
class GeometryExtra {
|
||||
public:
|
||||
const QVariantHash& mapping;
|
||||
const QUrl& textureBaseUrl;
|
||||
};
|
||||
|
||||
ModelCache::ModelCache()
|
||||
{
|
||||
const qint64 GEOMETRY_DEFAULT_UNUSED_MAX_SIZE = DEFAULT_UNUSED_MAX_SIZE;
|
||||
setUnusedResourceCacheSize(GEOMETRY_DEFAULT_UNUSED_MAX_SIZE);
|
||||
class GeometryMappingResource : public GeometryResource {
|
||||
Q_OBJECT
|
||||
public:
|
||||
GeometryMappingResource(const QUrl& url) : GeometryResource(url) {};
|
||||
|
||||
virtual void downloadFinished(const QByteArray& data) override;
|
||||
|
||||
private slots:
|
||||
void onGeometryMappingLoaded(bool success);
|
||||
|
||||
private:
|
||||
GeometryResource::Pointer _geometryResource;
|
||||
};
|
||||
|
||||
void GeometryMappingResource::downloadFinished(const QByteArray& data) {
|
||||
auto mapping = FSTReader::readMapping(data);
|
||||
|
||||
QString filename = mapping.value("filename").toString();
|
||||
if (filename.isNull()) {
|
||||
qCDebug(modelnetworking) << "Mapping file" << _url << "has no \"filename\" field";
|
||||
finishedLoading(false);
|
||||
} else {
|
||||
QUrl url = _url.resolved(filename);
|
||||
QUrl textureBaseUrl;
|
||||
|
||||
QString texdir = mapping.value("texdir").toString();
|
||||
if (!texdir.isNull()) {
|
||||
if (!texdir.endsWith('/')) {
|
||||
texdir += '/';
|
||||
}
|
||||
textureBaseUrl = _url.resolved(texdir);
|
||||
}
|
||||
|
||||
auto modelCache = DependencyManager::get<ModelCache>();
|
||||
GeometryExtra extra{ mapping, textureBaseUrl };
|
||||
|
||||
// Get the raw GeometryResource, not the wrapped NetworkGeometry
|
||||
_geometryResource = modelCache->getResource(url, QUrl(), true, &extra).staticCast<GeometryResource>();
|
||||
|
||||
if (_geometryResource->isLoaded()) {
|
||||
onGeometryMappingLoaded(!_geometryResource->getURL().isEmpty());
|
||||
} else {
|
||||
connect(_geometryResource.data(), &Resource::finished, this, &GeometryMappingResource::onGeometryMappingLoaded);
|
||||
}
|
||||
|
||||
// Avoid caching nested resources - their references will be held by the parent
|
||||
_geometryResource->_isCacheable = false;
|
||||
}
|
||||
}
|
||||
|
||||
ModelCache::~ModelCache() {
|
||||
void GeometryMappingResource::onGeometryMappingLoaded(bool success) {
|
||||
if (success) {
|
||||
_geometry = _geometryResource->_geometry;
|
||||
_shapes = _geometryResource->_shapes;
|
||||
_meshes = _geometryResource->_meshes;
|
||||
_materials = _geometryResource->_materials;
|
||||
}
|
||||
finishedLoading(success);
|
||||
}
|
||||
|
||||
QSharedPointer<Resource> ModelCache::createResource(const QUrl& url, const QSharedPointer<Resource>& fallback,
|
||||
bool delayLoad, const void* extra) {
|
||||
// NetworkGeometry is no longer a subclass of Resource, but requires this method because it is pure virtual.
|
||||
assert(false);
|
||||
return QSharedPointer<Resource>();
|
||||
}
|
||||
class GeometryReader : public QRunnable {
|
||||
public:
|
||||
GeometryReader(QWeakPointer<Resource>& resource, const QUrl& url, const QVariantHash& mapping,
|
||||
const QByteArray& data) :
|
||||
_resource(resource), _url(url), _mapping(mapping), _data(data) {}
|
||||
virtual ~GeometryReader() = default;
|
||||
|
||||
virtual void run() override;
|
||||
|
||||
GeometryReader::GeometryReader(const QUrl& url, const QByteArray& data, const QVariantHash& mapping) :
|
||||
_url(url),
|
||||
_data(data),
|
||||
_mapping(mapping) {
|
||||
}
|
||||
private:
|
||||
QWeakPointer<Resource> _resource;
|
||||
QUrl _url;
|
||||
QVariantHash _mapping;
|
||||
QByteArray _data;
|
||||
};
|
||||
|
||||
void GeometryReader::run() {
|
||||
auto originalPriority = QThread::currentThread()->priority();
|
||||
|
@ -55,458 +111,389 @@ void GeometryReader::run() {
|
|||
originalPriority = QThread::NormalPriority;
|
||||
}
|
||||
QThread::currentThread()->setPriority(QThread::LowPriority);
|
||||
|
||||
// Ensure the resource is still being requested
|
||||
auto resource = _resource.toStrongRef();
|
||||
if (!resource) {
|
||||
qCWarning(modelnetworking) << "Abandoning load of" << _url << "; could not get strong ref";
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
if (_data.isEmpty()) {
|
||||
throw QString("Reply is NULL ?!");
|
||||
throw QString("reply is NULL");
|
||||
}
|
||||
QString urlname = _url.path().toLower();
|
||||
bool urlValid = true;
|
||||
urlValid &= !urlname.isEmpty();
|
||||
urlValid &= !_url.path().isEmpty();
|
||||
urlValid &= _url.path().toLower().endsWith(".fbx") || _url.path().toLower().endsWith(".obj");
|
||||
|
||||
if (urlValid) {
|
||||
// Let's read the binaries from the network
|
||||
FBXGeometry* fbxgeo = nullptr;
|
||||
QString urlname = _url.path().toLower();
|
||||
if (!urlname.isEmpty() && !_url.path().isEmpty() &&
|
||||
(_url.path().toLower().endsWith(".fbx") || _url.path().toLower().endsWith(".obj"))) {
|
||||
FBXGeometry* fbxGeometry = nullptr;
|
||||
|
||||
if (_url.path().toLower().endsWith(".fbx")) {
|
||||
const bool grabLightmaps = true;
|
||||
const float lightmapLevel = 1.0f;
|
||||
fbxgeo = readFBX(_data, _mapping, _url.path(), grabLightmaps, lightmapLevel);
|
||||
if (fbxgeo->meshes.size() == 0 && fbxgeo->joints.size() == 0) {
|
||||
// empty fbx geometry, indicates error
|
||||
fbxGeometry = readFBX(_data, _mapping, _url.path());
|
||||
if (fbxGeometry->meshes.size() == 0 && fbxGeometry->joints.size() == 0) {
|
||||
throw QString("empty geometry, possibly due to an unsupported FBX version");
|
||||
}
|
||||
} else if (_url.path().toLower().endsWith(".obj")) {
|
||||
fbxgeo = OBJReader().readOBJ(_data, _mapping, _url);
|
||||
fbxGeometry = OBJReader().readOBJ(_data, _mapping, _url);
|
||||
} else {
|
||||
QString errorStr("unsupported format");
|
||||
throw errorStr;
|
||||
throw QString("unsupported format");
|
||||
}
|
||||
emit onSuccess(fbxgeo);
|
||||
|
||||
QMetaObject::invokeMethod(resource.data(), "setGeometryDefinition",
|
||||
Q_ARG(void*, fbxGeometry));
|
||||
} else {
|
||||
throw QString("url is invalid");
|
||||
}
|
||||
|
||||
} catch (const QString& error) {
|
||||
qCDebug(modelnetworking) << "Error reading " << _url << ": " << error;
|
||||
emit onError(NetworkGeometry::ModelParseError, error);
|
||||
QMetaObject::invokeMethod(resource.data(), "finishedLoading", Q_ARG(bool, false));
|
||||
}
|
||||
|
||||
QThread::currentThread()->setPriority(originalPriority);
|
||||
}
|
||||
|
||||
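GeometryReader::run above keeps only a QWeakPointer to the requesting resource and promotes it before parsing, so a model whose Resource has already been destroyed is never touched from the worker thread. The guard in isolation (drawn from the logic above; _resource and _url are the members from the diff, the trailing call is sketched):

auto resource = _resource.toStrongRef();
if (!resource) {
    qCWarning(modelnetworking) << "Abandoning load of" << _url << "; could not get strong ref";
    return;   // the Resource was deleted while this job waited in QThreadPool
}
// ... parse, then hand the result back on the resource's thread:
// QMetaObject::invokeMethod(resource.data(), "setGeometryDefinition", Q_ARG(void*, fbxGeometry));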
NetworkGeometry::NetworkGeometry(const QUrl& url, bool delayLoad, const QVariantHash& mapping, const QUrl& textureBaseUrl) :
|
||||
_url(url),
|
||||
_mapping(mapping),
|
||||
_textureBaseUrl(textureBaseUrl.isValid() ? textureBaseUrl : url) {
|
||||
class GeometryDefinitionResource : public GeometryResource {
|
||||
Q_OBJECT
|
||||
public:
|
||||
GeometryDefinitionResource(const QUrl& url, const QVariantHash& mapping, const QUrl& textureBaseUrl) :
|
||||
GeometryResource(url), _mapping(mapping), _textureBaseUrl(textureBaseUrl.isValid() ? textureBaseUrl : url) {}
|
||||
|
||||
if (delayLoad) {
|
||||
_state = DelayState;
|
||||
} else {
|
||||
attemptRequestInternal();
|
||||
}
|
||||
virtual void downloadFinished(const QByteArray& data) override;
|
||||
|
||||
protected:
|
||||
Q_INVOKABLE void setGeometryDefinition(void* fbxGeometry);
|
||||
|
||||
private:
|
||||
QVariantHash _mapping;
|
||||
QUrl _textureBaseUrl;
|
||||
};
|
||||
|
||||
void GeometryDefinitionResource::downloadFinished(const QByteArray& data) {
|
||||
QThreadPool::globalInstance()->start(new GeometryReader(_self, _url, _mapping, data));
|
||||
}
|
||||
|
||||
NetworkGeometry::~NetworkGeometry() {
|
||||
if (_resource) {
|
||||
_resource->deleteLater();
|
||||
}
|
||||
}
|
||||
void GeometryDefinitionResource::setGeometryDefinition(void* fbxGeometry) {
|
||||
// Assume ownership of the geometry pointer
|
||||
_geometry.reset(static_cast<FBXGeometry*>(fbxGeometry));
|
||||
|
||||
void NetworkGeometry::attemptRequest() {
|
||||
if (_state == DelayState) {
|
||||
attemptRequestInternal();
|
||||
}
|
||||
}
|
||||
|
||||
void NetworkGeometry::attemptRequestInternal() {
|
||||
if (_url.path().toLower().endsWith(".fst")) {
|
||||
_mappingUrl = _url;
|
||||
requestMapping(_url);
|
||||
} else {
|
||||
_modelUrl = _url;
|
||||
requestModel(_url);
|
||||
}
|
||||
}
|
||||
|
||||
bool NetworkGeometry::isLoaded() const {
|
||||
return _state == SuccessState;
|
||||
}
|
||||
|
||||
bool NetworkGeometry::isLoadedWithTextures() const {
|
||||
if (!isLoaded()) {
|
||||
return false;
|
||||
// Copy materials
|
||||
QHash<QString, size_t> materialIDAtlas;
|
||||
for (const FBXMaterial& material : _geometry->materials) {
|
||||
materialIDAtlas[material.materialID] = _materials.size();
|
||||
_materials.push_back(std::make_shared<NetworkMaterial>(material, _textureBaseUrl));
|
||||
}
|
||||
|
||||
if (!_isLoadedWithTextures) {
|
||||
_hasTransparentTextures = false;
|
||||
|
||||
for (auto&& material : _materials) {
|
||||
if ((material->albedoTexture && !material->albedoTexture->isLoaded()) ||
|
||||
(material->normalTexture && !material->normalTexture->isLoaded()) ||
|
||||
(material->roughnessTexture && !material->roughnessTexture->isLoaded()) ||
|
||||
(material->metallicTexture && !material->metallicTexture->isLoaded()) ||
|
||||
(material->occlusionTexture && !material->occlusionTexture->isLoaded()) ||
|
||||
(material->emissiveTexture && !material->emissiveTexture->isLoaded()) ||
|
||||
(material->lightmapTexture && !material->lightmapTexture->isLoaded())) {
|
||||
return false;
|
||||
}
|
||||
if (material->albedoTexture && material->albedoTexture->getGPUTexture()) {
|
||||
// Reassign the texture to make sure that its albedo alpha channel material key is detected correctly
|
||||
material->_material->setTextureMap(model::MaterialKey::ALBEDO_MAP, material->_material->getTextureMap(model::MaterialKey::ALBEDO_MAP));
|
||||
const auto& usage = material->albedoTexture->getGPUTexture()->getUsage();
|
||||
bool isTransparentTexture = usage.isAlpha() && !usage.isAlphaMask();
|
||||
_hasTransparentTextures |= isTransparentTexture;
|
||||
}
|
||||
}
|
||||
|
||||
_isLoadedWithTextures = true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void NetworkGeometry::setTextureWithNameToURL(const QString& name, const QUrl& url) {
|
||||
if (_meshes.size() > 0) {
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
for (auto&& material : _materials) {
|
||||
auto networkMaterial = material->_material;
|
||||
auto oldTextureMaps = networkMaterial->getTextureMaps();
|
||||
if (material->albedoTextureName == name) {
|
||||
material->albedoTexture = textureCache->getTexture(url, DEFAULT_TEXTURE);
|
||||
|
||||
auto albedoMap = model::TextureMapPointer(new model::TextureMap());
|
||||
albedoMap->setTextureSource(material->albedoTexture->_textureSource);
|
||||
albedoMap->setTextureTransform(oldTextureMaps[model::MaterialKey::ALBEDO_MAP]->getTextureTransform());
|
||||
// when reassigning the albedo texture we also check for the alpha channel used as opacity
|
||||
albedoMap->setUseAlphaChannel(true);
|
||||
networkMaterial->setTextureMap(model::MaterialKey::ALBEDO_MAP, albedoMap);
|
||||
} else if (material->normalTextureName == name) {
|
||||
material->normalTexture = textureCache->getTexture(url);
|
||||
|
||||
auto normalMap = model::TextureMapPointer(new model::TextureMap());
|
||||
normalMap->setTextureSource(material->normalTexture->_textureSource);
|
||||
|
||||
networkMaterial->setTextureMap(model::MaterialKey::NORMAL_MAP, normalMap);
|
||||
} else if (material->roughnessTextureName == name) {
|
||||
// FIXME: If a gloss map is passed instead of a roughness map, how do we indicate that? Perhaps by looking for 'gloss' in the name?
|
||||
material->roughnessTexture = textureCache->getTexture(url, ROUGHNESS_TEXTURE);
|
||||
|
||||
auto roughnessMap = model::TextureMapPointer(new model::TextureMap());
|
||||
roughnessMap->setTextureSource(material->roughnessTexture->_textureSource);
|
||||
|
||||
networkMaterial->setTextureMap(model::MaterialKey::ROUGHNESS_MAP, roughnessMap);
|
||||
} else if (material->metallicTextureName == name) {
|
||||
// FIXME: If a specular map is passed instead of a metallic map, how do we indicate that? Perhaps by looking for a hint in the name?
|
||||
material->metallicTexture = textureCache->getTexture(url, METALLIC_TEXTURE);
|
||||
|
||||
auto glossMap = model::TextureMapPointer(new model::TextureMap());
|
||||
glossMap->setTextureSource(material->metallicTexture->_textureSource);
|
||||
|
||||
networkMaterial->setTextureMap(model::MaterialKey::METALLIC_MAP, glossMap);
|
||||
} else if (material->emissiveTextureName == name) {
|
||||
material->emissiveTexture = textureCache->getTexture(url, EMISSIVE_TEXTURE);
|
||||
|
||||
auto emissiveMap = model::TextureMapPointer(new model::TextureMap());
|
||||
emissiveMap->setTextureSource(material->emissiveTexture->_textureSource);
|
||||
|
||||
networkMaterial->setTextureMap(model::MaterialKey::EMISSIVE_MAP, emissiveMap);
|
||||
} else if (material->lightmapTextureName == name) {
|
||||
material->lightmapTexture = textureCache->getTexture(url, LIGHTMAP_TEXTURE);
|
||||
|
||||
auto lightmapMap = model::TextureMapPointer(new model::TextureMap());
|
||||
lightmapMap->setTextureSource(material->lightmapTexture->_textureSource);
|
||||
lightmapMap->setTextureTransform(
|
||||
oldTextureMaps[model::MaterialKey::LIGHTMAP_MAP]->getTextureTransform());
|
||||
glm::vec2 oldOffsetScale =
|
||||
oldTextureMaps[model::MaterialKey::LIGHTMAP_MAP]->getLightmapOffsetScale();
|
||||
lightmapMap->setLightmapOffsetScale(oldOffsetScale.x, oldOffsetScale.y);
|
||||
|
||||
networkMaterial->setTextureMap(model::MaterialKey::LIGHTMAP_MAP, lightmapMap);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
qCWarning(modelnetworking) << "Ignoring setTextureWithNameToURL() geometry not ready." << name << url;
|
||||
}
|
||||
_isLoadedWithTextures = false;
|
||||
}
|
||||
|
||||
QStringList NetworkGeometry::getTextureNames() const {
    QStringList result;
    for (auto&& material : _materials) {
        if (!material->emissiveTextureName.isEmpty() && material->emissiveTexture) {
            QString textureURL = material->emissiveTexture->getURL().toString();
            result << material->emissiveTextureName + ":\"" + textureURL + "\"";
        }

        if (!material->albedoTextureName.isEmpty() && material->albedoTexture) {
            QString textureURL = material->albedoTexture->getURL().toString();
            result << material->albedoTextureName + ":\"" + textureURL + "\"";
        }

        if (!material->normalTextureName.isEmpty() && material->normalTexture) {
            QString textureURL = material->normalTexture->getURL().toString();
            result << material->normalTextureName + ":\"" + textureURL + "\"";
        }

        if (!material->roughnessTextureName.isEmpty() && material->roughnessTexture) {
            QString textureURL = material->roughnessTexture->getURL().toString();
            result << material->roughnessTextureName + ":\"" + textureURL + "\"";
        }

        if (!material->metallicTextureName.isEmpty() && material->metallicTexture) {
            QString textureURL = material->metallicTexture->getURL().toString();
            result << material->metallicTextureName + ":\"" + textureURL + "\"";
        }

        if (!material->occlusionTextureName.isEmpty() && material->occlusionTexture) {
            QString textureURL = material->occlusionTexture->getURL().toString();
            result << material->occlusionTextureName + ":\"" + textureURL + "\"";
        }

        if (!material->lightmapTextureName.isEmpty() && material->lightmapTexture) {
            QString textureURL = material->lightmapTexture->getURL().toString();
            result << material->lightmapTextureName + ":\"" + textureURL + "\"";
        }
    }

    return result;
}

void NetworkGeometry::requestMapping(const QUrl& url) {
    _state = RequestMappingState;
    if (_resource) {
        _resource->deleteLater();
    }
    _resource = new Resource(url, false);
    connect(_resource, &Resource::loaded, this, &NetworkGeometry::mappingRequestDone);
    connect(_resource, &Resource::failed, this, &NetworkGeometry::mappingRequestError);
}

void NetworkGeometry::requestModel(const QUrl& url) {
    _state = RequestModelState;
    if (_resource) {
        _resource->deleteLater();
    }
    _modelUrl = url;
    _resource = new Resource(url, false);
    connect(_resource, &Resource::loaded, this, &NetworkGeometry::modelRequestDone);
    connect(_resource, &Resource::failed, this, &NetworkGeometry::modelRequestError);
}

void NetworkGeometry::mappingRequestDone(const QByteArray data) {
    assert(_state == RequestMappingState);

    // parse the mapping file
    _mapping = FSTReader::readMapping(data);

    QUrl replyUrl = _mappingUrl;
    QString modelUrlStr = _mapping.value("filename").toString();
    if (modelUrlStr.isNull()) {
        qCDebug(modelnetworking) << "Mapping file " << _url << "has no \"filename\" entry";
        emit onFailure(*this, MissingFilenameInMapping);
    } else {
        // read _textureBase from mapping file, if present
        QString texdir = _mapping.value("texdir").toString();
        if (!texdir.isNull()) {
            if (!texdir.endsWith('/')) {
                texdir += '/';
            }
            _textureBaseUrl = replyUrl.resolved(texdir);
        }

        _modelUrl = replyUrl.resolved(modelUrlStr);
        requestModel(_modelUrl);
    }
}

void NetworkGeometry::mappingRequestError(QNetworkReply::NetworkError error) {
    assert(_state == RequestMappingState);
    _state = ErrorState;
    emit onFailure(*this, MappingRequestError);
}

void NetworkGeometry::modelRequestDone(const QByteArray data) {
    assert(_state == RequestModelState);

    _state = ParsingModelState;

    // asynchronously parse the model file.
    GeometryReader* geometryReader = new GeometryReader(_modelUrl, data, _mapping);
    connect(geometryReader, SIGNAL(onSuccess(FBXGeometry*)), SLOT(modelParseSuccess(FBXGeometry*)));
    connect(geometryReader, SIGNAL(onError(int, QString)), SLOT(modelParseError(int, QString)));

    QThreadPool::globalInstance()->start(geometryReader);
}

void NetworkGeometry::modelRequestError(QNetworkReply::NetworkError error) {
    assert(_state == RequestModelState);
    _state = ErrorState;
    emit onFailure(*this, ModelRequestError);
}

static NetworkMesh* buildNetworkMesh(const FBXMesh& mesh, const QUrl& textureBaseUrl) {
    NetworkMesh* networkMesh = new NetworkMesh();

    networkMesh->_mesh = mesh._mesh;

    return networkMesh;
}

static model::TextureMapPointer setupNetworkTextureMap(NetworkGeometry* geometry, const QUrl& textureBaseUrl,
        const FBXTexture& texture, TextureType type,
        NetworkTexturePointer& networkTexture, QString& networkTextureName) {
    auto textureCache = DependencyManager::get<TextureCache>();

    // If content is inline, cache it under the fbx file, not its base url
    const auto baseUrl = texture.content.isEmpty() ? textureBaseUrl : QUrl(textureBaseUrl.url() + "/");
    const auto filename = baseUrl.resolved(QUrl(texture.filename));

    networkTexture = textureCache->getTexture(filename, type, texture.content);
    QObject::connect(networkTexture.data(), &NetworkTexture::networkTextureCreated, geometry, &NetworkGeometry::textureLoaded);
    networkTextureName = texture.name;

    auto map = std::make_shared<model::TextureMap>();
    map->setTextureSource(networkTexture->_textureSource);
    return map;
}

static NetworkMaterial* buildNetworkMaterial(NetworkGeometry* geometry, const FBXMaterial& material, const QUrl& textureBaseUrl) {
    NetworkMaterial* networkMaterial = new NetworkMaterial();
    networkMaterial->_material = material._material;

    if (!material.albedoTexture.filename.isEmpty()) {
        auto albedoMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.albedoTexture, DEFAULT_TEXTURE,
                                                networkMaterial->albedoTexture, networkMaterial->albedoTextureName);
        albedoMap->setTextureTransform(material.albedoTexture.transform);

        if (!material.opacityTexture.filename.isEmpty()) {
            if (material.albedoTexture.filename == material.opacityTexture.filename) {
                // Best case scenario, just indicating that the albedo map contains transparency
                albedoMap->setUseAlphaChannel(true);
            } else {
                // Opacity Map is different from the Abledo map, not supported
            }
        }

        material._material->setTextureMap(model::MaterialKey::ALBEDO_MAP, albedoMap);
    }

    if (!material.normalTexture.filename.isEmpty()) {
        auto normalMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.normalTexture,
                                                (material.normalTexture.isBumpmap ? BUMP_TEXTURE : NORMAL_TEXTURE),
                                                networkMaterial->normalTexture, networkMaterial->normalTextureName);
        networkMaterial->_material->setTextureMap(model::MaterialKey::NORMAL_MAP, normalMap);
    }

    // Roughness first or gloss maybe
    if (!material.roughnessTexture.filename.isEmpty()) {
        auto roughnessMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.roughnessTexture, ROUGHNESS_TEXTURE,
                                                   networkMaterial->roughnessTexture, networkMaterial->roughnessTextureName);
        material._material->setTextureMap(model::MaterialKey::ROUGHNESS_MAP, roughnessMap);
    } else if (!material.glossTexture.filename.isEmpty()) {
        auto roughnessMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.glossTexture, GLOSS_TEXTURE,
                                                   networkMaterial->roughnessTexture, networkMaterial->roughnessTextureName);
        material._material->setTextureMap(model::MaterialKey::ROUGHNESS_MAP, roughnessMap);
    }

    // Metallic first or specular maybe
    if (!material.metallicTexture.filename.isEmpty()) {
        auto metallicMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.metallicTexture, METALLIC_TEXTURE,
                                                  networkMaterial->metallicTexture, networkMaterial->metallicTextureName);
        material._material->setTextureMap(model::MaterialKey::METALLIC_MAP, metallicMap);
    } else if (!material.specularTexture.filename.isEmpty()) {
        auto metallicMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.specularTexture, SPECULAR_TEXTURE,
                                                  networkMaterial->metallicTexture, networkMaterial->metallicTextureName);
        material._material->setTextureMap(model::MaterialKey::METALLIC_MAP, metallicMap);
    }

    if (!material.occlusionTexture.filename.isEmpty()) {
        auto occlusionMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.occlusionTexture, OCCLUSION_TEXTURE,
                                                   networkMaterial->occlusionTexture, networkMaterial->occlusionTextureName);
        material._material->setTextureMap(model::MaterialKey::OCCLUSION_MAP, occlusionMap);
    }

    if (!material.emissiveTexture.filename.isEmpty()) {
        auto emissiveMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.emissiveTexture, EMISSIVE_TEXTURE,
                                                  networkMaterial->emissiveTexture, networkMaterial->emissiveTextureName);
        material._material->setTextureMap(model::MaterialKey::EMISSIVE_MAP, emissiveMap);
    }

    if (!material.lightmapTexture.filename.isEmpty()) {
        auto lightmapMap = setupNetworkTextureMap(geometry, textureBaseUrl, material.lightmapTexture, LIGHTMAP_TEXTURE,
                                                  networkMaterial->lightmapTexture, networkMaterial->lightmapTextureName);
        lightmapMap->setTextureTransform(material.lightmapTexture.transform);
        lightmapMap->setLightmapOffsetScale(material.lightmapParams.x, material.lightmapParams.y);
        material._material->setTextureMap(model::MaterialKey::LIGHTMAP_MAP, lightmapMap);
    }

    return networkMaterial;
}

void NetworkGeometry::modelParseSuccess(FBXGeometry* geometry) {
    // assume owner ship of geometry pointer
    _geometry.reset(geometry);

    foreach(const FBXMesh& mesh, _geometry->meshes) {
        _meshes.emplace_back(buildNetworkMesh(mesh, _textureBaseUrl));
    }

    QHash<QString, size_t> fbxMatIDToMatID;
    foreach(const FBXMaterial& material, _geometry->materials) {
        fbxMatIDToMatID[material.materialID] = _materials.size();
        _materials.emplace_back(buildNetworkMaterial(this, material, _textureBaseUrl));
    }

    std::shared_ptr<NetworkMeshes> meshes = std::make_shared<NetworkMeshes>();
    std::shared_ptr<NetworkShapes> shapes = std::make_shared<NetworkShapes>();
    int meshID = 0;
    foreach(const FBXMesh& mesh, _geometry->meshes) {
    for (const FBXMesh& mesh : _geometry->meshes) {
        // Copy mesh pointers
        meshes->emplace_back(mesh._mesh);
        int partID = 0;
        foreach (const FBXMeshPart& part, mesh.parts) {
            NetworkShape* networkShape = new NetworkShape();
            networkShape->_meshID = meshID;
            networkShape->_partID = partID;
            networkShape->_materialID = (int)fbxMatIDToMatID[part.materialID];
            _shapes.emplace_back(networkShape);
        for (const FBXMeshPart& part : mesh.parts) {
            // Construct local shapes
            shapes->push_back(std::make_shared<NetworkShape>(meshID, partID, (int)materialIDAtlas[part.materialID]));
            partID++;
        }
        meshID++;
    }
    _meshes = meshes;
    _shapes = shapes;

    _state = SuccessState;
    emit onSuccess(*this, *_geometry.get());

    delete _resource;
    _resource = nullptr;
    finishedLoading(true);
}

void NetworkGeometry::modelParseError(int error, QString str) {
    _state = ErrorState;
    emit onFailure(*this, (NetworkGeometry::Error)error);

    delete _resource;
    _resource = nullptr;
ModelCache::ModelCache() {
    const qint64 GEOMETRY_DEFAULT_UNUSED_MAX_SIZE = DEFAULT_UNUSED_MAX_SIZE;
    setUnusedResourceCacheSize(GEOMETRY_DEFAULT_UNUSED_MAX_SIZE);
}

const NetworkMaterial* NetworkGeometry::getShapeMaterial(int shapeID) {
    if ((shapeID >= 0) && (shapeID < (int)_shapes.size())) {
        int materialID = _shapes[shapeID]->_materialID;
        if ((materialID >= 0) && ((unsigned int)materialID < _materials.size())) {
            return _materials[materialID].get();
        } else {
            return 0;
        }
QSharedPointer<Resource> ModelCache::createResource(const QUrl& url, const QSharedPointer<Resource>& fallback,
                                                    bool delayLoad, const void* extra) {
    const GeometryExtra* geometryExtra = static_cast<const GeometryExtra*>(extra);

    Resource* resource = nullptr;
    if (url.path().toLower().endsWith(".fst")) {
        resource = new GeometryMappingResource(url);
    } else {
        return 0;
        resource = new GeometryDefinitionResource(url, geometryExtra->mapping, geometryExtra->textureBaseUrl);
    }

    return QSharedPointer<Resource>(resource, &Resource::allReferencesCleared);
}

std::shared_ptr<NetworkGeometry> ModelCache::getGeometry(const QUrl& url, const QVariantHash& mapping, const QUrl& textureBaseUrl) {
    GeometryExtra geometryExtra = { mapping, textureBaseUrl };
    GeometryResource::Pointer resource = getResource(url, QUrl(), true, &geometryExtra).staticCast<GeometryResource>();
    return std::make_shared<NetworkGeometry>(resource);
}

const QVariantMap Geometry::getTextures() const {
    QVariantMap textures;
    for (const auto& material : _materials) {
        for (const auto& texture : material->_textures) {
            if (texture.texture) {
                textures[texture.name] = texture.texture->getURL();
            }
        }
    }

    return textures;
}

// FIXME: The materials should only be copied when modified, but the Model currently caches the original
Geometry::Geometry(const Geometry& geometry) {
    _geometry = geometry._geometry;
    _meshes = geometry._meshes;
    _shapes = geometry._shapes;

    _materials.reserve(geometry._materials.size());
    for (const auto& material : geometry._materials) {
        _materials.push_back(std::make_shared<NetworkMaterial>(*material));
    }
}

void NetworkGeometry::textureLoaded(const QWeakPointer<NetworkTexture>& networkTexture) {
    numTextureLoaded++;
void Geometry::setTextures(const QVariantMap& textureMap) {
    if (_meshes->size() > 0) {
        for (auto& material : _materials) {
            // Check if any material textures actually changed
            if (std::any_of(material->_textures.cbegin(), material->_textures.cend(),
                [&textureMap](const NetworkMaterial::Textures::value_type& it) { return it.texture && textureMap.contains(it.name); })) {

                // FIXME: The Model currently caches the materials (waste of space!)
                // so they must be copied in the Geometry copy-ctor
                // if (material->isOriginal()) {
                //    // Copy the material to avoid mutating the cached version
                //    material = std::make_shared<NetworkMaterial>(*material);
                //}

                material->setTextures(textureMap);
                _areTexturesLoaded = false;
            }
        }
    } else {
        qCWarning(modelnetworking) << "Ignoring setTextures(); geometry not ready";
    }
}

bool Geometry::areTexturesLoaded() const {
    if (!_areTexturesLoaded) {
        _hasTransparentTextures = false;

        for (auto& material : _materials) {
            // Check if material textures are loaded
            if (std::any_of(material->_textures.cbegin(), material->_textures.cend(),
                [](const NetworkMaterial::Textures::value_type& it) { return it.texture && !it.texture->isLoaded(); })) {

                return false;
            }

            // If material textures are loaded, check the material translucency
            const auto albedoTexture = material->_textures[NetworkMaterial::MapChannel::ALBEDO_MAP];
            if (albedoTexture.texture && albedoTexture.texture->getGPUTexture()) {
                material->resetOpacityMap();

                _hasTransparentTextures |= material->getKey().isTranslucent();
            }
        }

        _areTexturesLoaded = true;
    }
    return true;
}

const std::shared_ptr<const NetworkMaterial> Geometry::getShapeMaterial(int shapeID) const {
    if ((shapeID >= 0) && (shapeID < (int)_shapes->size())) {
        int materialID = _shapes->at(shapeID)->materialID;
        if ((materialID >= 0) && (materialID < (int)_materials.size())) {
            return _materials[materialID];
        }
    }
    return nullptr;
}

NetworkGeometry::NetworkGeometry(const GeometryResource::Pointer& networkGeometry) : _resource(networkGeometry) {
    connect(_resource.data(), &Resource::finished, this, &NetworkGeometry::resourceFinished);
    connect(_resource.data(), &Resource::onRefresh, this, &NetworkGeometry::resourceRefreshed);
    if (_resource->isLoaded()) {
        resourceFinished(!_resource->getURL().isEmpty());
    }
}

void NetworkGeometry::resourceFinished(bool success) {
    // FIXME: Model is not set up to handle a refresh
    if (_instance) {
        return;
    }
    if (success) {
        _instance = std::make_shared<Geometry>(*_resource);
    }
    emit finished(success);
}

void NetworkGeometry::resourceRefreshed() {
    // FIXME: Model is not set up to handle a refresh
    // _instance.reset();
}

const QString NetworkMaterial::NO_TEXTURE = QString();

const QString& NetworkMaterial::getTextureName(MapChannel channel) {
    if (_textures[channel].texture) {
        return _textures[channel].name;
    }
    return NO_TEXTURE;
}

QUrl NetworkMaterial::getTextureUrl(const QUrl& url, const FBXTexture& texture) {
    // If content is inline, cache it under the fbx file, not its url
    const auto baseUrl = texture.content.isEmpty() ? url : QUrl(url.url() + "/");
    return baseUrl.resolved(QUrl(texture.filename));
}

model::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& baseUrl, const FBXTexture& fbxTexture,
                                                          TextureType type, MapChannel channel) {
    const auto url = getTextureUrl(baseUrl, fbxTexture);
    const auto texture = DependencyManager::get<TextureCache>()->getTexture(url, type, fbxTexture.content);
    _textures[channel] = Texture { fbxTexture.name, texture };

    auto map = std::make_shared<model::TextureMap>();
    map->setTextureSource(texture->_textureSource);
    return map;
}

model::TextureMapPointer NetworkMaterial::fetchTextureMap(const QUrl& url, TextureType type, MapChannel channel) {
    const auto texture = DependencyManager::get<TextureCache>()->getTexture(url, type);
    _textures[channel].texture = texture;

    auto map = std::make_shared<model::TextureMap>();
    map->setTextureSource(texture->_textureSource);
    return map;
}

NetworkMaterial::NetworkMaterial(const FBXMaterial& material, const QUrl& textureBaseUrl) :
    model::Material(*material._material)
{
    _textures = Textures(MapChannel::NUM_MAP_CHANNELS);
    if (!material.albedoTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.albedoTexture, DEFAULT_TEXTURE, MapChannel::ALBEDO_MAP);
        _albedoTransform = material.albedoTexture.transform;
        map->setTextureTransform(_albedoTransform);

        if (!material.opacityTexture.filename.isEmpty()) {
            if (material.albedoTexture.filename == material.opacityTexture.filename) {
                // Best case scenario, just indicating that the albedo map contains transparency
                // TODO: Different albedo/opacity maps are not currently supported
                map->setUseAlphaChannel(true);
            }
        }

        setTextureMap(MapChannel::ALBEDO_MAP, map);
    }

    if (!material.normalTexture.filename.isEmpty()) {
        auto type = (material.normalTexture.isBumpmap ? BUMP_TEXTURE : NORMAL_TEXTURE);
        auto map = fetchTextureMap(textureBaseUrl, material.normalTexture, type, MapChannel::NORMAL_MAP);
        setTextureMap(MapChannel::NORMAL_MAP, map);
    }

    if (!material.roughnessTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.roughnessTexture, ROUGHNESS_TEXTURE, MapChannel::ROUGHNESS_MAP);
        setTextureMap(MapChannel::ROUGHNESS_MAP, map);
    } else if (!material.glossTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.glossTexture, GLOSS_TEXTURE, MapChannel::ROUGHNESS_MAP);
        setTextureMap(MapChannel::ROUGHNESS_MAP, map);
    }

    if (!material.metallicTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.metallicTexture, METALLIC_TEXTURE, MapChannel::METALLIC_MAP);
        setTextureMap(MapChannel::METALLIC_MAP, map);
    } else if (!material.specularTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.specularTexture, SPECULAR_TEXTURE, MapChannel::METALLIC_MAP);
        setTextureMap(MapChannel::METALLIC_MAP, map);
    }

    if (!material.occlusionTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.occlusionTexture, OCCLUSION_TEXTURE, MapChannel::OCCLUSION_MAP);
        setTextureMap(MapChannel::OCCLUSION_MAP, map);
    }

    if (!material.emissiveTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.emissiveTexture, EMISSIVE_TEXTURE, MapChannel::EMISSIVE_MAP);
        setTextureMap(MapChannel::EMISSIVE_MAP, map);
    }

    if (!material.lightmapTexture.filename.isEmpty()) {
        auto map = fetchTextureMap(textureBaseUrl, material.lightmapTexture, LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
        _lightmapTransform = material.lightmapTexture.transform;
        _lightmapParams = material.lightmapParams;
        map->setTextureTransform(_lightmapTransform);
        map->setLightmapOffsetScale(_lightmapParams.x, _lightmapParams.y);
        setTextureMap(MapChannel::LIGHTMAP_MAP, map);
    }
}

void NetworkMaterial::setTextures(const QVariantMap& textureMap) {
    _isOriginal = false;

    const auto& albedoName = getTextureName(MapChannel::ALBEDO_MAP);
    const auto& normalName = getTextureName(MapChannel::NORMAL_MAP);
    const auto& roughnessName = getTextureName(MapChannel::ROUGHNESS_MAP);
    const auto& metallicName = getTextureName(MapChannel::METALLIC_MAP);
    const auto& occlusionName = getTextureName(MapChannel::OCCLUSION_MAP);
    const auto& emissiveName = getTextureName(MapChannel::EMISSIVE_MAP);
    const auto& lightmapName = getTextureName(MapChannel::LIGHTMAP_MAP);

    if (!albedoName.isEmpty()) {
        auto url = textureMap.contains(albedoName) ? textureMap[albedoName].toUrl() : QUrl();
        auto map = fetchTextureMap(url, DEFAULT_TEXTURE, MapChannel::ALBEDO_MAP);
        map->setTextureTransform(_albedoTransform);
        // when reassigning the albedo texture we also check for the alpha channel used as opacity
        map->setUseAlphaChannel(true);
        setTextureMap(MapChannel::ALBEDO_MAP, map);
    }

    if (!normalName.isEmpty()) {
        auto url = textureMap.contains(normalName) ? textureMap[normalName].toUrl() : QUrl();
        auto map = fetchTextureMap(url, DEFAULT_TEXTURE, MapChannel::NORMAL_MAP);
        setTextureMap(MapChannel::NORMAL_MAP, map);
    }

    if (!roughnessName.isEmpty()) {
        auto url = textureMap.contains(roughnessName) ? textureMap[roughnessName].toUrl() : QUrl();
        // FIXME: If passing a gloss map instead of a roughmap how do we know?
        auto map = fetchTextureMap(url, ROUGHNESS_TEXTURE, MapChannel::ROUGHNESS_MAP);
        setTextureMap(MapChannel::ROUGHNESS_MAP, map);
    }

    if (!metallicName.isEmpty()) {
        auto url = textureMap.contains(metallicName) ? textureMap[metallicName].toUrl() : QUrl();
        // FIXME: If passing a specular map instead of a metallic how do we know?
        auto map = fetchTextureMap(url, METALLIC_TEXTURE, MapChannel::METALLIC_MAP);
        setTextureMap(MapChannel::METALLIC_MAP, map);
    }

    if (!occlusionName.isEmpty()) {
        auto url = textureMap.contains(occlusionName) ? textureMap[occlusionName].toUrl() : QUrl();
        auto map = fetchTextureMap(url, OCCLUSION_TEXTURE, MapChannel::OCCLUSION_MAP);
        setTextureMap(MapChannel::OCCLUSION_MAP, map);
    }

    if (!emissiveName.isEmpty()) {
        auto url = textureMap.contains(emissiveName) ? textureMap[emissiveName].toUrl() : QUrl();
        auto map = fetchTextureMap(url, EMISSIVE_TEXTURE, MapChannel::EMISSIVE_MAP);
        setTextureMap(MapChannel::EMISSIVE_MAP, map);
    }

    if (!lightmapName.isEmpty()) {
        auto url = textureMap.contains(lightmapName) ? textureMap[lightmapName].toUrl() : QUrl();
        auto map = fetchTextureMap(url, LIGHTMAP_TEXTURE, MapChannel::LIGHTMAP_MAP);
        map->setTextureTransform(_lightmapTransform);
        map->setLightmapOffsetScale(_lightmapParams.x, _lightmapParams.y);
        setTextureMap(MapChannel::LIGHTMAP_MAP, map);
    }
}
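
For orientation only (not part of this diff): a minimal sketch of how a caller might drive the name-keyed override path above. The key "albedoMap", the URL, and the `geometry` pointer are hypothetical placeholders; real keys come from NetworkMaterial::getTextureName() for each map channel, and Geometry::setTextures() forwards the map to each affected NetworkMaterial as shown earlier.

QVariantMap overrides;
overrides["albedoMap"] = QUrl("https://example.com/textures/new-albedo.png");   // placeholder name and URL
geometry->setTextures(overrides);   // marks the affected materials as not loaded
// Geometry::areTexturesLoaded() returns true again once the replacement textures have
// loaded, at which point resetOpacityMap() re-derives translucency from the new albedo map.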
#include "ModelCache.moc"

@@ -1,9 +1,9 @@
//
//  ModelCache.h
//  libraries/model-networking/src/model-networking
//  libraries/model-networking
//
//  Created by Sam Gateau on 9/21/15.
//  Copyright 2013 High Fidelity, Inc.
//  Created by Zach Pomerantz on 3/15/16.
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html

@@ -12,200 +12,176 @@
#ifndef hifi_ModelCache_h
#define hifi_ModelCache_h

#include <QMap>
#include <QRunnable>

#include <DependencyManager.h>
#include <ResourceCache.h>

#include "FBXReader.h"
#include "OBJReader.h"

#include <gpu/Batch.h>
#include <gpu/Stream.h>

#include <model/Material.h>
#include <model/Asset.h>

class NetworkGeometry;
class NetworkMesh;
#include "FBXReader.h"
#include "TextureCache.h"

// Alias instead of derive to avoid copying
using NetworkMesh = model::Mesh;

class NetworkTexture;
class NetworkMaterial;
class NetworkShape;
class NetworkGeometry;

/// Stores cached geometry.
class GeometryMappingResource;

/// Stores cached model geometries.
class ModelCache : public ResourceCache, public Dependency {
    Q_OBJECT
    SINGLETON_DEPENDENCY

public:
    virtual QSharedPointer<Resource> createResource(const QUrl& url, const QSharedPointer<Resource>& fallback,
                                                    bool delayLoad, const void* extra);
    /// Loads a model geometry from the specified URL.
    std::shared_ptr<NetworkGeometry> getGeometry(const QUrl& url,
        const QVariantHash& mapping = QVariantHash(), const QUrl& textureBaseUrl = QUrl());

    /// Loads geometry from the specified URL.
    /// \param fallback a fallback URL to load if the desired one is unavailable
    /// \param delayLoad if true, don't load the geometry immediately; wait until load is first requested
    QSharedPointer<NetworkGeometry> getGeometry(const QUrl& url, const QUrl& fallback = QUrl(), bool delayLoad = false);
protected:
    friend class GeometryMappingResource;

    virtual QSharedPointer<Resource> createResource(const QUrl& url,
        const QSharedPointer<Resource>& fallback, bool delayLoad, const void* extra);

private:
    ModelCache();
    virtual ~ModelCache();
    virtual ~ModelCache() = default;
};

    QHash<QUrl, QWeakPointer<NetworkGeometry> > _networkGeometry;
class Geometry {
public:
    using Pointer = std::shared_ptr<Geometry>;

    Geometry() = default;
    Geometry(const Geometry& geometry);

    // Immutable over lifetime
    using NetworkMeshes = std::vector<std::shared_ptr<const NetworkMesh>>;
    using NetworkShapes = std::vector<std::shared_ptr<const NetworkShape>>;

    // Mutable, but must retain structure of vector
    using NetworkMaterials = std::vector<std::shared_ptr<NetworkMaterial>>;

    const FBXGeometry& getGeometry() const { return *_geometry; }
    const NetworkMeshes& getMeshes() const { return *_meshes; }
    const std::shared_ptr<const NetworkMaterial> getShapeMaterial(int shapeID) const;

    const QVariantMap getTextures() const;
    void setTextures(const QVariantMap& textureMap);

    virtual bool areTexturesLoaded() const;
    // Returns true if any albedo texture has a non-masking alpha channel.
    // This can only be known after areTexturesLoaded().
    bool hasTransparentTextures() const { return _hasTransparentTextures; }

protected:
    friend class GeometryMappingResource;

    // Shared across all geometries, constant throughout lifetime
    std::shared_ptr<const FBXGeometry> _geometry;
    std::shared_ptr<const NetworkMeshes> _meshes;
    std::shared_ptr<const NetworkShapes> _shapes;

    // Copied to each geometry, mutable throughout lifetime via setTextures
    NetworkMaterials _materials;

private:
    mutable bool _areTexturesLoaded { false };
    mutable bool _hasTransparentTextures { false };
};

/// A geometry loaded from the network.
class GeometryResource : public Resource, public Geometry {
public:
    using Pointer = QSharedPointer<GeometryResource>;

    GeometryResource(const QUrl& url) : Resource(url) {}

    virtual bool areTexturesLoaded() const { return isLoaded() && Geometry::areTexturesLoaded(); }

protected:
    friend class GeometryMappingResource;

    virtual bool isCacheable() const override { return _loaded && _isCacheable; }

    bool _isCacheable { true };
};

class NetworkGeometry : public QObject {
    Q_OBJECT

public:
    // mapping is only used if url is a .fbx or .obj file, it is essentially the content of an fst file.
    // if delayLoad is true, the url will not be immediately downloaded.
    // use the attemptRequest method to initiate the download.
    NetworkGeometry(const QUrl& url, bool delayLoad, const QVariantHash& mapping, const QUrl& textureBaseUrl = QUrl());
    ~NetworkGeometry();
    using Pointer = std::shared_ptr<NetworkGeometry>;

    const QUrl& getURL() const { return _url; }
    NetworkGeometry() = delete;
    NetworkGeometry(const GeometryResource::Pointer& networkGeometry);

    void attemptRequest();
    const QUrl& getURL() { return _resource->getURL(); }

    // true when the geometry is loaded (but maybe not it's associated textures)
    bool isLoaded() const;

    // true when the requested geometry and its textures are loaded.
    bool isLoadedWithTextures() const;

    // true if the albedo texture has a non-masking alpha channel.
    // This can only be known after isLoadedWithTextures().
    bool hasTransparentTextures() const { return _hasTransparentTextures; }

    // WARNING: only valid when isLoaded returns true.
    const FBXGeometry& getFBXGeometry() const { return *_geometry; }
    const std::vector<std::unique_ptr<NetworkMesh>>& getMeshes() const { return _meshes; }
    // const model::AssetPointer getAsset() const { return _asset; }

    // model::MeshPointer getShapeMesh(int shapeID);
    // int getShapePart(int shapeID);

    // This would be the final verison
    // model::MaterialPointer getShapeMaterial(int shapeID);
    const NetworkMaterial* getShapeMaterial(int shapeID);

    void setTextureWithNameToURL(const QString& name, const QUrl& url);
    QStringList getTextureNames() const;

    enum Error {
        MissingFilenameInMapping = 0,
        MappingRequestError,
        ModelRequestError,
        ModelParseError
    };
    /// Returns the geometry, if it is loaded (must be checked!)
    const Geometry::Pointer& getGeometry() { return _instance; }

signals:
    // Fired when everything has downloaded and parsed successfully.
    void onSuccess(NetworkGeometry& networkGeometry, FBXGeometry& fbxGeometry);
    /// Emitted when the NetworkGeometry loads (or fails to)
    void finished(bool success);

    // Fired when something went wrong.
    void onFailure(NetworkGeometry& networkGeometry, Error error);
private slots:
    void resourceFinished(bool success);
    void resourceRefreshed();

public slots:
    void textureLoaded(const QWeakPointer<NetworkTexture>& networkTexture);
private:
    GeometryResource::Pointer _resource;
    Geometry::Pointer _instance { nullptr };
};
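
A hedged usage sketch (not part of the commit) of the refactored loading flow declared above: ModelCache hands out a NetworkGeometry wrapper, the wrapper emits finished(bool), and only after a successful finish is getGeometry() safe to dereference. The URL and the lambda body are illustrative placeholders.

auto modelCache = DependencyManager::get<ModelCache>();
NetworkGeometry::Pointer network = modelCache->getGeometry(QUrl("https://example.com/model.fst"));
QObject::connect(network.get(), &NetworkGeometry::finished, [network](bool success) {
    if (!success) {
        return;
    }
    // Only valid once finished(true) has fired, per the "must be checked" note above.
    const Geometry::Pointer& geometry = network->getGeometry();
    qDebug() << "meshes:" << geometry->getMeshes().size()
             << "textures loaded:" << geometry->areTexturesLoaded();
});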

protected slots:
    void mappingRequestDone(const QByteArray data);
    void mappingRequestError(QNetworkReply::NetworkError error);

    void modelRequestDone(const QByteArray data);
    void modelRequestError(QNetworkReply::NetworkError error);

    void modelParseSuccess(FBXGeometry* geometry);
    void modelParseError(int error, QString str);
class NetworkMaterial : public model::Material {
public:
    using MapChannel = model::Material::MapChannel;

    NetworkMaterial(const FBXMaterial& material, const QUrl& textureBaseUrl);

protected:
    void attemptRequestInternal();
    void requestMapping(const QUrl& url);
    void requestModel(const QUrl& url);
    friend class Geometry;

    enum State { DelayState,
                 RequestMappingState,
                 RequestModelState,
                 ParsingModelState,
                 SuccessState,
                 ErrorState };
    State _state;
    class Texture {
    public:
        QString name;
        QSharedPointer<NetworkTexture> texture;
    };
    using Textures = std::vector<Texture>;

    QUrl _url;
    QUrl _mappingUrl;
    QUrl _modelUrl;
    QVariantHash _mapping;
    QUrl _textureBaseUrl;
    int numTextureLoaded = 0;
    Textures _textures;

    Resource* _resource = nullptr;
    std::unique_ptr<FBXGeometry> _geometry; // This should go away evenutally once we can put everything we need in the model::AssetPointer
    std::vector<std::unique_ptr<NetworkMesh>> _meshes;
    std::vector<std::unique_ptr<NetworkMaterial>> _materials;
    std::vector<std::unique_ptr<NetworkShape>> _shapes;
    static const QString NO_TEXTURE;
    const QString& getTextureName(MapChannel channel);

    void setTextures(const QVariantMap& textureMap);

    // The model asset created from this NetworkGeometry
    // model::AssetPointer _asset;
    const bool& isOriginal() const { return _isOriginal; }

    // cache for isLoadedWithTextures()
    mutable bool _isLoadedWithTextures = false;
    mutable bool _hasTransparentTextures = false;
};

/// Reads geometry in a worker thread.
class GeometryReader : public QObject, public QRunnable {
    Q_OBJECT
public:
    GeometryReader(const QUrl& url, const QByteArray& data, const QVariantHash& mapping);
    virtual void run();
signals:
    void onSuccess(FBXGeometry* geometry);
    void onError(int error, QString str);
private:
    QUrl _url;
    QByteArray _data;
    QVariantHash _mapping;
};
    // Helpers for the ctors
    QUrl getTextureUrl(const QUrl& baseUrl, const FBXTexture& fbxTexture);
    model::TextureMapPointer fetchTextureMap(const QUrl& baseUrl, const FBXTexture& fbxTexture,
                                             TextureType type, MapChannel channel);
    model::TextureMapPointer fetchTextureMap(const QUrl& url, TextureType type, MapChannel channel);

    Transform _albedoTransform;
    Transform _lightmapTransform;
    vec2 _lightmapParams;

    bool _isOriginal { true };
};

class NetworkShape {
public:
    int _meshID{ -1 };
    int _partID{ -1 };
    int _materialID{ -1 };
    NetworkShape(int mesh, int part, int material) : meshID { mesh }, partID { part }, materialID { material } {}
    int meshID { -1 };
    int partID { -1 };
    int materialID { -1 };
};

class NetworkMaterial {
public:

    model::MaterialPointer _material;
    QString emissiveTextureName;
    QSharedPointer<NetworkTexture> emissiveTexture;
    QString albedoTextureName;
    QSharedPointer<NetworkTexture> albedoTexture;
    QString normalTextureName;
    QSharedPointer<NetworkTexture> normalTexture;
    QString roughnessTextureName;
    QSharedPointer<NetworkTexture> roughnessTexture;
    QString metallicTextureName;
    QSharedPointer<NetworkTexture> metallicTexture;
    QString occlusionTextureName;
    QSharedPointer<NetworkTexture> occlusionTexture;
    QString lightmapTextureName;
    QSharedPointer<NetworkTexture> lightmapTexture;
};

/// The state associated with a single mesh.
class NetworkMesh {
public:
    model::MeshPointer _mesh;
};

#endif // hifi_GeometryCache_h
#endif // hifi_ModelCache_h

@@ -18,26 +18,34 @@ using namespace gpu;
Material::Material() :
    _key(0),
    _schemaBuffer(),
    _textureMaps() {

    // only if created from nothing shall we create the Buffer to store the properties
    Schema schema;
    _schemaBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Schema), (const gpu::Byte*) &schema));
    _textureMaps()
{
    // created from nothing: create the Buffer to store the properties
    Schema schema;
    _schemaBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Schema), (const gpu::Byte*) &schema));

}

Material::Material(const Material& material) :
    _key(material._key),
    _schemaBuffer(material._schemaBuffer),
    _textureMaps(material._textureMaps) {
    _textureMaps(material._textureMaps)
{
    // copied: create the Buffer to store the properties, avoid holding a ref to the old Buffer
    Schema schema;
    _schemaBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Schema), (const gpu::Byte*) &schema));
    _schemaBuffer.edit<Schema>() = material._schemaBuffer.get<Schema>();
}

Material& Material::operator= (const Material& material) {
    _key = (material._key);
    _schemaBuffer = (material._schemaBuffer);
    _textureMaps = (material._textureMaps);

    // copied: create the Buffer to store the properties, avoid holding a ref to the old Buffer
    Schema schema;
    _schemaBuffer = gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(Schema), (const gpu::Byte*) &schema));
    _schemaBuffer.edit<Schema>() = material._schemaBuffer.get<Schema>();

    return (*this);
}

@@ -83,41 +91,48 @@ void Material::setMetallic(float metallic) {
void Material::setTextureMap(MapChannel channel, const TextureMapPointer& textureMap) {
    if (textureMap) {
        _key.setMapChannel(channel, (true));

        if (channel == MaterialKey::ALBEDO_MAP) {
            // clear the previous flags whatever they were:
            _key.setOpacityMaskMap(false);
            _key.setTranslucentMap(false);

            if (textureMap->useAlphaChannel() && textureMap->isDefined() && textureMap->getTextureView().isValid()) {
                auto usage = textureMap->getTextureView()._texture->getUsage();
                if (usage.isAlpha()) {
                    // Texture has alpha, is not just a mask or a true transparent channel
                    if (usage.isAlphaMask()) {
                        _key.setOpacityMaskMap(true);
                        _key.setTranslucentMap(false);
                    } else {
                        _key.setOpacityMaskMap(false);
                        _key.setTranslucentMap(true);
                    }
                }
            }
        }

        _textureMaps[channel] = textureMap;
    } else {
        _key.setMapChannel(channel, (false));

        if (channel == MaterialKey::ALBEDO_MAP) {
            _key.setOpacityMaskMap(false);
            _key.setTranslucentMap(false);
        }

        _textureMaps.erase(channel);
    }

    _schemaBuffer.edit<Schema>()._key = (uint32)_key._flags.to_ulong();

    if (channel == MaterialKey::ALBEDO_MAP) {
        resetOpacityMap();
    }

    _schemaBuffer.edit<Schema>()._key = (uint32)_key._flags.to_ulong();

}

void Material::resetOpacityMap() const {
    // Clear the previous flags
    _key.setOpacityMaskMap(false);
    _key.setTranslucentMap(false);

    const auto& textureMap = getTextureMap(MaterialKey::ALBEDO_MAP);
    if (textureMap &&
        textureMap->useAlphaChannel() &&
        textureMap->isDefined() &&
        textureMap->getTextureView().isValid()) {

        auto usage = textureMap->getTextureView()._texture->getUsage();
        if (usage.isAlpha()) {
            if (usage.isAlphaMask()) {
                // Texture has alpha, but it is just a mask
                _key.setOpacityMaskMap(true);
                _key.setTranslucentMap(false);
            } else {
                // Texture has alpha, it is a true translucency channel
                _key.setOpacityMaskMap(false);
                _key.setTranslucentMap(true);
            }
        }
    }

    _schemaBuffer.edit<Schema>()._key = (uint32)_key._flags.to_ulong();
}

@@ -291,15 +291,17 @@ public:
    const TextureMaps& getTextureMaps() const { return _textureMaps; }
    const TextureMapPointer getTextureMap(MapChannel channel) const;

    // Albedo maps cannot have opacity detected until they are loaded
    // This method allows const changing of the key/schemaBuffer without touching the map
    void resetOpacityMap() const;

    // conversion from legacy material properties to PBR equivalent
    static float shininessToRoughness(float shininess) { return 1.0f - shininess / 100.0f; }

protected:

    MaterialKey _key;
    UniformBufferView _schemaBuffer;
private:
    mutable MaterialKey _key;
    mutable UniformBufferView _schemaBuffer;
    TextureMaps _textureMaps;

};
typedef std::shared_ptr< Material > MaterialPointer;

@@ -15,6 +15,8 @@

#include <LogHandler.h>

#include "Socket.h"

using namespace udt;

static int packetMetaTypeId = qRegisterMetaType<Packet*>("Packet*");

@@ -28,15 +30,10 @@ static const std::array<Key, 4> KEYS {{
}};

void xorHelper(char* start, int size, Key key) {
    const auto end = start + size;

    auto p = start;
    for (; p + sizeof(Key) < end; p += sizeof(Key)) {
        *reinterpret_cast<Key*>(p) ^= key;
    }

    for (int i = 0; p < end; ++p || ++i) {
        *p ^= *(reinterpret_cast<const char*>(&key) + i);
    auto current = start;
    auto xorValue = reinterpret_cast<const char*>(&key);
    for (int i = 0; i < size; ++i) {
        *(current++) ^= *(xorValue + (i % sizeof(Key)));
    }
}
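
A standalone sketch (not from the commit) of the byte-wise helper above, with Key assumed to be a 64-bit integer. Because XOR is its own inverse, running the helper twice with the same key restores the original buffer, so sender and receiver can share identical obfuscation code; the byte-wise loop also handles buffers whose size is not a multiple of sizeof(Key).

#include <cassert>
#include <cstdint>
#include <cstring>

using Key = uint64_t;   // assumption for the sketch

static void xorHelperSketch(char* start, int size, Key key) {
    auto current = start;
    auto xorValue = reinterpret_cast<const char*>(&key);
    for (int i = 0; i < size; ++i) {
        *(current++) ^= *(xorValue + (i % sizeof(Key)));   // cycle through the key bytes
    }
}

int main() {
    char payload[] = "hello world";                 // length need not be a multiple of sizeof(Key)
    char original[sizeof(payload)];
    std::memcpy(original, payload, sizeof(payload));

    const Key key = 0x0123456789ABCDEFULL;          // placeholder key
    xorHelperSketch(payload, sizeof(payload), key); // obfuscate
    xorHelperSketch(payload, sizeof(payload), key); // deobfuscate
    assert(std::memcmp(payload, original, sizeof(payload)) == 0);
    return 0;
}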

@@ -51,7 +51,7 @@ PacketQueue::PacketPointer PacketQueue::takePacket() {
        --_currentIndex;
    }

    return std::move(packet);
    return packet;
}

unsigned int PacketQueue::nextIndex() {

@@ -404,8 +404,8 @@ int SendQueue::maybeSendNewPacket() {
            _socket->writeBasePacket(*pairTailPacket, _destination);
        }

        // we attempted to send two packets, return 2
        return 2;
        // return the number of attempted packet sends
        return shouldSendPairTail ? 2 : 1;
    } else {
        // we attempted to send a single packet, return 1
        return 1;

@@ -37,6 +37,7 @@
#include <NetworkAccessManager.h>
#include <OctalCode.h>
#include <udt/PacketHeaders.h>
#include <ResourceManager.h>
#include <SharedUtil.h>
#include <PathUtils.h>
#include <Gzip.h>

@@ -1674,35 +1675,24 @@ bool Octree::readJSONFromGzippedFile(QString qFileName) {
}

bool Octree::readFromURL(const QString& urlString) {
    bool readOk = false;
    auto request = std::unique_ptr<ResourceRequest>(ResourceManager::createResourceRequest(this, urlString));

    // determine if this is a local file or a network resource
    QUrl url(urlString);

    if (url.isLocalFile()) {
        readOk = readFromFile(qPrintable(url.toLocalFile()));
    } else {
        QNetworkRequest request;
        request.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
        request.setUrl(url);

        QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
        QNetworkReply* reply = networkAccessManager.get(request);

        qCDebug(octree) << "Downloading svo at" << qPrintable(urlString);

        QEventLoop loop;
        QObject::connect(reply, SIGNAL(finished()), &loop, SLOT(quit()));
        loop.exec();

        if (reply->error() == QNetworkReply::NoError) {
            int resourceSize = reply->bytesAvailable();
            QDataStream inputStream(reply);
            readOk = readFromStream(resourceSize, inputStream);
        }
        delete reply;
    if (!request) {
        return false;
    }
    return readOk;

    QEventLoop loop;
    connect(request.get(), &ResourceRequest::finished, &loop, &QEventLoop::quit);
    request->send();
    loop.exec();

    if (request->getResult() != ResourceRequest::Success) {
        return false;
    }

    auto data = request->getData();
    QDataStream inputStream(data);
    return readFromStream(data.size(), inputStream);
}
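
A minimal, self-contained sketch (not from the commit) of the wait-with-a-local-event-loop pattern the new readFromURL() relies on: connect the operation's finished signal to QEventLoop::quit, start the operation, then exec() until the signal fires. QTimer stands in for the asynchronous ResourceRequest here; the timeout value is arbitrary.

#include <QCoreApplication>
#include <QDebug>
#include <QEventLoop>
#include <QTimer>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QEventLoop loop;
    // Pretend this single-shot timeout is the request's finished signal.
    QTimer::singleShot(100, &loop, &QEventLoop::quit);
    loop.exec();   // blocks here while still processing events

    qDebug() << "finished, continue with the downloaded data";
    return 0;
}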

@@ -4,11 +4,11 @@

#include "PluginContainer.h"

void DisplayPlugin::activate() {
    Parent::activate();
bool DisplayPlugin::activate() {
    if (isHmd() && (getHmdScreen() >= 0)) {
        _container->showDisplayPluginsTools();
    }
    return Parent::activate();
}

void DisplayPlugin::deactivate() {

@@ -59,7 +59,7 @@ class DisplayPlugin : public Plugin {
    Q_OBJECT
    using Parent = Plugin;
public:
    void activate() override;
    bool activate() override;
    void deactivate() override;
    virtual bool isHmd() const { return false; }
    virtual int getHmdScreen() const { return -1; }

@@ -38,8 +38,10 @@ public:
    virtual void deinit();

    /// Called when a plugin is being activated for use. May be called multiple times.
    virtual void activate() {
    /// Returns true if plugin was successfully activated.
    virtual bool activate() {
        _active = true;
        return _active;
    }

    /// Called when a plugin is no longer being used. May be called multiple times.

@@ -45,14 +45,14 @@ template <> void payloadRender(const MeshPartPayload::Pointer& payload, RenderArgs* args) {
    }
}

MeshPartPayload::MeshPartPayload(model::MeshPointer mesh, int partIndex, model::MaterialPointer material, const Transform& transform, const Transform& offsetTransform) {
MeshPartPayload::MeshPartPayload(const std::shared_ptr<const model::Mesh>& mesh, int partIndex, model::MaterialPointer material, const Transform& transform, const Transform& offsetTransform) {

    updateMeshPart(mesh, partIndex);
    updateMaterial(material);
    updateTransform(transform, offsetTransform);
}

void MeshPartPayload::updateMeshPart(model::MeshPointer drawMesh, int partIndex) {
void MeshPartPayload::updateMeshPart(const std::shared_ptr<const model::Mesh>& drawMesh, int partIndex) {
    _drawMesh = drawMesh;
    if (_drawMesh) {
        auto vertexFormat = _drawMesh->getVertexFormat();

@@ -320,7 +320,9 @@ ModelMeshPartPayload::ModelMeshPartPayload(Model* model, int _meshIndex, int partIndex, int shapeIndex, ...) :
    _model(model),
    _meshIndex(_meshIndex),
    _shapeID(shapeIndex) {
    auto& modelMesh = _model->_geometry->getMeshes().at(_meshIndex)->_mesh;

    assert(_model && _model->isLoaded());
    auto& modelMesh = _model->getGeometry()->getGeometry()->getMeshes().at(_meshIndex);
    updateMeshPart(modelMesh, partIndex);

    updateTransform(transform, offsetTransform);

@@ -328,20 +330,22 @@ ModelMeshPartPayload::ModelMeshPartPayload(Model* model, int _meshIndex, int partIndex, ...)
}

void ModelMeshPartPayload::initCache() {
    assert(_model->isLoaded());

    if (_drawMesh) {
        auto vertexFormat = _drawMesh->getVertexFormat();
        _hasColorAttrib = vertexFormat->hasAttribute(gpu::Stream::COLOR);
        _isSkinned = vertexFormat->hasAttribute(gpu::Stream::SKIN_CLUSTER_WEIGHT) && vertexFormat->hasAttribute(gpu::Stream::SKIN_CLUSTER_INDEX);

        const FBXGeometry& geometry = _model->_geometry->getFBXGeometry();
        const FBXGeometry& geometry = _model->getFBXGeometry();
        const FBXMesh& mesh = geometry.meshes.at(_meshIndex);

        _isBlendShaped = !mesh.blendshapes.isEmpty();
    }

    auto networkMaterial = _model->_geometry->getShapeMaterial(_shapeID);
    auto networkMaterial = _model->getGeometry()->getGeometry()->getShapeMaterial(_shapeID);
    if (networkMaterial) {
        _drawMaterial = networkMaterial->_material;
        _drawMaterial = networkMaterial;
    };

}

@@ -380,8 +384,9 @@ Item::Bound ModelMeshPartPayload::getBound() const {
}

ShapeKey ModelMeshPartPayload::getShapeKey() const {
    const FBXGeometry& geometry = _model->_geometry->getFBXGeometry();
    const std::vector<std::unique_ptr<NetworkMesh>>& networkMeshes = _model->_geometry->getMeshes();
    assert(_model->isLoaded());
    const FBXGeometry& geometry = _model->getFBXGeometry();
    const auto& networkMeshes = _model->getGeometry()->getGeometry()->getMeshes();

    // guard against partially loaded meshes
    if (_meshIndex >= (int)networkMeshes.size() || _meshIndex >= (int)geometry.meshes.size() || _meshIndex >= (int)_model->_meshStates.size()) {

@@ -24,12 +24,12 @@ class Model;
class MeshPartPayload {
public:
    MeshPartPayload() {}
    MeshPartPayload(model::MeshPointer mesh, int partIndex, model::MaterialPointer material, const Transform& transform, const Transform& offsetTransform);
    MeshPartPayload(const std::shared_ptr<const model::Mesh>& mesh, int partIndex, model::MaterialPointer material, const Transform& transform, const Transform& offsetTransform);

    typedef render::Payload<MeshPartPayload> Payload;
    typedef Payload::DataPointer Pointer;

    virtual void updateMeshPart(model::MeshPointer drawMesh, int partIndex);
    virtual void updateMeshPart(const std::shared_ptr<const model::Mesh>& drawMesh, int partIndex);

    virtual void notifyLocationChanged() {}
    virtual void updateTransform(const Transform& transform, const Transform& offsetTransform);

@@ -49,11 +49,11 @@ public:
    virtual void bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, bool canCauterize = true) const;

    // Payload resource cached values
    model::MeshPointer _drawMesh;
    std::shared_ptr<const model::Mesh> _drawMesh;
    int _partIndex = 0;
    model::Mesh::Part _drawPart;

    model::MaterialPointer _drawMaterial;
    std::shared_ptr<const model::Material> _drawMaterial;

    model::Box _localBound;
    Transform _drawTransform;

@@ -30,7 +30,7 @@
using namespace std;

static int nakedModelPointerTypeId = qRegisterMetaType<ModelPointer>();
static int weakNetworkGeometryPointerTypeId = qRegisterMetaType<QWeakPointer<NetworkGeometry> >();
static int weakNetworkGeometryPointerTypeId = qRegisterMetaType<std::weak_ptr<NetworkGeometry> >();
static int vec3VectorTypeId = qRegisterMetaType<QVector<glm::vec3> >();
float Model::FAKE_DIMENSION_PLACEHOLDER = -1.0f;
#define HTTP_INVALID_COM "http://invalid.com"

@@ -74,12 +74,14 @@ Model::~Model() {

AbstractViewStateInterface* Model::_viewState = NULL;

bool Model::needsFixupInScene() {
bool Model::needsFixupInScene() const {
    if (readyToAddToScene()) {
        // Once textures are loaded, fixup if they are now transparent
        if (!_needsReload && _needsUpdateTransparentTextures && _geometry->isLoadedWithTextures()) {
        if (_needsUpdateTransparentTextures && _geometry->getGeometry()->areTexturesLoaded()) {
            _needsUpdateTransparentTextures = false;
            if (_hasTransparentTextures != _geometry->hasTransparentTextures()) {
            bool hasTransparentTextures = _geometry->getGeometry()->hasTransparentTextures();
            if (_hasTransparentTextures != hasTransparentTextures) {
                _hasTransparentTextures = hasTransparentTextures;
                return true;
            }
        }

@@ -150,19 +152,18 @@ void Model::enqueueLocationChange() {
}

void Model::initJointTransforms() {
    if (!_geometry || !_geometry->isLoaded()) {
        return;
    if (isLoaded()) {
        glm::mat4 modelOffset = glm::scale(_scale) * glm::translate(_offset);
        _rig->setModelOffset(modelOffset);
    }
    glm::mat4 modelOffset = glm::scale(_scale) * glm::translate(_offset);
    _rig->setModelOffset(modelOffset);
}

void Model::init() {
}

void Model::reset() {
    if (_geometry && _geometry->isLoaded()) {
        const FBXGeometry& geometry = _geometry->getFBXGeometry();
    if (isLoaded()) {
        const FBXGeometry& geometry = getFBXGeometry();
        _rig->reset(geometry);
    }
}

@@ -171,17 +172,16 @@ bool Model::updateGeometry() {
    PROFILE_RANGE(__FUNCTION__);
    bool needFullUpdate = false;

    if (!_geometry || !_geometry->isLoaded()) {
        // geometry is not ready
    if (!isLoaded()) {
        return false;
    }

    _needsReload = false;

    if (_rig->jointStatesEmpty() && _geometry->getFBXGeometry().joints.size() > 0) {
    if (_rig->jointStatesEmpty() && getFBXGeometry().joints.size() > 0) {
        initJointStates();

        const FBXGeometry& fbxGeometry = _geometry->getFBXGeometry();
        const FBXGeometry& fbxGeometry = getFBXGeometry();
        foreach (const FBXMesh& mesh, fbxGeometry.meshes) {
            MeshState state;
            state.clusterMatrices.resize(mesh.clusters.size());

@@ -205,7 +205,7 @@ bool Model::updateGeometry() {

// virtual
void Model::initJointStates() {
    const FBXGeometry& geometry = _geometry->getFBXGeometry();
    const FBXGeometry& geometry = getFBXGeometry();
    glm::mat4 modelOffset = glm::scale(_scale) * glm::translate(_offset);

    _rig->initJointStates(geometry, modelOffset);

@@ -248,7 +248,7 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const glm::vec3& direction, ...) {
    glm::vec3 subMeshSurfaceNormal;
    int subMeshIndex = 0;

    const FBXGeometry& geometry = _geometry->getFBXGeometry();
    const FBXGeometry& geometry = getFBXGeometry();

    // If we hit the models box, then consider the submeshes...
    _mutex.lock();

@@ -367,7 +367,7 @@ void Model::recalculateMeshBoxes(bool pickAgainstTriangles) {
    bool calculatedMeshTrianglesNeeded = pickAgainstTriangles && !_calculatedMeshTrianglesValid;

    if (!_calculatedMeshBoxesValid || calculatedMeshTrianglesNeeded || (!_calculatedMeshPartBoxesValid && pickAgainstTriangles) ) {
        const FBXGeometry& geometry = _geometry->getFBXGeometry();
        const FBXGeometry& geometry = getFBXGeometry();
        int numberOfMeshes = geometry.meshes.size();
        _calculatedMeshBoxes.resize(numberOfMeshes);
        _calculatedMeshTriangles.clear();

@@ -478,7 +478,7 @@ void Model::recalculateMeshBoxes(bool pickAgainstTriangles) {

void Model::renderSetup(RenderArgs* args) {
    // set up dilated textures on first render after load/simulate
    const FBXGeometry& geometry = _geometry->getFBXGeometry();
    const FBXGeometry& geometry = getFBXGeometry();
    if (_dilatedTextures.isEmpty()) {
        foreach (const FBXMesh& mesh, geometry.meshes) {
            QVector<QSharedPointer<Texture> > dilated;

@@ -627,7 +627,7 @@ Extents Model::getBindExtents() const {
    if (!isActive()) {
        return Extents();
    }
    const Extents& bindExtents = _geometry->getFBXGeometry().bindExtents;
    const Extents& bindExtents = getFBXGeometry().bindExtents;
    Extents scaledExtents = { bindExtents.minimum * _scale, bindExtents.maximum * _scale };
    return scaledExtents;
}

@@ -636,12 +636,12 @@ Extents Model::getMeshExtents() const {
    if (!isActive()) {
        return Extents();
    }
    const Extents& extents = _geometry->getFBXGeometry().meshExtents;
    const Extents& extents = getFBXGeometry().meshExtents;

    // even though our caller asked for "unscaled" we need to include any fst scaling, translation, and rotation, which
    // is captured in the offset matrix
    glm::vec3 minimum = glm::vec3(_geometry->getFBXGeometry().offset * glm::vec4(extents.minimum, 1.0f));
    glm::vec3 maximum = glm::vec3(_geometry->getFBXGeometry().offset * glm::vec4(extents.maximum, 1.0f));
    glm::vec3 minimum = glm::vec3(getFBXGeometry().offset * glm::vec4(extents.minimum, 1.0f));
    glm::vec3 maximum = glm::vec3(getFBXGeometry().offset * glm::vec4(extents.maximum, 1.0f));
    Extents scaledExtents = { minimum * _scale, maximum * _scale };
    return scaledExtents;
}

@@ -651,12 +651,12 @@ Extents Model::getUnscaledMeshExtents() const {
        return Extents();
    }

    const Extents& extents = _geometry->getFBXGeometry().meshExtents;
    const Extents& extents = getFBXGeometry().meshExtents;

    // even though our caller asked for "unscaled" we need to include any fst scaling, translation, and rotation, which
    // is captured in the offset matrix
    glm::vec3 minimum = glm::vec3(_geometry->getFBXGeometry().offset * glm::vec4(extents.minimum, 1.0f));
    glm::vec3 maximum = glm::vec3(_geometry->getFBXGeometry().offset * glm::vec4(extents.maximum, 1.0f));
    glm::vec3 minimum = glm::vec3(getFBXGeometry().offset * glm::vec4(extents.minimum, 1.0f));
    glm::vec3 maximum = glm::vec3(getFBXGeometry().offset * glm::vec4(extents.maximum, 1.0f));
    Extents scaledExtents = { minimum, maximum };

    return scaledExtents;

@@ -665,8 +665,8 @@ Extents Model::getUnscaledMeshExtents() const {
Extents Model::calculateScaledOffsetExtents(const Extents& extents,
                                            glm::vec3 modelPosition, glm::quat modelOrientation) const {
    // we need to include any fst scaling, translation, and rotation, which is captured in the offset matrix
    glm::vec3 minimum = glm::vec3(_geometry->getFBXGeometry().offset * glm::vec4(extents.minimum, 1.0f));
    glm::vec3 maximum = glm::vec3(_geometry->getFBXGeometry().offset * glm::vec4(extents.maximum, 1.0f));
    glm::vec3 minimum = glm::vec3(getFBXGeometry().offset * glm::vec4(extents.minimum, 1.0f));
    glm::vec3 maximum = glm::vec3(getFBXGeometry().offset * glm::vec4(extents.maximum, 1.0f));

    Extents scaledOffsetExtents = { ((minimum + _offset) * _scale),
                                    ((maximum + _offset) * _scale) };

@@ -686,7 +686,7 @@ AABox Model::calculateScaledOffsetAABox(const AABox& box, glm::vec3 modelPosition, ...) {

glm::vec3 Model::calculateScaledOffsetPoint(const glm::vec3& point) const {
    // we need to include any fst scaling, translation, and rotation, which is captured in the offset matrix
    glm::vec3 offsetPoint = glm::vec3(_geometry->getFBXGeometry().offset * glm::vec4(point, 1.0f));
    glm::vec3 offsetPoint = glm::vec3(getFBXGeometry().offset * glm::vec4(point, 1.0f));
    glm::vec3 scaledPoint = ((offsetPoint + _offset) * _scale);
    glm::vec3 rotatedPoint = _rotation * scaledPoint;
    glm::vec3 translatedPoint = rotatedPoint + _translation;

@@ -714,11 +714,11 @@ void Model::setJointTranslation(int index, bool valid, const glm::vec3& translation) {
}

int Model::getParentJointIndex(int jointIndex) const {
|
||||
return (isActive() && jointIndex != -1) ? _geometry->getFBXGeometry().joints.at(jointIndex).parentIndex : -1;
|
||||
return (isActive() && jointIndex != -1) ? getFBXGeometry().joints.at(jointIndex).parentIndex : -1;
|
||||
}
|
||||
|
||||
int Model::getLastFreeJointIndex(int jointIndex) const {
|
||||
return (isActive() && jointIndex != -1) ? _geometry->getFBXGeometry().joints.at(jointIndex).freeLineage.last() : -1;
|
||||
return (isActive() && jointIndex != -1) ? getFBXGeometry().joints.at(jointIndex).freeLineage.last() : -1;
|
||||
}
|
||||
|
||||
void Model::setURL(const QUrl& url) {
|
||||
|
@ -743,29 +743,16 @@ void Model::setURL(const QUrl& url) {
|
|||
invalidCalculatedMeshBoxes();
|
||||
deleteGeometry();
|
||||
|
||||
_geometry.reset(new NetworkGeometry(url, false, QVariantHash()));
|
||||
_geometry = DependencyManager::get<ModelCache>()->getGeometry(url);
|
||||
onInvalidate();
|
||||
}
|
||||
|
||||
const QSharedPointer<NetworkGeometry> Model::getCollisionGeometry(bool delayLoad)
|
||||
{
|
||||
if (_collisionGeometry.isNull() && !_collisionUrl.isEmpty()) {
|
||||
_collisionGeometry.reset(new NetworkGeometry(_collisionUrl, delayLoad, QVariantHash()));
|
||||
}
|
||||
|
||||
if (_collisionGeometry && _collisionGeometry->isLoaded()) {
|
||||
return _collisionGeometry;
|
||||
}
|
||||
|
||||
return QSharedPointer<NetworkGeometry>();
|
||||
}
|
||||
|
||||
void Model::setCollisionModelURL(const QUrl& url) {
|
||||
if (_collisionUrl == url) {
|
||||
return;
|
||||
}
|
||||
_collisionUrl = url;
|
||||
_collisionGeometry.reset(new NetworkGeometry(url, false, QVariantHash()));
|
||||
_collisionGeometry = DependencyManager::get<ModelCache>()->getGeometry(url);
|
||||
}
|
||||
|
||||
bool Model::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position) const {
|
||||
|
@ -815,13 +802,13 @@ QStringList Model::getJointNames() const {
|
|||
Q_RETURN_ARG(QStringList, result));
|
||||
return result;
|
||||
}
|
||||
return isActive() ? _geometry->getFBXGeometry().getJointNames() : QStringList();
|
||||
return isActive() ? getFBXGeometry().getJointNames() : QStringList();
|
||||
}
|
||||
|
||||
class Blender : public QRunnable {
|
||||
public:
|
||||
|
||||
Blender(ModelPointer model, int blendNumber, const QWeakPointer<NetworkGeometry>& geometry,
|
||||
Blender(ModelPointer model, int blendNumber, const std::weak_ptr<NetworkGeometry>& geometry,
|
||||
const QVector<FBXMesh>& meshes, const QVector<float>& blendshapeCoefficients);
|
||||
|
||||
virtual void run();
|
||||
|
@ -830,12 +817,12 @@ private:
|
|||
|
||||
ModelPointer _model;
|
||||
int _blendNumber;
|
||||
QWeakPointer<NetworkGeometry> _geometry;
|
||||
std::weak_ptr<NetworkGeometry> _geometry;
|
||||
QVector<FBXMesh> _meshes;
|
||||
QVector<float> _blendshapeCoefficients;
|
||||
};
|
||||
|
||||
Blender::Blender(ModelPointer model, int blendNumber, const QWeakPointer<NetworkGeometry>& geometry,
|
||||
Blender::Blender(ModelPointer model, int blendNumber, const std::weak_ptr<NetworkGeometry>& geometry,
|
||||
const QVector<FBXMesh>& meshes, const QVector<float>& blendshapeCoefficients) :
|
||||
_model(model),
|
||||
_blendNumber(blendNumber),
|
||||
|
@ -878,7 +865,7 @@ void Blender::run() {
|
|||
// post the result to the geometry cache, which will dispatch to the model if still alive
|
||||
QMetaObject::invokeMethod(DependencyManager::get<ModelBlender>().data(), "setBlendedVertices",
|
||||
Q_ARG(ModelPointer, _model), Q_ARG(int, _blendNumber),
|
||||
Q_ARG(const QWeakPointer<NetworkGeometry>&, _geometry), Q_ARG(const QVector<glm::vec3>&, vertices),
|
||||
Q_ARG(const std::weak_ptr<NetworkGeometry>&, _geometry), Q_ARG(const QVector<glm::vec3>&, vertices),
|
||||
Q_ARG(const QVector<glm::vec3>&, normals));
|
||||
}
|
||||
|
||||
|
@ -1010,7 +997,7 @@ void Model::updateClusterMatrices(glm::vec3 modelPosition, glm::quat modelOrient
|
|||
return;
|
||||
}
|
||||
_needsUpdateClusterMatrices = false;
|
||||
const FBXGeometry& geometry = _geometry->getFBXGeometry();
|
||||
const FBXGeometry& geometry = getFBXGeometry();
|
||||
glm::mat4 zeroScale(glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
|
||||
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
|
||||
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
|
||||
|
@ -1067,41 +1054,44 @@ void Model::updateClusterMatrices(glm::vec3 modelPosition, glm::quat modelOrient
|
|||
}
|
||||
|
||||
void Model::inverseKinematics(int endIndex, glm::vec3 targetPosition, const glm::quat& targetRotation, float priority) {
|
||||
const FBXGeometry& geometry = _geometry->getFBXGeometry();
|
||||
const FBXGeometry& geometry = getFBXGeometry();
|
||||
const QVector<int>& freeLineage = geometry.joints.at(endIndex).freeLineage;
|
||||
glm::mat4 parentTransform = glm::scale(_scale) * glm::translate(_offset);
|
||||
_rig->inverseKinematics(endIndex, targetPosition, targetRotation, priority, freeLineage, parentTransform);
|
||||
}
|
||||
|
||||
bool Model::restoreJointPosition(int jointIndex, float fraction, float priority) {
|
||||
const FBXGeometry& geometry = _geometry->getFBXGeometry();
|
||||
const FBXGeometry& geometry = getFBXGeometry();
|
||||
const QVector<int>& freeLineage = geometry.joints.at(jointIndex).freeLineage;
|
||||
return _rig->restoreJointPosition(jointIndex, fraction, priority, freeLineage);
|
||||
}
|
||||
|
||||
float Model::getLimbLength(int jointIndex) const {
|
||||
const FBXGeometry& geometry = _geometry->getFBXGeometry();
|
||||
const FBXGeometry& geometry = getFBXGeometry();
|
||||
const QVector<int>& freeLineage = geometry.joints.at(jointIndex).freeLineage;
|
||||
return _rig->getLimbLength(jointIndex, freeLineage, _scale, geometry.joints);
|
||||
}
|
||||
|
||||
bool Model::maybeStartBlender() {
const FBXGeometry& fbxGeometry = _geometry->getFBXGeometry();
if (fbxGeometry.hasBlendedMeshes()) {
QThreadPool::globalInstance()->start(new Blender(getThisPointer(), ++_blendNumber, _geometry,
fbxGeometry.meshes, _blendshapeCoefficients));
return true;
if (isLoaded()) {
const FBXGeometry& fbxGeometry = getFBXGeometry();
if (fbxGeometry.hasBlendedMeshes()) {
QThreadPool::globalInstance()->start(new Blender(getThisPointer(), ++_blendNumber, _geometry,
fbxGeometry.meshes, _blendshapeCoefficients));
return true;
}
}
return false;
}

void Model::setBlendedVertices(int blendNumber, const QWeakPointer<NetworkGeometry>& geometry,
void Model::setBlendedVertices(int blendNumber, const std::weak_ptr<NetworkGeometry>& geometry,
const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals) {
if (_geometry != geometry || _blendedVertexBuffers.empty() || blendNumber < _appliedBlendNumber) {
auto geometryRef = geometry.lock();
if (!geometryRef || _geometry != geometryRef || _blendedVertexBuffers.empty() || blendNumber < _appliedBlendNumber) {
return;
}
_appliedBlendNumber = blendNumber;
const FBXGeometry& fbxGeometry = _geometry->getFBXGeometry();
const FBXGeometry& fbxGeometry = getFBXGeometry();
int index = 0;
for (int i = 0; i < fbxGeometry.meshes.size(); i++) {
const FBXMesh& mesh = fbxGeometry.meshes.at(i);
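The change above swaps the Blender's geometry handle from QWeakPointer to std::weak_ptr and makes setBlendedVertices lock the handle and compare it against the model's current geometry before applying the result. Below is a minimal, self-contained sketch of that handoff pattern; the Geometry/Consumer names are illustrative stand-ins, not the engine's actual types.

#include <iostream>
#include <memory>
#include <vector>

// Hypothetical stand-ins for NetworkGeometry and the blend result.
struct Geometry {
    std::vector<float> vertices;
};

struct Consumer {
    std::shared_ptr<Geometry> current;   // what the model currently renders
    int appliedBlendNumber = 0;

    // Mirrors the guard added in the diff: lock the weak_ptr, then make sure
    // the result still refers to the geometry we hold and is not stale.
    void applyBlend(int blendNumber, const std::weak_ptr<Geometry>& source,
                    const std::vector<float>& blended) {
        auto locked = source.lock();
        if (!locked || locked != current || blendNumber < appliedBlendNumber) {
            return;   // geometry was swapped/released, or result is out of date
        }
        appliedBlendNumber = blendNumber;
        current->vertices = blended;
    }
};

int main() {
    Consumer consumer;
    consumer.current = std::make_shared<Geometry>();

    std::weak_ptr<Geometry> handle = consumer.current;   // handed to the worker
    consumer.applyBlend(1, handle, {0.1f, 0.2f});        // applied
    consumer.current = std::make_shared<Geometry>();     // e.g. a URL change swaps geometry
    consumer.applyBlend(2, handle, {0.3f});              // silently dropped
    std::cout << consumer.current->vertices.size() << "\n";  // prints 0
    return 0;
}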
|
@ -1118,13 +1108,6 @@ void Model::setBlendedVertices(int blendNumber, const QWeakPointer<NetworkGeomet
|
|||
}
|
||||
}
|
||||
|
||||
void Model::setGeometry(const QSharedPointer<NetworkGeometry>& newGeometry) {
|
||||
if (_geometry == newGeometry) {
|
||||
return;
|
||||
}
|
||||
_geometry = newGeometry;
|
||||
}
|
||||
|
||||
void Model::deleteGeometry() {
|
||||
_blendedVertexBuffers.clear();
|
||||
_meshStates.clear();
|
||||
|
@ -1134,7 +1117,7 @@ void Model::deleteGeometry() {
|
|||
|
||||
AABox Model::getPartBounds(int meshIndex, int partIndex, glm::vec3 modelPosition, glm::quat modelOrientation) const {
|
||||
|
||||
if (!_geometry || !_geometry->isLoaded()) {
|
||||
if (!isLoaded()) {
|
||||
return AABox();
|
||||
}
|
||||
|
||||
|
@ -1143,10 +1126,10 @@ AABox Model::getPartBounds(int meshIndex, int partIndex, glm::vec3 modelPosition
|
|||
bool isSkinned = state.clusterMatrices.size() > 1;
|
||||
if (isSkinned) {
|
||||
// if we're skinned return the entire mesh extents because we can't know for sure our clusters don't move us
|
||||
return calculateScaledOffsetAABox(_geometry->getFBXGeometry().meshExtents, modelPosition, modelOrientation);
|
||||
return calculateScaledOffsetAABox(getFBXGeometry().meshExtents, modelPosition, modelOrientation);
|
||||
}
|
||||
}
|
||||
if (_geometry->getFBXGeometry().meshes.size() > meshIndex) {
|
||||
if (getFBXGeometry().meshes.size() > meshIndex) {
|
||||
|
||||
// FIX ME! - This is currently a hack because for some mesh parts our efforts to calculate the bounding
|
||||
// box of the mesh part fails. It seems to create boxes that are not consistent with where the
|
||||
|
@ -1160,27 +1143,28 @@ AABox Model::getPartBounds(int meshIndex, int partIndex, glm::vec3 modelPosition
|
|||
// return _calculatedMeshBoxes[meshIndex];
|
||||
//
|
||||
// If we not skinned use the bounds of the subMesh for all it's parts
|
||||
const FBXMesh& mesh = _geometry->getFBXGeometry().meshes.at(meshIndex);
|
||||
const FBXMesh& mesh = getFBXGeometry().meshes.at(meshIndex);
|
||||
return calculateScaledOffsetExtents(mesh.meshExtents, modelPosition, modelOrientation);
|
||||
}
|
||||
return AABox();
|
||||
}
|
||||
|
||||
void Model::segregateMeshGroups() {
|
||||
QSharedPointer<NetworkGeometry> networkGeometry;
|
||||
NetworkGeometry::Pointer networkGeometry;
|
||||
bool showingCollisionHull = false;
|
||||
if (_showCollisionHull && _collisionGeometry) {
|
||||
if (_collisionGeometry->isLoaded()) {
|
||||
if (isCollisionLoaded()) {
|
||||
networkGeometry = _collisionGeometry;
|
||||
showingCollisionHull = true;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
assert(isLoaded());
|
||||
networkGeometry = _geometry;
|
||||
}
|
||||
const FBXGeometry& geometry = networkGeometry->getFBXGeometry();
|
||||
const std::vector<std::unique_ptr<NetworkMesh>>& networkMeshes = networkGeometry->getMeshes();
|
||||
const FBXGeometry& geometry = networkGeometry->getGeometry()->getGeometry();
|
||||
const auto& networkMeshes = networkGeometry->getGeometry()->getMeshes();
|
||||
|
||||
// all of our mesh vectors must match in size
|
||||
auto geoMeshesSize = geometry.meshes.size();
|
||||
|
@ -1208,7 +1192,7 @@ void Model::segregateMeshGroups() {
|
|||
int shapeID = 0;
|
||||
for (int i = 0; i < (int)networkMeshes.size(); i++) {
|
||||
const FBXMesh& mesh = geometry.meshes.at(i);
|
||||
const NetworkMesh& networkMesh = *(networkMeshes.at(i).get());
|
||||
const auto& networkMesh = networkMeshes.at(i);
|
||||
|
||||
// Create the render payloads
|
||||
int totalParts = mesh.parts.size();
|
||||
|
@ -1220,7 +1204,7 @@ void Model::segregateMeshGroups() {
|
|||
_collisionHullMaterial->setMetallic(0.02f);
|
||||
_collisionHullMaterial->setRoughness(0.5f);
|
||||
}
|
||||
_renderItemsSet << std::make_shared<MeshPartPayload>(networkMesh._mesh, partIndex, _collisionHullMaterial, transform, offset);
|
||||
_renderItemsSet << std::make_shared<MeshPartPayload>(networkMesh, partIndex, _collisionHullMaterial, transform, offset);
|
||||
} else {
|
||||
_renderItemsSet << std::make_shared<ModelMeshPartPayload>(this, i, partIndex, shapeID, transform, offset);
|
||||
}
|
||||
|
@ -1285,7 +1269,7 @@ void ModelBlender::noteRequiresBlend(ModelPointer model) {
|
|||
}
|
||||
|
||||
void ModelBlender::setBlendedVertices(ModelPointer model, int blendNumber,
|
||||
const QWeakPointer<NetworkGeometry>& geometry, const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals) {
|
||||
const std::weak_ptr<NetworkGeometry>& geometry, const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals) {
|
||||
if (model) {
|
||||
model->setBlendedVertices(blendNumber, geometry, vertices, normals);
|
||||
}
|
||||
|
|
|
@ -73,14 +73,15 @@ public:
|
|||
}
|
||||
|
||||
/// Sets the URL of the model to render.
|
||||
// Should only be called from the model's rendering thread to avoid access violations of changed geometry.
|
||||
Q_INVOKABLE void setURL(const QUrl& url);
|
||||
const QUrl& getURL() const { return _url; }
|
||||
|
||||
// new Scene/Engine rendering support
|
||||
void setVisibleInScene(bool newValue, std::shared_ptr<render::Scene> scene);
|
||||
bool needsFixupInScene();
|
||||
bool readyToAddToScene(RenderArgs* renderArgs = nullptr) {
|
||||
return !_needsReload && isRenderable() && isActive() && isLoaded();
|
||||
bool needsFixupInScene() const;
|
||||
bool readyToAddToScene(RenderArgs* renderArgs = nullptr) const {
|
||||
return !_needsReload && isRenderable() && isActive();
|
||||
}
|
||||
bool initWhenReady(render::ScenePointer scene);
|
||||
bool addToScene(std::shared_ptr<render::Scene> scene,
|
||||
|
@ -92,7 +93,7 @@ public:
|
|||
bool showCollisionHull = false);
|
||||
void removeFromScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
|
||||
void renderSetup(RenderArgs* args);
|
||||
bool isRenderable() const { return !_meshStates.isEmpty() || (isActive() && _geometry->getMeshes().empty()); }
|
||||
bool isRenderable() const { return !_meshStates.isEmpty() || (isActive() && getGeometry()->getGeometry()->getMeshes().empty()); }
|
||||
|
||||
bool isVisible() const { return _isVisible; }
|
||||
|
||||
|
@ -102,11 +103,11 @@ public:
|
|||
bool maybeStartBlender();
|
||||
|
||||
/// Sets blended vertices computed in a separate thread.
|
||||
void setBlendedVertices(int blendNumber, const QWeakPointer<NetworkGeometry>& geometry,
|
||||
void setBlendedVertices(int blendNumber, const std::weak_ptr<NetworkGeometry>& geometry,
|
||||
const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals);
|
||||
|
||||
bool isLoaded() const { return _geometry && _geometry->isLoaded(); }
|
||||
bool isLoadedWithTextures() const { return _geometry && _geometry->isLoadedWithTextures(); }
|
||||
bool isLoaded() const { return _geometry && _geometry->getGeometry(); }
|
||||
bool isCollisionLoaded() const { return _collisionGeometry && _collisionGeometry->getGeometry(); }
|
||||
|
||||
void setIsWireframe(bool isWireframe) { _isWireframe = isWireframe; }
|
||||
bool isWireframe() const { return _isWireframe; }
|
||||
|
@ -123,12 +124,23 @@ public:
virtual void updateClusterMatrices(glm::vec3 modelPosition, glm::quat modelOrientation);

/// Returns a reference to the shared geometry.
const QSharedPointer<NetworkGeometry>& getGeometry() const { return _geometry; }
const NetworkGeometry::Pointer& getGeometry() const { return _geometry; }
/// Returns a reference to the shared collision geometry.
const NetworkGeometry::Pointer& getCollisionGeometry() const { return _collisionGeometry; }

bool isActive() const { return _geometry && _geometry->isLoaded(); }
/// Provided as a convenience, will crash if !isLoaded()
// And so that getGeometry() isn't chained everywhere
const FBXGeometry& getFBXGeometry() const { assert(isLoaded()); return getGeometry()->getGeometry()->getGeometry(); }
/// Provided as a convenience, will crash if !isCollisionLoaded()
const FBXGeometry& getCollisionFBXGeometry() const { assert(isCollisionLoaded()); return getCollisionGeometry()->getGeometry()->getGeometry(); }

Q_INVOKABLE void setTextureWithNameToURL(const QString& name, const QUrl& url)
{ _geometry->setTextureWithNameToURL(name, url); }
// Set the model to use for collisions.
// Should only be called from the model's rendering thread to avoid access violations of changed geometry.
Q_INVOKABLE void setCollisionModelURL(const QUrl& url);
const QUrl& getCollisionURL() const { return _collisionUrl; }

bool isActive() const { return isLoaded(); }

bool convexHullContains(glm::vec3 point);
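The new getFBXGeometry()/getCollisionFBXGeometry() accessors are documented to crash if the geometry is not loaded, so callers are expected to check isLoaded() (or isActive()) first. A small stand-alone sketch of that assert-guarded accessor contract, with illustrative names:

#include <cassert>
#include <iostream>
#include <memory>
#include <string>

// Illustrative only: a resource holder with an assert-guarded convenience
// accessor, mirroring the isLoaded()/getFBXGeometry() contract above.
class ResourceHolder {
public:
    bool isLoaded() const { return _data != nullptr; }

    // Convenience accessor: callers must check isLoaded() first; the assert
    // documents (and enforces in debug builds) that contract.
    const std::string& getData() const { assert(isLoaded()); return *_data; }

    void load(const std::string& value) { _data = std::make_unique<std::string>(value); }

private:
    std::unique_ptr<std::string> _data;
};

int main() {
    ResourceHolder holder;
    if (holder.isLoaded()) {               // guard before the convenience accessor
        std::cout << holder.getData() << "\n";
    }
    holder.load("geometry");
    std::cout << holder.getData() << "\n"; // safe: isLoaded() is now true
    return 0;
}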
|
@ -143,13 +155,6 @@ public:
|
|||
BoxFace& face, glm::vec3& surfaceNormal,
|
||||
QString& extraInfo, bool pickAgainstTriangles = false);
|
||||
|
||||
// Set the model to use for collisions
|
||||
Q_INVOKABLE void setCollisionModelURL(const QUrl& url);
|
||||
const QUrl& getCollisionURL() const { return _collisionUrl; }
|
||||
|
||||
/// Returns a reference to the shared collision geometry.
|
||||
const QSharedPointer<NetworkGeometry> getCollisionGeometry(bool delayLoad = false);
|
||||
|
||||
void setOffset(const glm::vec3& offset);
|
||||
const glm::vec3& getOffset() const { return _offset; }
|
||||
|
||||
|
@ -257,8 +262,7 @@ protected:
|
|||
/// \return true if joint exists
|
||||
bool getJointPosition(int jointIndex, glm::vec3& position) const;
|
||||
|
||||
QSharedPointer<NetworkGeometry> _geometry;
|
||||
void setGeometry(const QSharedPointer<NetworkGeometry>& newGeometry);
|
||||
NetworkGeometry::Pointer _geometry;
|
||||
|
||||
glm::vec3 _translation;
|
||||
glm::quat _rotation;
|
||||
|
@ -325,7 +329,7 @@ protected:
|
|||
void deleteGeometry();
|
||||
void initJointTransforms();
|
||||
|
||||
QSharedPointer<NetworkGeometry> _collisionGeometry;
|
||||
NetworkGeometry::Pointer _collisionGeometry;
|
||||
|
||||
float _pupilDilation;
|
||||
QVector<float> _blendshapeCoefficients;
|
||||
|
@ -376,8 +380,8 @@ protected:
|
|||
bool _readyWhenAdded { false };
|
||||
bool _needsReload { true };
|
||||
bool _needsUpdateClusterMatrices { true };
|
||||
bool _needsUpdateTransparentTextures { true };
|
||||
bool _hasTransparentTextures { false };
|
||||
mutable bool _needsUpdateTransparentTextures { true };
|
||||
mutable bool _hasTransparentTextures { false };
|
||||
bool _showCollisionHull { false };
|
||||
|
||||
friend class ModelMeshPartPayload;
|
||||
|
@ -385,7 +389,7 @@ protected:
|
|||
};
|
||||
|
||||
Q_DECLARE_METATYPE(ModelPointer)
|
||||
Q_DECLARE_METATYPE(QWeakPointer<NetworkGeometry>)
|
||||
Q_DECLARE_METATYPE(std::weak_ptr<NetworkGeometry>)
|
||||
|
||||
/// Handle management of pending models that need blending
|
||||
class ModelBlender : public QObject, public Dependency {
|
||||
|
@ -398,7 +402,7 @@ public:
|
|||
void noteRequiresBlend(ModelPointer model);
|
||||
|
||||
public slots:
|
||||
void setBlendedVertices(ModelPointer model, int blendNumber, const QWeakPointer<NetworkGeometry>& geometry,
|
||||
void setBlendedVertices(ModelPointer model, int blendNumber, const std::weak_ptr<NetworkGeometry>& geometry,
|
||||
const QVector<glm::vec3>& vertices, const QVector<glm::vec3>& normals);
|
||||
|
||||
private:
|
||||
|
|
|
@ -17,12 +17,14 @@

#include <gpu/Context.h>

#include "EngineStats.h"

using namespace render;

Engine::Engine() :
_sceneContext(std::make_shared<SceneContext>()),
_renderContext(std::make_shared<RenderContext>()) {
addJob<EngineStats>("Stats");
}

void Engine::load() {

@ -57,4 +59,6 @@ void Engine::run() {
for (auto job : _jobs) {
job.run(_sceneContext, _renderContext);
}

}
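The engine constructor now registers an EngineStats job, and run() simply iterates its job list each frame. The sketch below shows the general job-list idea with plain callables; it is illustrative only and does not reproduce the actual render::Task/Job/Config machinery.

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Minimal sketch of a job-list engine: each job is a named callable run once
// per frame.
class MiniEngine {
public:
    void addJob(const std::string& name, std::function<void()> job) {
        _jobs.push_back({name, std::move(job)});
    }

    void run() {                       // one frame: run every job in order
        for (auto& job : _jobs) {
            job.fn();
        }
    }

private:
    struct NamedJob { std::string name; std::function<void()> fn; };
    std::vector<NamedJob> _jobs;
};

int main() {
    MiniEngine engine;
    engine.addJob("Stats", [] { std::cout << "collect stats\n"; });
    engine.addJob("Draw",  [] { std::cout << "draw scene\n"; });
    engine.run();
    return 0;
}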
|
|
|
@ -16,37 +16,37 @@
|
|||
|
||||
#include "Context.h"
|
||||
#include "Task.h"
|
||||
|
||||
namespace render {
|
||||
|
||||
// The render engine holds all render tasks, and is itself a render task.
|
||||
// State flows through tasks to jobs via the render and scene contexts -
|
||||
// the engine should not be known from its jobs.
|
||||
class Engine : public Task {
|
||||
public:
|
||||
Engine();
|
||||
~Engine() = default;
|
||||
// The render engine holds all render tasks, and is itself a render task.
|
||||
// State flows through tasks to jobs via the render and scene contexts -
|
||||
// the engine should not be known from its jobs.
|
||||
class Engine : public Task {
|
||||
public:
|
||||
|
||||
// Load any persisted settings, and set up the presets
|
||||
// This should be run after adding all jobs, and before building ui
|
||||
void load();
|
||||
Engine();
|
||||
~Engine() = default;
|
||||
|
||||
// Register the scene
|
||||
void registerScene(const ScenePointer& scene) { _sceneContext->_scene = scene; }
|
||||
// Load any persisted settings, and set up the presets
|
||||
// This should be run after adding all jobs, and before building ui
|
||||
void load();
|
||||
|
||||
// Push a RenderContext
|
||||
void setRenderContext(const RenderContext& renderContext) { (*_renderContext) = renderContext; }
|
||||
RenderContextPointer getRenderContext() const { return _renderContext; }
|
||||
// Register the scene
|
||||
void registerScene(const ScenePointer& scene) { _sceneContext->_scene = scene; }
|
||||
|
||||
// Render a frame
|
||||
// A frame must have a scene registered and a context set to render
|
||||
void run();
|
||||
// Push a RenderContext
|
||||
void setRenderContext(const RenderContext& renderContext) { (*_renderContext) = renderContext; }
|
||||
RenderContextPointer getRenderContext() const { return _renderContext; }
|
||||
|
||||
protected:
|
||||
SceneContextPointer _sceneContext;
|
||||
RenderContextPointer _renderContext;
|
||||
};
|
||||
using EnginePointer = std::shared_ptr<Engine>;
|
||||
// Render a frame
|
||||
// A frame must have a scene registered and a context set to render
|
||||
void run();
|
||||
|
||||
protected:
|
||||
SceneContextPointer _sceneContext;
|
||||
RenderContextPointer _renderContext;
|
||||
};
|
||||
using EnginePointer = std::shared_ptr<Engine>;
|
||||
|
||||
}
|
||||
|
||||
|
|
49
libraries/render/src/render/EngineStats.cpp
Normal file
|
@ -0,0 +1,49 @@
//
// EngineStats.cpp
// render/src/render
//
// Created by Sam Gateau on 3/27/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "EngineStats.h"

#include <gpu/Texture.h>

using namespace render;

void EngineStats::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
// Tick time

quint64 msecsElapsed = _frameTimer.restart();
double frequency = 1000.0 / msecsElapsed;

// Update the stats
auto config = std::static_pointer_cast<Config>(renderContext->jobConfig);

config->bufferCPUCount = gpu::Buffer::getBufferCPUCount();
config->bufferGPUCount = gpu::Buffer::getBufferGPUCount();
config->bufferCPUMemoryUsage = gpu::Buffer::getBufferCPUMemoryUsage();
config->bufferGPUMemoryUsage = gpu::Buffer::getBufferGPUMemoryUsage();

config->textureCPUCount = gpu::Texture::getTextureCPUCount();
config->textureGPUCount = gpu::Texture::getTextureGPUCount();
config->textureCPUMemoryUsage = gpu::Texture::getTextureCPUMemoryUsage();
config->textureGPUMemoryUsage = gpu::Texture::getTextureGPUMemoryUsage();

gpu::ContextStats gpuStats(_gpuStats);
renderContext->args->_context->getStats(_gpuStats);

config->frameDrawcallCount = _gpuStats._DSNumDrawcalls - gpuStats._DSNumDrawcalls;
config->frameDrawcallRate = config->frameDrawcallCount * frequency;

config->frameTriangleCount = _gpuStats._DSNumTriangles - gpuStats._DSNumTriangles;
config->frameTriangleRate = config->frameTriangleCount * frequency;

config->frameTextureCount = _gpuStats._RSNumTextureBounded - gpuStats._RSNumTextureBounded;
config->frameTextureRate = config->frameTextureCount * frequency;

config->emitDirty();
}
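EngineStats::run() turns cumulative GPU counters into per-frame counts and per-second rates: it keeps the previous totals, subtracts them, and scales the delta by 1000 / elapsed milliseconds. A minimal stand-alone sketch of that computation (names are illustrative):

#include <iostream>

// Counters are cumulative, so each frame we subtract the previous snapshot
// and scale the delta by frames-per-second (1000 / elapsed milliseconds).
struct FrameStats {
    long long lastDrawcalls = 0;

    void update(long long totalDrawcalls, long long msecsElapsed) {
        double frequency = 1000.0 / msecsElapsed;            // frames per second
        long long frameDrawcalls = totalDrawcalls - lastDrawcalls;
        lastDrawcalls = totalDrawcalls;
        std::cout << "drawcalls/frame: " << frameDrawcalls
                  << ", drawcalls/sec: " << frameDrawcalls * frequency << "\n";
    }
};

int main() {
    FrameStats stats;
    stats.update(1200, 16);   // first frame: 1200 calls over ~16 ms
    stats.update(2500, 16);   // next frame: 1300 new calls
    return 0;
}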
|
89
libraries/render/src/render/EngineStats.h
Normal file
|
@ -0,0 +1,89 @@
|
|||
//
|
||||
// EngineStats.h
|
||||
// render/src/render
|
||||
//
|
||||
// Created by Sam Gateau on 3/27/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_render_EngineStats_h
|
||||
#define hifi_render_EngineStats_h
|
||||
|
||||
#include <gpu/Context.h>
|
||||
|
||||
#include <QElapsedTimer>
|
||||
|
||||
#include "Engine.h"
|
||||
|
||||
namespace render {
|
||||
|
||||
// A simple job collecting global stats on the Engine / Scene / GPU
|
||||
class EngineStatsConfig : public Job::Config{
|
||||
Q_OBJECT
|
||||
|
||||
Q_PROPERTY(quint32 bufferCPUCount MEMBER bufferCPUCount NOTIFY dirty)
|
||||
Q_PROPERTY(quint32 bufferGPUCount MEMBER bufferGPUCount NOTIFY dirty)
|
||||
Q_PROPERTY(qint64 bufferCPUMemoryUsage MEMBER bufferCPUMemoryUsage NOTIFY dirty)
|
||||
Q_PROPERTY(qint64 bufferGPUMemoryUsage MEMBER bufferGPUMemoryUsage NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(quint32 textureCPUCount MEMBER textureCPUCount NOTIFY dirty)
|
||||
Q_PROPERTY(quint32 textureGPUCount MEMBER textureGPUCount NOTIFY dirty)
|
||||
Q_PROPERTY(qint64 textureCPUMemoryUsage MEMBER textureCPUMemoryUsage NOTIFY dirty)
|
||||
Q_PROPERTY(qint64 textureGPUMemoryUsage MEMBER textureGPUMemoryUsage NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(quint32 frameDrawcallCount MEMBER frameDrawcallCount NOTIFY dirty)
|
||||
Q_PROPERTY(quint32 frameDrawcallRate MEMBER frameDrawcallRate NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(quint32 frameTriangleCount MEMBER frameTriangleCount NOTIFY dirty)
|
||||
Q_PROPERTY(quint32 frameTriangleRate MEMBER frameTriangleRate NOTIFY dirty)
|
||||
|
||||
Q_PROPERTY(quint32 frameTextureCount MEMBER frameTextureCount NOTIFY dirty)
|
||||
Q_PROPERTY(quint32 frameTextureRate MEMBER frameTextureRate NOTIFY dirty)
|
||||
|
||||
|
||||
public:
|
||||
EngineStatsConfig() : Job::Config(true) {}
|
||||
|
||||
quint32 bufferCPUCount{ 0 };
|
||||
quint32 bufferGPUCount{ 0 };
|
||||
qint64 bufferCPUMemoryUsage{ 0 };
|
||||
qint64 bufferGPUMemoryUsage{ 0 };
|
||||
|
||||
quint32 textureCPUCount{ 0 };
|
||||
quint32 textureGPUCount{ 0 };
|
||||
qint64 textureCPUMemoryUsage{ 0 };
|
||||
qint64 textureGPUMemoryUsage{ 0 };
|
||||
|
||||
quint32 frameDrawcallCount{ 0 };
|
||||
quint32 frameDrawcallRate{ 0 };
|
||||
|
||||
quint32 frameTriangleCount{ 0 };
|
||||
quint32 frameTriangleRate{ 0 };
|
||||
|
||||
quint32 frameTextureCount{ 0 };
|
||||
quint32 frameTextureRate{ 0 };
|
||||
|
||||
void emitDirty() { emit dirty(); }
|
||||
|
||||
signals:
|
||||
void dirty();
|
||||
};
|
||||
|
||||
class EngineStats {
|
||||
gpu::ContextStats _gpuStats;
|
||||
QElapsedTimer _frameTimer;
|
||||
public:
|
||||
using Config = EngineStatsConfig;
|
||||
using JobModel = Job::Model<EngineStats, Config>;
|
||||
|
||||
EngineStats() { _frameTimer.start(); }
|
||||
|
||||
void configure(const Config& configuration) {}
|
||||
void run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext);
|
||||
};
|
||||
}
|
||||
|
||||
#endif
|
|
@ -8,35 +8,44 @@
|
|||
|
||||
#include "QmlWebWindowClass.h"
|
||||
|
||||
#include <QtCore/QUrl>
|
||||
#include <QtCore/QUrlQuery>
|
||||
#include <QtCore/QThread>
|
||||
|
||||
#include <QtQml/QQmlContext>
|
||||
|
||||
#include <QtScript/QScriptContext>
|
||||
#include <QtScript/QScriptEngine>
|
||||
|
||||
#include <QtQuick/QQuickItem>
|
||||
|
||||
#include <AbstractUriHandler.h>
|
||||
#include <AccountManager.h>
|
||||
#include <AddressManager.h>
|
||||
#include <DependencyManager.h>
|
||||
|
||||
#include "OffscreenUi.h"
|
||||
|
||||
static const char* const URL_PROPERTY = "source";
|
||||
|
||||
// Method called by Qt scripts to create a new web window in the overlay
|
||||
QScriptValue QmlWebWindowClass::constructor(QScriptContext* context, QScriptEngine* engine) {
|
||||
return QmlWindowClass::internalConstructor("QmlWebWindow.qml", context, engine,
|
||||
[&](QObject* object) { return new QmlWebWindowClass(object); });
|
||||
auto properties = parseArguments(context);
|
||||
QmlWebWindowClass* retVal { nullptr };
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->executeOnUiThread([&] {
|
||||
retVal = new QmlWebWindowClass();
|
||||
retVal->initQml(properties);
|
||||
}, true);
|
||||
Q_ASSERT(retVal);
|
||||
connect(engine, &QScriptEngine::destroyed, retVal, &QmlWindowClass::deleteLater);
|
||||
return engine->newQObject(retVal);
|
||||
}
|
||||
|
||||
QmlWebWindowClass::QmlWebWindowClass(QObject* qmlWindow) : QmlWindowClass(qmlWindow) {
void QmlWebWindowClass::emitScriptEvent(const QVariant& scriptMessage) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "emitScriptEvent", Qt::QueuedConnection, Q_ARG(QVariant, scriptMessage));
} else {
emit scriptEventReceived(scriptMessage);
}
}

void QmlWebWindowClass::emitWebEvent(const QVariant& webMessage) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "emitWebEvent", Qt::QueuedConnection, Q_ARG(QVariant, webMessage));
} else {
emit webEventReceived(webMessage);
}
}
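Both emit methods use the same thread-marshalling idiom: if the caller is not on the object's thread, the call is re-queued onto it via QMetaObject::invokeMethod with Qt::QueuedConnection; otherwise the signal is emitted directly. The plain-C++ sketch below illustrates the same idea with an explicit task queue (illustrative only; the real code relies on Qt's event loop):

#include <functional>
#include <iostream>
#include <mutex>
#include <queue>
#include <string>
#include <thread>

// If the caller is not on the owning thread, queue the call for that thread
// instead of executing inline.
class OwnedDispatcher {
public:
    explicit OwnedDispatcher(std::thread::id owner) : _owner(owner) {}

    void emitEvent(const std::string& message) {
        if (std::this_thread::get_id() != _owner) {
            std::lock_guard<std::mutex> lock(_mutex);
            _pending.push([this, message] { deliver(message); });   // re-queue
            return;
        }
        deliver(message);
    }

    void drain() {   // called by the owning thread, e.g. once per event-loop tick
        std::lock_guard<std::mutex> lock(_mutex);
        while (!_pending.empty()) {
            _pending.front()();
            _pending.pop();
        }
    }

private:
    void deliver(const std::string& message) {
        std::cout << "delivered on owning thread: " << message << "\n";
    }

    std::thread::id _owner;
    std::mutex _mutex;
    std::queue<std::function<void()>> _pending;
};

int main() {
    OwnedDispatcher dispatcher(std::this_thread::get_id());
    std::thread worker([&] { dispatcher.emitEvent("from worker"); });
    worker.join();
    dispatcher.drain();   // the owning thread delivers the queued event
    return 0;
}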
|
||||
QString QmlWebWindowClass::getURL() const {
|
||||
QVariant result = DependencyManager::get<OffscreenUi>()->returnFromUiThread([&]()->QVariant {
|
||||
|
|
|
@ -18,14 +18,21 @@ class QmlWebWindowClass : public QmlWindowClass {
|
|||
|
||||
public:
|
||||
static QScriptValue constructor(QScriptContext* context, QScriptEngine* engine);
|
||||
QmlWebWindowClass(QObject* qmlWindow);
|
||||
|
||||
public slots:
|
||||
public slots:
|
||||
QString getURL() const;
|
||||
void setURL(const QString& url);
|
||||
|
||||
void emitScriptEvent(const QVariant& scriptMessage);
|
||||
void emitWebEvent(const QVariant& webMessage);
|
||||
|
||||
signals:
|
||||
void urlChanged();
|
||||
void scriptEventReceived(const QVariant& message);
|
||||
void webEventReceived(const QVariant& message);
|
||||
|
||||
protected:
|
||||
QString qmlSource() const override { return "QmlWebWindow.qml"; }
|
||||
};
|
||||
|
||||
#endif
|
||||
|
|
|
@ -20,203 +20,113 @@
|
|||
|
||||
#include <QtWebSockets/QWebSocketServer>
|
||||
#include <QtWebSockets/QWebSocket>
|
||||
#include <QtWebChannel/QWebChannel>
|
||||
#include <QtCore/QJsonDocument>
|
||||
#include <QtCore/QJsonObject>
|
||||
|
||||
#include "OffscreenUi.h"
|
||||
|
||||
QWebSocketServer* QmlWindowClass::_webChannelServer { nullptr };
|
||||
static QWebChannel webChannel;
|
||||
static const uint16_t WEB_CHANNEL_PORT = 51016;
|
||||
static std::atomic<int> nextWindowId;
|
||||
static const char* const SOURCE_PROPERTY = "source";
|
||||
static const char* const TITLE_PROPERTY = "title";
|
||||
static const char* const EVENT_BRIDGE_PROPERTY = "eventBridge";
|
||||
static const char* const WIDTH_PROPERTY = "width";
|
||||
static const char* const HEIGHT_PROPERTY = "height";
|
||||
static const char* const VISIBILE_PROPERTY = "visible";
|
||||
static const char* const TOOLWINDOW_PROPERTY = "toolWindow";
|
||||
static const uvec2 MAX_QML_WINDOW_SIZE { 1280, 720 };
|
||||
static const uvec2 MIN_QML_WINDOW_SIZE { 120, 80 };
|
||||
|
||||
void QmlScriptEventBridge::emitWebEvent(const QString& data) {
|
||||
QMetaObject::invokeMethod(this, "webEventReceived", Qt::QueuedConnection, Q_ARG(QString, data));
|
||||
}
|
||||
|
||||
void QmlScriptEventBridge::emitScriptEvent(const QString& data) {
|
||||
QMetaObject::invokeMethod(this, "scriptEventReceived", Qt::QueuedConnection,
|
||||
Q_ARG(int, _webWindow->getWindowId()), Q_ARG(QString, data));
|
||||
}
|
||||
|
||||
class QmlWebTransport : public QWebChannelAbstractTransport {
|
||||
Q_OBJECT
|
||||
public:
|
||||
QmlWebTransport(QWebSocket* webSocket) : _webSocket(webSocket) {
|
||||
// Translate from the websocket layer to the webchannel layer
|
||||
connect(webSocket, &QWebSocket::textMessageReceived, [this](const QString& message) {
|
||||
QJsonParseError error;
|
||||
QJsonDocument document = QJsonDocument::fromJson(message.toUtf8(), &error);
|
||||
if (error.error || !document.isObject()) {
|
||||
qWarning() << "Unable to parse incoming JSON message" << message;
|
||||
return;
|
||||
}
|
||||
emit messageReceived(document.object(), this);
|
||||
});
|
||||
}
|
||||
|
||||
virtual void sendMessage(const QJsonObject &message) override {
|
||||
// Translate from the webchannel layer to the websocket layer
|
||||
_webSocket->sendTextMessage(QJsonDocument(message).toJson(QJsonDocument::Compact));
|
||||
}
|
||||
|
||||
private:
|
||||
QWebSocket* const _webSocket;
|
||||
};
|
||||
|
||||
|
||||
void QmlWindowClass::setupServer() {
|
||||
if (!_webChannelServer) {
|
||||
_webChannelServer = new QWebSocketServer("EventBridge Server", QWebSocketServer::NonSecureMode);
|
||||
if (!_webChannelServer->listen(QHostAddress::LocalHost, WEB_CHANNEL_PORT)) {
|
||||
qFatal("Failed to open web socket server.");
|
||||
}
|
||||
|
||||
QObject::connect(_webChannelServer, &QWebSocketServer::newConnection, [] {
|
||||
webChannel.connectTo(new QmlWebTransport(_webChannelServer->nextPendingConnection()));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
QScriptValue QmlWindowClass::internalConstructor(const QString& qmlSource,
|
||||
QScriptContext* context, QScriptEngine* engine,
|
||||
std::function<QmlWindowClass*(QObject*)> builder)
|
||||
{
|
||||
QVariantMap QmlWindowClass::parseArguments(QScriptContext* context) {
|
||||
const auto argumentCount = context->argumentCount();
|
||||
QString url;
|
||||
QString title;
|
||||
int width = -1, height = -1;
|
||||
bool visible = true;
|
||||
bool toolWindow = false;
|
||||
QVariantMap properties;
|
||||
if (argumentCount > 1) {
|
||||
|
||||
if (!context->argument(0).isUndefined()) {
|
||||
title = context->argument(0).toString();
|
||||
properties[TITLE_PROPERTY] = context->argument(0).toString();
|
||||
}
|
||||
if (!context->argument(1).isUndefined()) {
|
||||
url = context->argument(1).toString();
|
||||
properties[SOURCE_PROPERTY] = context->argument(1).toString();
|
||||
}
|
||||
if (context->argument(2).isNumber()) {
|
||||
width = context->argument(2).toInt32();
|
||||
properties[WIDTH_PROPERTY] = context->argument(2).toInt32();
|
||||
}
|
||||
if (context->argument(3).isNumber()) {
|
||||
height = context->argument(3).toInt32();
|
||||
properties[HEIGHT_PROPERTY] = context->argument(3).toInt32();
|
||||
}
|
||||
if (context->argument(4).isBool()) {
|
||||
toolWindow = context->argument(4).toBool();
|
||||
properties[TOOLWINDOW_PROPERTY] = context->argument(4).toBool();
|
||||
}
|
||||
} else {
|
||||
auto argumentObject = context->argument(0);
|
||||
if (!argumentObject.property(TITLE_PROPERTY).isUndefined()) {
|
||||
title = argumentObject.property(TITLE_PROPERTY).toString();
|
||||
}
|
||||
if (!argumentObject.property(SOURCE_PROPERTY).isUndefined()) {
|
||||
url = argumentObject.property(SOURCE_PROPERTY).toString();
|
||||
}
|
||||
if (argumentObject.property(WIDTH_PROPERTY).isNumber()) {
|
||||
width = argumentObject.property(WIDTH_PROPERTY).toInt32();
|
||||
}
|
||||
if (argumentObject.property(HEIGHT_PROPERTY).isNumber()) {
|
||||
height = argumentObject.property(HEIGHT_PROPERTY).toInt32();
|
||||
}
|
||||
if (argumentObject.property(VISIBILE_PROPERTY).isBool()) {
|
||||
visible = argumentObject.property(VISIBILE_PROPERTY).toBool();
|
||||
}
|
||||
if (argumentObject.property(TOOLWINDOW_PROPERTY).isBool()) {
|
||||
toolWindow = argumentObject.property(TOOLWINDOW_PROPERTY).toBool();
|
||||
}
|
||||
properties = context->argument(0).toVariant().toMap();
|
||||
}
|
||||
|
||||
QString url = properties[SOURCE_PROPERTY].toString();
|
||||
if (!url.startsWith("http") && !url.startsWith("file://") && !url.startsWith("about:")) {
|
||||
url = QUrl::fromLocalFile(url).toString();
|
||||
properties[SOURCE_PROPERTY] = QUrl::fromLocalFile(url).toString();
|
||||
}
|
||||
|
||||
if (width != -1 || height != -1) {
|
||||
width = std::max(100, std::min(1280, width));
|
||||
height = std::max(100, std::min(720, height));
|
||||
}
|
||||
|
||||
QmlWindowClass* retVal{ nullptr };
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
|
||||
if (toolWindow) {
|
||||
auto toolWindow = offscreenUi->getToolWindow();
|
||||
QVariantMap properties;
|
||||
properties.insert(TITLE_PROPERTY, title);
|
||||
properties.insert(SOURCE_PROPERTY, url);
|
||||
if (width != -1 && height != -1) {
|
||||
properties.insert(WIDTH_PROPERTY, width);
|
||||
properties.insert(HEIGHT_PROPERTY, height);
|
||||
}
|
||||
|
||||
// Build the event bridge and wrapper on the main thread
|
||||
QVariant newTabVar;
|
||||
bool invokeResult = QMetaObject::invokeMethod(toolWindow, "addWebTab", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(QVariant, newTabVar),
|
||||
Q_ARG(QVariant, QVariant::fromValue(properties)));
|
||||
|
||||
QQuickItem* newTab = qvariant_cast<QQuickItem*>(newTabVar);
|
||||
if (!invokeResult || !newTab) {
|
||||
return QScriptValue();
|
||||
}
|
||||
|
||||
offscreenUi->returnFromUiThread([&] {
|
||||
setupServer();
|
||||
retVal = builder(newTab);
|
||||
retVal->_toolWindow = true;
|
||||
registerObject(url.toLower(), retVal);
|
||||
return QVariant();
|
||||
});
|
||||
} else {
|
||||
// Build the event bridge and wrapper on the main thread
|
||||
QMetaObject::invokeMethod(offscreenUi.data(), "load", Qt::BlockingQueuedConnection,
|
||||
Q_ARG(const QString&, qmlSource),
|
||||
Q_ARG(std::function<void(QQmlContext*, QObject*)>, [&](QQmlContext* context, QObject* object) {
|
||||
setupServer();
|
||||
retVal = builder(object);
|
||||
context->engine()->setObjectOwnership(retVal->_qmlWindow, QQmlEngine::CppOwnership);
|
||||
registerObject(url.toLower(), retVal);
|
||||
if (!title.isEmpty()) {
|
||||
retVal->setTitle(title);
|
||||
}
|
||||
if (width != -1 && height != -1) {
|
||||
retVal->setSize(width, height);
|
||||
}
|
||||
object->setProperty(SOURCE_PROPERTY, url);
|
||||
if (visible) {
|
||||
object->setProperty("visible", true);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
retVal->_source = url;
|
||||
connect(engine, &QScriptEngine::destroyed, retVal, &QmlWindowClass::deleteLater);
|
||||
return engine->newQObject(retVal);
|
||||
return properties;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Method called by Qt scripts to create a new web window in the overlay
|
||||
QScriptValue QmlWindowClass::constructor(QScriptContext* context, QScriptEngine* engine) {
|
||||
return internalConstructor("QmlWindow.qml", context, engine, [&](QObject* object){
|
||||
return new QmlWindowClass(object);
|
||||
});
|
||||
auto properties = parseArguments(context);
|
||||
QmlWindowClass* retVal { nullptr };
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->executeOnUiThread([&] {
|
||||
retVal = new QmlWindowClass();
|
||||
retVal->initQml(properties);
|
||||
}, true);
|
||||
Q_ASSERT(retVal);
|
||||
connect(engine, &QScriptEngine::destroyed, retVal, &QmlWindowClass::deleteLater);
|
||||
return engine->newQObject(retVal);
|
||||
}
|
||||
|
||||
QmlWindowClass::QmlWindowClass(QObject* qmlWindow)
|
||||
: _windowId(++nextWindowId), _qmlWindow(qmlWindow)
|
||||
{
|
||||
qDebug() << "Created window with ID " << _windowId;
|
||||
QmlWindowClass::QmlWindowClass() {
|
||||
|
||||
}
|
||||
|
||||
void QmlWindowClass::initQml(QVariantMap properties) {
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
_toolWindow = properties.contains(TOOLWINDOW_PROPERTY) && properties[TOOLWINDOW_PROPERTY].toBool();
|
||||
_source = properties[SOURCE_PROPERTY].toString();
|
||||
|
||||
if (_toolWindow) {
|
||||
// Build the event bridge and wrapper on the main thread
|
||||
_qmlWindow = offscreenUi->getToolWindow();
|
||||
properties[EVENT_BRIDGE_PROPERTY] = QVariant::fromValue(this);
|
||||
QVariant newTabVar;
|
||||
bool invokeResult = QMetaObject::invokeMethod(_qmlWindow, "addWebTab", Qt::DirectConnection,
|
||||
Q_RETURN_ARG(QVariant, newTabVar),
|
||||
Q_ARG(QVariant, QVariant::fromValue(properties)));
|
||||
Q_ASSERT(invokeResult);
|
||||
} else {
|
||||
// Build the event bridge and wrapper on the main thread
|
||||
offscreenUi->load(qmlSource(), [&](QQmlContext* context, QObject* object) {
|
||||
_qmlWindow = object;
|
||||
_qmlWindow->setProperty("eventBridge", QVariant::fromValue(this));
|
||||
context->engine()->setObjectOwnership(this, QQmlEngine::CppOwnership);
|
||||
context->engine()->setObjectOwnership(object, QQmlEngine::CppOwnership);
|
||||
if (properties.contains(TITLE_PROPERTY)) {
|
||||
object->setProperty(TITLE_PROPERTY, properties[TITLE_PROPERTY].toString());
|
||||
}
|
||||
if (properties.contains(HEIGHT_PROPERTY) && properties.contains(WIDTH_PROPERTY)) {
|
||||
uvec2 requestedSize { properties[WIDTH_PROPERTY].toUInt(), properties[HEIGHT_PROPERTY].toUInt() };
|
||||
requestedSize = glm::clamp(requestedSize, MIN_QML_WINDOW_SIZE, MAX_QML_WINDOW_SIZE);
|
||||
asQuickItem()->setSize(QSize(requestedSize.x, requestedSize.y));
|
||||
}
|
||||
|
||||
bool visible = !properties.contains(VISIBILE_PROPERTY) || properties[VISIBILE_PROPERTY].toBool();
|
||||
object->setProperty(VISIBILE_PROPERTY, visible);
|
||||
object->setProperty(SOURCE_PROPERTY, _source);
|
||||
|
||||
// Forward messages received from QML on to the script
|
||||
connect(_qmlWindow, SIGNAL(sendToScript(QVariant)), this, SIGNAL(fromQml(const QVariant&)), Qt::QueuedConnection);
|
||||
});
|
||||
}
|
||||
Q_ASSERT(_qmlWindow);
|
||||
Q_ASSERT(dynamic_cast<const QQuickItem*>(_qmlWindow.data()));
|
||||
// Forward messages received from QML on to the script
|
||||
connect(_qmlWindow, SIGNAL(sendToScript(QVariant)), this, SIGNAL(fromQml(const QVariant&)), Qt::QueuedConnection);
|
||||
}
|
||||
|
||||
void QmlWindowClass::sendToQml(const QVariant& message) {
|
||||
|
@ -228,14 +138,6 @@ QmlWindowClass::~QmlWindowClass() {
|
|||
close();
|
||||
}
|
||||
|
||||
void QmlWindowClass::registerObject(const QString& name, QObject* object) {
|
||||
webChannel.registerObject(name, object);
|
||||
}
|
||||
|
||||
void QmlWindowClass::deregisterObject(QObject* object) {
|
||||
webChannel.deregisterObject(object);
|
||||
}
|
||||
|
||||
QQuickItem* QmlWindowClass::asQuickItem() const {
|
||||
if (_toolWindow) {
|
||||
return DependencyManager::get<OffscreenUi>()->getToolWindow();
|
||||
|
@ -248,7 +150,6 @@ void QmlWindowClass::setVisible(bool visible) {
|
|||
if (_toolWindow) {
|
||||
// For tool window tabs we special case visibility as a function call on the tab parent
|
||||
// The tool window itself has special logic based on whether any tabs are visible
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
QMetaObject::invokeMethod(targetWindow, "showTabForUrl", Qt::QueuedConnection, Q_ARG(QVariant, _source), Q_ARG(QVariant, visible));
|
||||
} else {
|
||||
DependencyManager::get<OffscreenUi>()->executeOnUiThread([=] {
|
||||
|
@ -359,5 +260,3 @@ void QmlWindowClass::raise() {
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
#include "QmlWindowClass.moc"
|
||||
|
|
|
@ -13,45 +13,22 @@
|
|||
#include <QtCore/QPointer>
|
||||
#include <QtScript/QScriptValue>
|
||||
#include <QtQuick/QQuickItem>
|
||||
#include <QtWebChannel/QWebChannelAbstractTransport>
|
||||
|
||||
#include <GLMHelpers.h>
|
||||
|
||||
class QScriptEngine;
|
||||
class QScriptContext;
|
||||
class QmlWindowClass;
|
||||
class QWebSocketServer;
|
||||
class QWebSocket;
|
||||
|
||||
class QmlScriptEventBridge : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
QmlScriptEventBridge(const QmlWindowClass* webWindow) : _webWindow(webWindow) {}
|
||||
|
||||
public slots :
|
||||
void emitWebEvent(const QString& data);
|
||||
void emitScriptEvent(const QString& data);
|
||||
|
||||
signals:
|
||||
void webEventReceived(const QString& data);
|
||||
void scriptEventReceived(int windowId, const QString& data);
|
||||
|
||||
private:
|
||||
const QmlWindowClass* _webWindow { nullptr };
|
||||
QWebSocket *_socket { nullptr };
|
||||
};
|
||||
// FIXME refactor this class to be a QQuickItem derived type and eliminate the needless wrapping
|
||||
class QmlWindowClass : public QObject {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(QObject* eventBridge READ getEventBridge CONSTANT)
|
||||
Q_PROPERTY(int windowId READ getWindowId CONSTANT)
|
||||
Q_PROPERTY(glm::vec2 position READ getPosition WRITE setPosition NOTIFY positionChanged)
|
||||
Q_PROPERTY(glm::vec2 size READ getSize WRITE setSize NOTIFY sizeChanged)
|
||||
Q_PROPERTY(bool visible READ isVisible WRITE setVisible NOTIFY visibilityChanged)
|
||||
|
||||
public:
|
||||
static QScriptValue constructor(QScriptContext* context, QScriptEngine* engine);
|
||||
QmlWindowClass(QObject* qmlWindow);
|
||||
QmlWindowClass();
|
||||
~QmlWindowClass();
|
||||
|
||||
public slots:
|
||||
|
@ -69,8 +46,7 @@ public slots:
|
|||
|
||||
Q_INVOKABLE void raise();
|
||||
Q_INVOKABLE void close();
|
||||
Q_INVOKABLE int getWindowId() const { return _windowId; };
|
||||
Q_INVOKABLE QmlScriptEventBridge* getEventBridge() const { return _eventBridge; };
|
||||
Q_INVOKABLE QObject* getEventBridge() { return this; };
|
||||
|
||||
// Scripts can use this to send a message to the QML object
|
||||
void sendToQml(const QVariant& message);
|
||||
|
@ -89,21 +65,18 @@ protected slots:
|
|||
void hasClosed();
|
||||
|
||||
protected:
|
||||
static QScriptValue internalConstructor(const QString& qmlSource,
|
||||
QScriptContext* context, QScriptEngine* engine,
|
||||
std::function<QmlWindowClass*(QObject*)> function);
|
||||
static void setupServer();
|
||||
static void registerObject(const QString& name, QObject* object);
|
||||
static void deregisterObject(QObject* object);
|
||||
static QWebSocketServer* _webChannelServer;
|
||||
static QVariantMap parseArguments(QScriptContext* context);
|
||||
static QScriptValue internalConstructor(QScriptContext* context, QScriptEngine* engine,
|
||||
std::function<QmlWindowClass*(QVariantMap)> function);
|
||||
|
||||
virtual QString qmlSource() const { return "QmlWindow.qml"; }
|
||||
|
||||
virtual void initQml(QVariantMap properties);
|
||||
QQuickItem* asQuickItem() const;
|
||||
QmlScriptEventBridge* const _eventBridge { new QmlScriptEventBridge(this) };
|
||||
|
||||
// FIXME needs to be initialized in the ctor once we have support
|
||||
// for tool window panes in QML
|
||||
bool _toolWindow { false };
|
||||
const int _windowId;
|
||||
QPointer<QObject> _qmlWindow;
|
||||
QString _source;
|
||||
};
|
||||
|
|
|
@ -451,10 +451,10 @@ bool NeuronPlugin::isSupported() const {
#endif
}

void NeuronPlugin::activate() {
#ifdef HAVE_NEURON
bool NeuronPlugin::activate() {
InputPlugin::activate();

#ifdef HAVE_NEURON
// register with userInputMapper
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->registerDevice(_inputDevice);

@ -473,11 +473,15 @@ void NeuronPlugin::activate() {
if (!_socketRef) {
// error
qCCritical(inputplugins) << "NeuronPlugin: error connecting to " << _serverAddress.c_str() << ":" << _serverPort << ", error = " << BRGetLastErrorMessage();
return false;
} else {
qCDebug(inputplugins) << "NeuronPlugin: success connecting to " << _serverAddress.c_str() << ":" << _serverPort;

BRRegisterAutoSyncParmeter(_socketRef, Cmd_CombinationMode);
return true;
}
#else
return false;
#endif
}
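Across this commit the input and display plugins change activate() from void to bool so that activation can report failure (missing hardware or SDK) instead of being assumed to succeed. A small stand-alone sketch of how a caller might use that, trying plugins in order and keeping the first that activates; the plugin names and availability flags here are illustrative:

#include <iostream>
#include <string>
#include <vector>

// Illustrative only: a plugin whose activate() reports success or failure,
// like the bool activate() signatures introduced by this change.
struct Plugin {
    std::string name;
    bool available;                 // pretend hardware/SDK presence
    bool activate() const {
        if (!available) {
            std::cout << name << ": activation failed (device not present)\n";
            return false;
        }
        std::cout << name << ": activated\n";
        return true;
    }
};

// Try plugins in priority order and keep the first one that activates.
const Plugin* activateFirstWorking(const std::vector<Plugin>& plugins) {
    for (const auto& plugin : plugins) {
        if (plugin.activate()) {
            return &plugin;
        }
    }
    return nullptr;
}

int main() {
    std::vector<Plugin> plugins = {
        {"Neuron", false},
        {"Hydra", false},
        {"SDL2 gamepad", true},
    };
    const Plugin* active = activateFirstWorking(plugins);
    std::cout << "active plugin: " << (active ? active->name : "none") << "\n";
    return 0;
}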
|
|
|
@ -31,7 +31,7 @@ public:
|
|||
virtual const QString& getName() const override { return NAME; }
|
||||
const QString& getID() const override { return NEURON_ID_STRING; }
|
||||
|
||||
virtual void activate() override;
|
||||
virtual bool activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
|
||||
|
|
|
@ -99,15 +99,19 @@ void SDL2Manager::deinit() {
|
|||
#endif
|
||||
}
|
||||
|
||||
void SDL2Manager::activate() {
|
||||
bool SDL2Manager::activate() {
|
||||
InputPlugin::activate();
|
||||
|
||||
#ifdef HAVE_SDL2
|
||||
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
|
||||
for (auto joystick : _openJoysticks) {
|
||||
userInputMapper->registerDevice(joystick);
|
||||
emit joystickAdded(joystick.get());
|
||||
}
|
||||
return true;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
InputPlugin::activate();
|
||||
}
|
||||
|
||||
void SDL2Manager::deactivate() {
|
||||
|
|
|
@ -35,7 +35,7 @@ public:
|
|||
virtual void deinit() override;
|
||||
|
||||
/// Called when a plugin is being activated for use. May be called multiple times.
|
||||
virtual void activate() override;
|
||||
virtual bool activate() override;
|
||||
/// Called when a plugin is no longer being used. May be called multiple times.
|
||||
virtual void deactivate() override;
|
||||
|
||||
|
|
|
@ -79,7 +79,7 @@ bool SixenseManager::isSupported() const {
|
|||
#endif
|
||||
}
|
||||
|
||||
void SixenseManager::activate() {
|
||||
bool SixenseManager::activate() {
|
||||
InputPlugin::activate();
|
||||
|
||||
#ifdef HAVE_SIXENSE
|
||||
|
@ -101,6 +101,9 @@ void SixenseManager::activate() {
|
|||
|
||||
loadSettings();
|
||||
_sixenseLoaded = (sixenseInit() == SIXENSE_SUCCESS);
|
||||
return _sixenseLoaded;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
|
|
@ -32,7 +32,7 @@ public:
|
|||
virtual const QString& getName() const override { return NAME; }
|
||||
virtual const QString& getID() const override { return HYDRA_ID_STRING; }
|
||||
|
||||
virtual void activate() override;
|
||||
virtual bool activate() override;
|
||||
virtual void deactivate() override;
|
||||
|
||||
virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
|
||||
|
|
|
@ -34,8 +34,11 @@ void OculusBaseDisplayPlugin::customizeContext() {
|
|||
Parent::customizeContext();
|
||||
}
|
||||
|
||||
void OculusBaseDisplayPlugin::internalActivate() {
|
||||
bool OculusBaseDisplayPlugin::internalActivate() {
|
||||
_session = acquireOculusSession();
|
||||
if (!_session) {
|
||||
return false;
|
||||
}
|
||||
|
||||
_hmdDesc = ovr_GetHmdDesc(_session);
|
||||
|
||||
|
@ -65,7 +68,7 @@ void OculusBaseDisplayPlugin::internalActivate() {
|
|||
|
||||
if (!OVR_SUCCESS(ovr_ConfigureTracking(_session,
|
||||
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
|
||||
qWarning() << "Could not attach to sensor device";
|
||||
logWarning("Failed to attach to sensor device");
|
||||
}
|
||||
|
||||
// Parent class relies on our _session intialization, so it must come after that.
|
||||
|
@ -81,7 +84,7 @@ void OculusBaseDisplayPlugin::internalActivate() {
|
|||
// This must come after the initialization, so that the values calculated
|
||||
// above are available during the customizeContext call (when not running
|
||||
// in threaded present mode)
|
||||
Parent::internalActivate();
|
||||
return Parent::internalActivate();
|
||||
}
|
||||
|
||||
void OculusBaseDisplayPlugin::internalDeactivate() {
|
||||
|
|
|
@ -24,7 +24,7 @@ public:
|
|||
|
||||
protected:
|
||||
void customizeContext() override;
|
||||
void internalActivate() override;
|
||||
bool internalActivate() override;
|
||||
void internalDeactivate() override;
|
||||
|
||||
protected:
|
||||
|
|
|
@ -67,7 +67,7 @@ void OculusDisplayPlugin::hmdPresent() {
|
|||
ovrLayerHeader* layers = &_sceneLayer.Header;
|
||||
ovrResult result = ovr_SubmitFrame(_session, _currentRenderFrameIndex, &_viewScaleDesc, &layers, 1);
|
||||
if (!OVR_SUCCESS(result)) {
|
||||
qDebug() << result;
|
||||
logWarning("Failed to present");
|
||||
}
|
||||
}
|
||||
_sceneFbo->Increment();
|
||||
|
|
|
@ -10,16 +10,34 @@

#include <atomic>
#include <QtCore/QLoggingCategory>

using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;

Q_DECLARE_LOGGING_CATEGORY(oculus)
Q_LOGGING_CATEGORY(oculus, "hifi.plugins.oculus")

static std::atomic<uint32_t> refCount { 0 };
static ovrSession session { nullptr };

inline ovrErrorInfo getError() {
ovrErrorInfo error;
ovr_GetLastErrorInfo(&error);
return error;
}

void logWarning(const char* what) {
qWarning(oculus) << what << ":" << getError().ErrorString;
}

void logFatal(const char* what) {
std::string error("[oculus] ");
error += what;
error += ": ";
error += getError().ErrorString;
qFatal(error.c_str());
}

bool oculusAvailable() {
ovrDetectResult detect = ovr_Detect(0);
return (detect.IsOculusServiceRunning && detect.IsOculusHMDConnected);

@ -37,14 +55,14 @@ ovrSession acquireOculusSession() {
init.ConnectionTimeoutMS = 0;
init.LogCallback = nullptr;
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
qCWarning(oculus) << "Failed to initialize Oculus SDK";
logWarning("Failed to initialize Oculus SDK");
return session;
}

Q_ASSERT(0 == refCount);
ovrGraphicsLuid luid;
if (!OVR_SUCCESS(ovr_Create(&session, &luid))) {
qCWarning(oculus) << "Failed to acquire Oculus session";
logWarning("Failed to acquire Oculus session");
return session;
}
}
|
@ -105,7 +123,7 @@ void SwapFramebufferWrapper::initColor() {
|
|||
destroyColor();
|
||||
|
||||
if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(_session, GL_SRGB8_ALPHA8, size.x, size.y, &color))) {
|
||||
qFatal("Unable to create swap textures");
|
||||
logFatal("Failed to create swap textures");
|
||||
}
|
||||
|
||||
for (int i = 0; i < color->TextureCount; ++i) {
|
||||
|
|
|
@@ -14,6 +14,8 @@

#include <gl/OglplusHelpers.h>

void logWarning(const char* what);
void logFatal(const char* what);
bool oculusAvailable();
ovrSession acquireOculusSession();
void releaseOculusSession();
@@ -68,18 +68,20 @@ bool OculusLegacyDisplayPlugin::isSupported() const {
return result;
}

void OculusLegacyDisplayPlugin::internalActivate() {
bool OculusLegacyDisplayPlugin::internalActivate() {
Parent::internalActivate();

if (!(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
qFatal("Failed to Initialize SDK");
return false;
}

_hswDismissed = false;
_hmd = ovrHmd_Create(0);
if (!_hmd) {
qFatal("Failed to acquire HMD");
return false;
}

_ipd = ovrHmd_GetFloat(_hmd, OVR_KEY_IPD, _ipd);

@@ -107,6 +109,8 @@ void OculusLegacyDisplayPlugin::internalActivate() {
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
qFatal("Could not attach to sensor device");
}

return true;
}

void OculusLegacyDisplayPlugin::internalDeactivate() {
@@ -31,7 +31,7 @@ public:
virtual float getTargetFrameRate() override;

protected:
virtual void internalActivate() override;
virtual bool internalActivate() override;
virtual void internalDeactivate() override;

virtual void customizeContext() override;
@@ -41,14 +41,18 @@ bool OpenVrDisplayPlugin::isSupported() const {
return !isOculusPresent() && vr::VR_IsHmdPresent();
}

void OpenVrDisplayPlugin::internalActivate() {
bool OpenVrDisplayPlugin::internalActivate() {
Parent::internalActivate();

_container->setIsOptionChecked(StandingHMDSensorMode, true);

if (!_system) {
_system = acquireOpenVrSystem();
}
Q_ASSERT(_system);
if (!_system) {
qWarning() << "Failed to initialize OpenVR";
return false;
}

_system->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
// Recommended render target size is per-eye, so double the X size for

@@ -86,6 +90,8 @@ void OpenVrDisplayPlugin::internalActivate() {
} else {
qDebug() << "OpenVR: error could not get chaperone pointer";
}

return true;
}

void OpenVrDisplayPlugin::internalDeactivate() {
@@ -30,7 +30,7 @@ public:
virtual void updateHeadPose(uint32_t frameIndex) override;

protected:
void internalActivate() override;
bool internalActivate() override;
void internalDeactivate() override;

void hmdPresent() override;
@@ -53,8 +53,9 @@ bool ViveControllerManager::isSupported() const {
return !isOculusPresent() && vr::VR_IsHmdPresent();
}

void ViveControllerManager::activate() {
bool ViveControllerManager::activate() {
InputPlugin::activate();

_container->addMenu(MENU_PATH);
_container->addMenuItem(PluginType::INPUT_PLUGIN, MENU_PATH, RENDER_CONTROLLERS,
[this] (bool clicked) { this->setRenderControllers(clicked); },

@@ -122,6 +123,8 @@ void ViveControllerManager::activate() {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->registerDevice(_inputDevice);
_registeredWithInputMapper = true;

return true;
}

void ViveControllerManager::deactivate() {
@@ -37,7 +37,7 @@ public:
virtual bool isJointController() const override { return true; }
const QString& getName() const override { return NAME; }

virtual void activate() override;
virtual bool activate() override;
virtual void deactivate() override;

virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }

@@ -111,8 +111,6 @@ private:
std::shared_ptr<InputDevice> _inputDevice { std::make_shared<InputDevice>(_system) };

static const QString NAME;

};

#endif // hifi__ViveControllerManager
@@ -80,16 +80,16 @@
print("WE DONT HAVE A SECOND HAND! RETURNING");
return;
}

var clockRotation = Entities.getEntityProperties(_this.clockBody, "rotation").rotation;
var DEGREES_FOR_SECOND = 6;
var myDate = new Date();
var seconds = myDate.getSeconds();
secondRollDegrees = -seconds * DEGREES_FOR_SECOND;
// var localClockHandRotation = Quat.fromPitchYawRollDegrees(0, 0, secondRollDegrees);
// var worldClockHandRotation = Quat.multiply(clockRotation, localClockHandRotation);
// Entities.editEntity(_this.secondHand, {
// rotation: worldClockHandRotation
// });
var localClockHandRotation = Quat.fromPitchYawRollDegrees(0, 0, secondRollDegrees);
var worldClockHandRotation = Quat.multiply(clockRotation, localClockHandRotation);
Entities.editEntity(_this.secondHand, {
rotation: worldClockHandRotation
});

},

@@ -122,12 +122,12 @@
var seconds = date.getSeconds();
var minutes = date.getMinutes();

// if (seconds === 0 && minutes === 0) {
// _this.popCuckooOut();
// }
if (seconds % 30 === 0) {
if (seconds === 0 && minutes === 0) {
_this.popCuckooOut();
}
// if (seconds % 30 === 0) {
// _this.popCuckooOut();
// }

},
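Editor's note: the first hunk above re-enables the second hand by composing the clock body's world rotation with a local roll of 6 degrees per second, and the second hunk moves the cuckoo chime from every 30 seconds to the top of the hour. A minimal standalone sketch of the hand-update mapping, assuming valid clockBodyID and secondHandID entity IDs (the function name is illustrative, not from the commit):

// Sketch: rotate a clock's second hand to match the current wall-clock seconds.
var DEGREES_PER_SECOND = 6; // 360 degrees / 60 seconds

function updateSecondHand(clockBodyID, secondHandID) {
    var clockRotation = Entities.getEntityProperties(clockBodyID, "rotation").rotation;
    var seconds = new Date().getSeconds();
    // Negative roll so the hand sweeps clockwise when viewed from the front.
    var localRoll = Quat.fromPitchYawRollDegrees(0, 0, -seconds * DEGREES_PER_SECOND);
    Entities.editEntity(secondHandID, {
        rotation: Quat.multiply(clockRotation, localRoll)
    });
}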
@@ -19,7 +19,6 @@
function GrowingPlant() {
_this = this;
_this.flowers = [];
// _this.STARTING_FLOWER_DIMENSIONS = {x: 0.1, y: 0.001, z: 0.1}
_this.STARTING_FLOWER_DIMENSIONS = {
x: 0.001,
y: 0.001,

@@ -34,9 +33,7 @@
max: 1000
};
_this.canCreateFlower = true;
// _this.SHADER_URL = "https://s3-us-west-1.amazonaws.com/hifi-content/eric/shaders/flower.fs";
_this.SHADER_URL = "atp:/shaders/flower.fs";
// _this.SHADER_URL = "file:///C:/Users/Eric/hifi/unpublishedScripts/DomainContent/Home/plant/flower.fs";

_this.flowerHSLColors = [{
hue: 19 / 360,

@@ -77,10 +74,10 @@
// Reduces flower overlap
return;
}
var xzGrowthRate = randFloat(0.00006, 0.00016);
var xzGrowthRate = randFloat(0.0009, 0.0026);
var growthRate = {
x: xzGrowthRate,
y: randFloat(0.001, 0.003),
y: randFloat(0.01, 0.03),
z: xzGrowthRate
};

@@ -92,17 +89,7 @@
},
startingPosition: position,
rotation: Quat.rotationBetween(Vec3.UNIT_Y, surfaceNormal),
maxYDimension: randFloat(0.4, 1.1),
// startingHSLColor: {
// hue: 80 / 360,
// saturation: 0.47,
// light: 0.48
// },
// endingHSLColor: {
// hue: 19 / 260,
// saturation: 0.92,
// light: 0.41
// },
maxYDimension: randFloat(0.8, 1.7),
hslColor: Math.random() < 0.5 ? _this.flowerHSLColors[0] : _this.flowerHSLColors[1],
growthRate: growthRate
};

@@ -121,7 +108,7 @@
};
flower.id = Entities.addEntity({
type: "Sphere",
name: "flower",
name: "home-sphere-flower",
lifetime: 3600,
position: position,
collisionless: true,

@@ -136,12 +123,6 @@
}
flower.dimensions = Vec3.sum(flower.dimensions, flower.growthRate);
flower.position = Vec3.sum(flower.startingPosition, Vec3.multiply(Quat.getUp(flower.rotation), flower.dimensions.y / 2));
// As we grow we must also move ourselves in the direction we grow!
//TODO: Add this color changing back once we fix bug https://app.asana.com/0/inbox/31759584831096/96943843100173/98022172055918
// var newHue = map(flower.dimensions.y, _this.STARTING_FLOWER_DIMENSIONS.y, flower.maxYDimension, flower.startingHSLColor.hue, flower.endingHSLColor.hue);
// var newSaturation = map(flower.dimensions.y, _this.STARTING_FLOWER_DIMENSIONS.y, flower.maxYDimension, flower.startingHSLColor.saturation, flower.endingHSLColor.saturation);
// var newLight = map(flower.dimensions.y, _this.STARTING_FLOWER_DIMENSIONS.y, flower.maxYDimension, flower.startingHSLColor.light, flower.endingHSLColor.light);
// flower.userData.PrsoceduralEntity.uniforms.iHSLColor = [newHue, newSaturation, newLight];
Entities.editEntity(flower.id, {
dimensions: flower.dimensions,
position: flower.position,

@@ -155,6 +136,12 @@
_this.entityID = entityID;
},

unload: function() {
_this.flowers.forEach(function(flower) {
Entities.deleteEntity(flower.id);
});
}

};

// entity scripts always need to return a newly constructed object of our type
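Editor's note: the growth-rate change above only scales the per-tick increments (roughly an order of magnitude faster); the grow step itself is unchanged: add the growth rate to the dimensions, then push the flower along its local up axis by half its new height so its base stays on the surface it was planted on. A hedged sketch of that one step, mirroring the flower record shape used by the script above:

// Sketch: one growth tick for a flower record like those created above.
// flower = { id, dimensions, growthRate, startingPosition, rotation, maxYDimension }
function growFlower(flower) {
    if (flower.dimensions.y >= flower.maxYDimension) {
        return; // fully grown
    }
    flower.dimensions = Vec3.sum(flower.dimensions, flower.growthRate);
    // Re-center the sphere so its base stays on the planting surface.
    flower.position = Vec3.sum(flower.startingPosition,
        Vec3.multiply(Quat.getUp(flower.rotation), flower.dimensions.y / 2));
    Entities.editEntity(flower.id, {
        dimensions: flower.dimensions,
        position: flower.position
    });
}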
@@ -19,9 +19,9 @@
function WaterSpout() {
_this = this;
_this.waterSound = SoundCache.getSound("atp:/growingPlant/watering_can_pour.L.wav");
_this.POUR_ANGLE_THRESHOLD = 0;
_this.POUR_ANGLE_THRESHOLD = -0.1;
_this.waterPouring = false;
_this.WATER_SPOUT_NAME = "hifi-water-spout";
_this.WATER_SPOUT_NAME = "home_box_waterSpout";
_this.GROWABLE_ENTITIES_SEARCH_RANGE = 100;

};

@@ -37,6 +37,7 @@
},

startHold: function() {
print("EBL START HOLD")
var entities = Entities.findEntities(_this.position, 2);
print("EBL SEARCHING FOR SPOUT");
entities.forEach(function(entity) {

@@ -62,22 +63,12 @@

releaseHold: function() {
_this.stopPouring();
var waterEffectToDelete = _this.waterEffect;
Script.setTimeout(function() {
Entities.deleteEntity(_this.waterEffect);
Entities.deleteEntity(waterEffectToDelete);
}, 2000);
},

stopPouring: function() {
Entities.editEntity(_this.waterEffect, {
isEmitting: false
});
_this.waterPouring = false;
//water no longer pouring...
if (_this.waterInjector) {
_this.waterInjector.stop();
}
Entities.callEntityMethod(_this.mostRecentIntersectedGrowableEntity, 'stopWatering');
},
continueEquip: function() {
_this.continueHolding();
},

@@ -91,37 +82,59 @@
return;
}
// Check rotation of water can along its z axis. If it's beyond a threshold, then start spraying water
_this.castRay();
var rotation = Entities.getEntityProperties(_this.entityID, "rotation").rotation;
var pitch = Quat.safeEulerAngles(rotation).x;
if (pitch < _this.POUR_ANGLE_THRESHOLD) {
var forwardVec = Quat.getFront(rotation);
if (forwardVec.y < _this.POUR_ANGLE_THRESHOLD) {
// Water is pouring
var spoutProps = Entities.getEntityProperties(_this.waterSpout, ["rotation", "position"]);
_this.spoutProps= Entities.getEntityProperties(_this.waterSpout, ["rotation", "position"]);
_this.castRay();
if (!_this.waterPouring) {
Entities.editEntity(_this.waterEffect, {
isEmitting: true
});
_this.waterPouring = true;
if (!_this.waterInjector) {
_this.waterInjector = Audio.playSound(_this.waterSound, {
position: spoutProps.position,
loop: true
});

} else {
_this.waterInjector.restart();
}
_this.startPouring();
}
_this.waterSpoutRotation = spoutProps.rotation;
_this.waterSpoutRotation = _this.spoutProps.rotation;
var waterEmitOrientation = Quat.multiply(_this.waterSpoutRotation, Quat.fromPitchYawRollDegrees(0, 180, 0));
Entities.editEntity(_this.waterEffect, {
emitOrientation: waterEmitOrientation
});
} else if (pitch > _this.POUR_ANGLE_THRESHOLD && _this.waterPouring) {
} else if (forwardVec.y > _this.POUR_ANGLE_THRESHOLD && _this.waterPouring) {
_this.stopPouring();
}
},

stopPouring: function() {
print("EBL STOP POURING")
Entities.editEntity(_this.waterEffect, {
isEmitting: false
});
_this.waterPouring = false;
//water no longer pouring...
if (_this.waterInjector) {
_this.waterInjector.stop();
}
Entities.callEntityMethod(_this.mostRecentIntersectedGrowableEntity, 'stopWatering');
},

startPouring: function() {
print("EBL START POURING")
Script.setTimeout(function() {
Entities.editEntity(_this.waterEffect, {
isEmitting: true
});
}, 100);
_this.waterPouring = true;
if (!_this.waterInjector) {
_this.waterInjector = Audio.playSound(_this.waterSound, {
position: _this.spoutProps.position,
loop: true
});

} else {
_this.waterInjector.restart();
}

},

castRay: function() {
var spoutProps = Entities.getEntityProperties(_this.waterSpout, ["position, rotation"]);
var direction = Quat.getFront(spoutProps.rotation)

@@ -227,20 +240,6 @@
print("EBL PRELOADING WATER CAN")
_this.entityID = entityID;
_this.position = Entities.getEntityProperties(_this.entityID, "position").position;
// Wait a bit for spout to spawn for case where preload is initial spawn, then save it
Script.setTimeout(function() {
var entities = Entities.findEntities(_this.position, 2);
print("EBL SEARCHING FOR SPOUT");
entities.forEach(function(entity) {
var name = Entities.getEntityProperties(entity, "name").name;
if (name === _this.WATER_SPOUT_NAME) {
print("EBL FOUND SPOUT");
_this.waterSpout = entity;
}
});

}, 2000);

},
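Editor's note: the key behavioral change in this script is the pour test. Instead of comparing the Euler pitch of the can's rotation against the threshold, it now checks the world-space Y component of the can's forward vector, which behaves consistently no matter how the can is yawed while held. A hedged sketch of just that test (threshold value taken from the script above; the function name is illustrative):

// Sketch: decide whether a held watering can is tilted far enough to pour.
var POUR_ANGLE_THRESHOLD = -0.1; // forward vector pointing slightly below the horizon

function isTiltedToPour(entityID) {
    var rotation = Entities.getEntityProperties(entityID, "rotation").rotation;
    var forwardVec = Quat.getFront(rotation);
    // Negative y means the spout is aimed downward, so water should flow.
    return forwardVec.y < POUR_ANGLE_THRESHOLD;
}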
@@ -8,9 +8,11 @@
// This entity script handles the logic for growing a plant when it has water poured on it
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

//
var PLANT_SCRIPT_URL = Script.resolvePath("atp:/growingPlant/growingPlantEntityScript.js");
var WATER_CAN_SCRIPT_URL = Script.resolvePath("atp:/growingPlant/waterCanEntityScript.js");
var WATER_CAN_SCRIPT_URL = Script.resolvePath("atp:/growingPlant/waterCanEntityScript.js");
Plant = function(spawnPosition, spawnRotation) {
var orientation;
if (spawnRotation !== undefined) {

@@ -18,10 +20,10 @@ Plant = function(spawnPosition, spawnRotation) {
} else {
orientation = Camera.getOrientation();
}
print("EBL ORIENTATION " + JSON.stringify(orientation));
var bowlPosition = spawnPosition;
var BOWL_MODEL_URL = "atp:/growingPlant/Flowers-Bowl.fbx";
var BOWL_COLLISION_HULL_URL = "atp:/growingPlant/bowl.obj";

var bowlDimensions = {
x: 0.518,
y: 0.1938,

@@ -31,9 +33,10 @@ Plant = function(spawnPosition, spawnRotation) {
type: "Model",
modelURL: BOWL_MODEL_URL,
dimensions: bowlDimensions,
dynamic: true,
shapeType: 'compound',
compoundShapeURL: BOWL_COLLISION_HULL_URL,
name: "plant bowl",
name: "home_model_plantBowl",
position: bowlPosition,
userData: JSON.stringify({
'hifiHomeKey': {

@@ -43,8 +46,8 @@ Plant = function(spawnPosition, spawnRotation) {
});

var PLANT_MODEL_URL = "atp:/growingPlant/Flowers-Rock.fbx";

var PLANT_MODEL_URL = "atp:/growingPlant/Flowers-Rock.fbx";
var plantDimensions = {
x: 0.52,
y: 0.2600,

@@ -59,7 +62,7 @@ Plant = function(spawnPosition, spawnRotation) {
var plant = Entities.addEntity({
type: "Model",
modelURL: PLANT_MODEL_URL,
name: "hifi-growable-plant",
name: "home_model_growablePlant",
dimensions: plantDimensions,
position: plantPosition,
script: PLANT_SCRIPT_URL,

@@ -78,7 +81,9 @@ Plant = function(spawnPosition, spawnRotation) {
var waterCanRotation = orientation;
var waterCan = Entities.addEntity({
type: "Model",
name: "hifi-water-can-newest",
shapeType: 'compound',
compoundShapeURL: WATER_CAN_COLLIISION_HULL_URL,
name: "home_model_waterCan",
modelURL: WATER_CAN_MODEL_URL,
script: WATER_CAN_SCRIPT_URL,
dimensions: {

@@ -135,11 +140,12 @@ Plant = function(spawnPosition, spawnRotation) {
});

var waterSpoutPosition = Vec3.sum(waterCanPosition, Vec3.multiply(0.2, Quat.getFront(orientation)))
var waterSpoutPosition = Vec3.sum(waterCanPosition, Vec3.multiply(0.21, Quat.getFront(orientation)))
var waterSpoutRotation = Quat.multiply(waterCanRotation, Quat.fromPitchYawRollDegrees(10, 0, 0));
var WATER_SPOUT_NAME = "home_box_waterSpout";
var waterSpout = Entities.addEntity({
type: "Box",
name: "hifi-water-spout",
name: WATER_SPOUT_NAME,
dimensions: {
x: 0.02,
y: 0.02,
@@ -1,103 +1,44 @@
{
"Entities": [
{
"angularVelocity": {
"x": -0.026321493089199066,
"y": -0.028931867331266403,
"z": 0.010236549191176891
"collisionless": 1,
"color": {
"blue": 0,
"green": 0,
"red": 255
},
"collisionsWillMove": 1,
"compoundShapeURL": "atp:/kineticObjects/lamp/Lamp-Bulldog-Shade.obj",
"created": "2016-03-22T22:55:11Z",
"created": "2016-03-24T22:17:16Z",
"dimensions": {
"x": 0.35158795118331909,
"y": 0.27823561429977417,
"z": 0.35158795118331909
"x": 0.2776367485523224,
"y": 0.27723021268844604,
"z": 0.2535492241382599
},
"dynamic": 1,
"gravity": {
"x": 0,
"y": -2,
"z": 0
},
"id": "{747a8714-3cc5-4336-bac3-de3c8ec1494d}",
"modelURL": "atp:/kineticObjects/lamp/Lamp-Bulldog-Shade.fbx",
"name": "hifi-home-model-bulldog-shade",
"id": "{c7c6108b-4c60-4735-b2bf-5559d7e30e7d}",
"ignoreForCollisions": 1,
"name": "hifi-home-living-room-desk-lamp-trigger",
"parentID": "{f59b50d8-13fb-4ceb-b80a-62cd03428a7c}",
"position": {
"x": 0.085205078125,
"y": 0.5194091796875,
"z": 0
"x": 0.0062166899442672729,
"y": -0.1577162891626358,
"z": -0.0018789069727063179
},
"queryAACube": {
"scale": 0.56977474689483643,
"x": -0.19968229532241821,
"y": 0.23452180624008179,
"z": -0.28488737344741821
},
"restitution": 0.20000000298023224,
"rotation": {
"w": 0.98912078142166138,
"x": -0.0035601628478616476,
"y": -0.14705567061901093,
"z": 0.0014477154472842813
},
"shapeType": "compound",
"type": "Model",
"userData": "{\"hifiHomeKey\":{\"reset\":true}}",
"velocity": {
"x": 0.0051150284707546234,
"y": 0.00043292529881000519,
"z": 0.00053954275790601969
}
},
{
"angularVelocity": {
"x": -0.0016508022090420127,
"y": 0.00053207820747047663,
"z": -0.0033033043146133423
},
"collisionsWillMove": 1,
"compoundShapeURL": "atp:/kineticObjects/lamp/Lamp-Bulldog-Base.obj",
"created": "2016-03-22T22:50:49Z",
"dimensions": {
"x": 0.26436957716941833,
"y": 0.73171323537826538,
"z": 0.26436954736709595
},
"dynamic": 1,
"gravity": {
"x": 0,
"y": -9,
"z": 0
},
"id": "{5dcc642f-34a6-44a7-8da8-275cbbbef9d6}",
"modelURL": "atp:/kineticObjects/lamp/Lamp-Bulldog-Base.fbx",
"name": "hifi-home-model-bulldog-base",
"position": {
"x": 0.08642578125,
"y": 0.306884765625,
"z": 0.00026702880859375
},
"queryAACube": {
"scale": 0.82169753313064575,
"x": -0.32442298531532288,
"y": -0.10396400094032288,
"z": -0.41058173775672913
"scale": 6.92822265625,
"x": 1101.2117919921875,
"y": 456.86856079101562,
"z": -84.708488464355469
},
"rotation": {
"w": 0.84591019153594971,
"x": -6.7644752562046051e-05,
"y": -0.53332537412643433,
"z": 8.2868709796457551e-06
"w": 0.84585332870483398,
"x": 1.52587890625e-05,
"y": 0.53337907791137695,
"z": -0.0001373291015625
},
"shapeType": "compound",
"type": "Model",
"userData": "{\"hifiHomeKey\":{\"reset\":true}}",
"velocity": {
"x": 0.0011470125755295157,
"y": 0.0010520070791244507,
"z": -0.0011547321919351816
}
"script": "atp:/switches/livingRoomDeskLamp.js",
"scriptTimestamp": 1458860464095,
"type": "Box",
"userData": "{\"hifiHomeKey\":{\"reset\":true},\"grabbableKey\":{\"wantsTrigger\":true},\"home-switch\":{\"state\":\"off\"}}",
"visible": 0
},
{
"color": {

@@ -113,68 +54,120 @@
"z": 4
},
"falloffRadius": 0.30000001192092896,
"id": "{1f15e48e-66cd-4ecd-8d60-a391f050ec8d}",
"id": "{47e2c415-854c-4b6a-b71e-06c760675afd}",
"intensity": 20,
"isSpotlight": 1,
"name": "hifi-home-living-room-desk-lamp-spotlight",
"parentID": "{c7c6108b-4c60-4735-b2bf-5559d7e30e7d}",
"position": {
"x": 0,
"y": 0,
"z": 0.727783203125
"x": -0.10525670647621155,
"y": -0.14917388558387756,
"z": 0.71759903430938721
},
"queryAACube": {
"scale": 6.9282031059265137,
"x": -3.4641015529632568,
"y": -3.4641015529632568,
"z": -2.7363183498382568
"scale": 20.784608840942383,
"x": 1094.28076171875,
"y": 449.9404296875,
"z": -91.635543823242188
},
"rotation": {
"w": 0.63591969013214111,
"x": 0.26338601112365723,
"y": -0.27760738134384155,
"z": -0.67016100883483887
"w": 0.6360476016998291,
"x": 0.26325368881225586,
"y": -0.27753552794456482,
"z": -0.67013490200042725
},
"type": "Light",
"userData": "{\"hifiHomeKey\":{\"reset\":true}}",
"visible": 0
},
{
"collisionsWillMove": 1,
"compoundShapeURL": "atp:/kineticObjects/lamp/Lamp-Bulldog-Shade.obj",
"created": "2016-03-22T22:55:11Z",
"dimensions": {
"x": 0.35158795118331909,
"y": 0.27823561429977417,
"z": 0.35158795118331909
},
"dynamic": 1,
"gravity": {
"x": 0,
"y": -2,
"z": 0
},
"id": "{6c59ca6f-e3f6-42c1-ae4d-1549b5bdaee1}",
"modelURL": "atp:/kineticObjects/lamp/Lamp-Bulldog-Shade.fbx",
"name": "hifi-home-model-bulldog-shade",
"position": {
"x": 0.085205078125,
"y": 0.5194091796875,
"z": 0
},
"queryAACube": {
"scale": 0.56977474689483643,
"x": -0.28488737344741821,
"y": -0.072393476963043213,
"z": -0.28485685586929321
},
"restitution": 0.20000000298023224,
"rotation": {
"w": 0.98724901676177979,
"x": -0.0033740350045263767,
"y": -0.15913932025432587,
"z": 0.0016243136487901211
},
"shapeType": "compound",
"type": "Model",
"userData": "{\"hifiHomeKey\":{\"reset\":true}}"
},
{
"collisionless": 1,
"color": {
"blue": 0,
"green": 0,
"red": 255
"angularVelocity": {
"x": 0.0087265009060502052,
"y": 0.005478390958160162,
"z": 0.0057442504912614822
},
"created": "2016-03-24T22:17:16Z",
"collisionsWillMove": 1,
"compoundShapeURL": "atp:/kineticObjects/lamp/Lamp-Bulldog-Base.obj",
"created": "2016-03-22T22:50:49Z",
"dimensions": {
"x": 0.15776367485523224,
"y": 0.26723021268844604,
"z": 0.16535492241382599
"x": 0.26436957716941833,
"y": 0.73171323537826538,
"z": 0.26436954736709595
},
"id": "{51eb700c-62ba-4e1d-813a-2c23deb02e26}",
"ignoreForCollisions": 1,
"name": "hifi-home-living-room-desk-lamp-trigger",
"dynamic": 1,
"gravity": {
"x": 0,
"y": -9,
"z": 0
},
"id": "{f59b50d8-13fb-4ceb-b80a-62cd03428a7c}",
"modelURL": "atp:/kineticObjects/lamp/Lamp-Bulldog-Base.fbx",
"name": "hifi-home-model-bulldog-base",
"position": {
"x": 0.09033203125,
"y": 0.149169921875,
"z": 0.00484466552734375
"x": 0.08642578125,
"y": 0.306884765625,
"z": 0.00026702880859375
},
"queryAACube": {
"scale": 0.35162994265556335,
"x": -0.085482940077781677,
"y": -0.026645049452781677,
"z": -0.17097030580043793
"scale": 0.82169753313064575,
"x": -0.40962806344032288,
"y": -0.41084876656532288,
"z": -0.41084876656532288
},
"rotation": {
"w": 1,
"x": -1.52587890625e-05,
"y": -1.52587890625e-05,
"z": -1.52587890625e-05
"w": 0.84690523147583008,
"x": 4.9625090468907729e-05,
"y": -0.53174382448196411,
"z": 4.0798266127239913e-05
},
"script": "atp:/switches/livingRoomDeskLamp.js",
"scriptTimestamp": 1458860464095,
"type": "Box",
"userData": "{\"hifiHomeKey\":{\"reset\":true},\"grabbableKey\":{\"wantsTrigger\":true},\"home-switch\":{\"state\":\"on\"}}",
"visible": 0
"shapeType": "compound",
"type": "Model",
"userData": "{\"hifiHomeKey\":{\"reset\":true}}",
"velocity": {
"x": -0.0026286719366908073,
"y": 0.0016162246465682983,
"z": 0.0042029935866594315
}
}
],
"Version": 57
@@ -41,12 +41,7 @@
"y": 1.1607639789581299,
"z": 0.87844705581665039
},
"dynamic": 1,
"gravity": {
"x": 0,
"y": -5,
"z": 0
},
"dynamic": 0,
"id": "{9eec1faa-9e1a-4d76-abeb-a1b1175a44d5}",
"modelURL": "atp:/kineticObjects/posters/Cellscience-Poster-2.fbx",
"name": "hifi-home-poster-cell",

@@ -60,6 +55,35 @@
"shapeType": "compound",
"type": "Model",
"userData": "{\"hifiHomeKey\":{\"reset\":true}}"
},
{
"color": {
"blue": 255,
"green": 0,
"red": 255
},
"name": "home_sphere_cellsciencePortkey",
"href": "hifi://cellscience",
"script": "atp:/switches/portkey.js",
"visible": false,
"dimensions": {
"x": 0.111,
"y": 0.111,
"z": 0.111
},
"position": {
"x": 0.0,
"y": -0.2,
"z": 0.0
},
"queryAACube": {
"scale": 0.25831151008605957,
"x": -0.12915575504302979,
"y": -0.12915575504302979,
"z": -0.10074388980865479
},
"type": "Sphere",
"userData": "{\"hifiHomeKey\":{\"reset\":true}, \"grabbableKey\": {\"wantsTrigger\": true}}"
}],
"Version": 57
}
@@ -8,12 +8,7 @@
"y": 1.2243480682373047,
"z": 0.92656642198562622
},
"dynamic": 1,
"gravity": {
"x": 0,
"y": -2,
"z": 0
},
"dynamic": 0,
"name": "hifi-home-poster-playa",
"id": "{2b5ca0a0-9115-4916-bee6-63f88d3909b1}",
"modelURL": "atp:/kineticObjects/posters/Playa-Poster.fbx",

@@ -66,6 +61,35 @@
},
"type": "Box",
"userData": "{\"hifiHomeKey\":{\"reset\":true}}"
},
{
"color": {
"blue": 255,
"green": 0,
"red": 255
},
"name": "home_sphere_playaPortkey",
"href": "hifi://playa",
"script": "atp:/switches/portkey.js",
"visible": false,
"dimensions": {
"x": 0.111,
"y": 0.111,
"z": 0.111
},
"position": {
"x": 0.0,
"y": -0.2,
"z": 0.0
},
"queryAACube": {
"scale": 0.25831151008605957,
"x": -0.12915575504302979,
"y": -0.12915575504302979,
"z": -0.10074388980865479
},
"type": "Sphere",
"userData": "{\"hifiHomeKey\":{\"reset\":true}, \"grabbableKey\": {\"wantsTrigger\": true}}"
}],
"Version": 57
}
@@ -30,6 +30,9 @@

var plantPath = Script.resolvePath("atp:/growingPlant/wrapper.js");

//EBL REMOVE ME
var myPlant;

var cuckooClockPath = Script.resolvePath("atp:/cuckooClock/wrapper.js");

var pingPongGunPath = Script.resolvePath("atp:/pingPongGun/wrapper.js");

@@ -166,15 +169,15 @@
z: 0
});

// var myPlant = new Plant({
// x: 1099.8785,
// y: 460.3115,
// z: -84.7736
// }, {
// x: 0,
// y: 0,
// z: 0
// });
myPlant = new Plant({
x: 1099.8785,
y: 460.3115,
z: -84.7736
}, {
x: 0,
y: 0,
z: 0
});

var pingPongGun = new HomePingPongGun({
x: 1101.2123,

@@ -195,7 +198,7 @@
z: 0
});

//v2.0
// v2.0
// var musicBox = new MusicBox();
// var doppelganger = new Doppelganger();

@@ -271,12 +274,6 @@
z: -73.3
});

var livingRoomLampTriggerBoxName = "hifi-home-living-room-desk-lamp-trigger";
var livingRoomLampModelName = "hifi-home-model-bulldog-base";
Script.setTimeout(function() {
attachChildToParent(livingRoomLampTriggerBoxName, livingRoomLampModelName, MyAvatar.position, 20);
}, 1000);

},

setupDressingRoom: function() {

@@ -468,7 +465,8 @@
},

unload: function() {
// this.findAndDeleteHomeEntities();
this.findAndDeleteHomeEntities();

}

}
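Editor's note: the hunk above removes the runtime call that parented the invisible desk-lamp trigger box to the lamp base via attachChildToParent (the lamp JSON earlier in this diff now ships the trigger with its parentID already set). That helper is not shown in this diff; the following is only a plausible sketch, under the assumption that it looks entities up by name near a position and sets parentID, with the signature mirroring the removed call:

// Illustrative sketch only: the real attachChildToParent lives elsewhere in the domain scripts.
function attachChildToParent(childName, parentName, searchPosition, searchRadius) {
    var childID = null;
    var parentID = null;
    Entities.findEntities(searchPosition, searchRadius).forEach(function(entityID) {
        var name = Entities.getEntityProperties(entityID, "name").name;
        if (name === childName) {
            childID = entityID;
        } else if (name === parentName) {
            parentID = entityID;
        }
    });
    if (childID && parentID) {
        // Parenting keeps the trigger box following the lamp when it is knocked around.
        Entities.editEntity(childID, { parentID: parentID });
    }
}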
42 unpublishedScripts/DomainContent/Home/switches/portkey.js Normal file
@@ -0,0 +1,42 @@
//
// portkey.js
//
//
// Created by Eric Levin on 3/28/2016
// Copyright 2016 High Fidelity, Inc.
//
// This entity script is designed to teleport a user to an in-world location specified in the object's userData
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

(function() {

Script.include("atp:/utils.js");
var _this = this;

this.startFarTrigger = function() {
_this.teleport();
}

this.startNearTrigger = function() {
_this.teleport();
}

this.startNearGrab = function() {
_this.teleport();
}

this.teleport = function() {
Window.location = _this.portkeyLink;
}

this.preload = function(entityID) {
_this.entityID = entityID;
_this.portkeyLink = Entities.getEntityProperties(_this.entityID, "href").href;
}

});
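Editor's note: the portkey spheres added to the poster JSON earlier in this diff are what feed this script; the entity's href holds the destination and its script property points at portkey.js, which reads the href back in preload() and assigns it to Window.location on trigger. A hedged sketch of spawning such a trigger sphere from a script (the position, name, and destination below are placeholders, not values from this commit):

// Sketch: create an invisible trigger sphere that portkey.js can teleport from.
var PORTKEY_SCRIPT_URL = "atp:/switches/portkey.js";

function addPortkey(position, destination, name) {
    return Entities.addEntity({
        type: "Sphere",
        name: name,                                  // e.g. "home_sphere_examplePortkey"
        position: position,
        dimensions: { x: 0.111, y: 0.111, z: 0.111 },
        visible: false,                              // the poster model provides the visual
        href: destination,                           // read back by portkey.js in preload()
        script: PORTKEY_SCRIPT_URL,
        userData: JSON.stringify({
            hifiHomeKey: { reset: true },
            grabbableKey: { wantsTrigger: true }     // fire start*Trigger handlers instead of a grab
        })
    });
}

// Example (placeholder destination):
// addPortkey({ x: 0, y: 1, z: 0 }, "hifi://playa", "home_sphere_examplePortkey");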