Merge branch 'master' of https://github.com/highfidelity/hifi into gputest

Seiji Emery 2015-08-24 14:38:26 -07:00
commit 4d9f45475e
192 changed files with 4146 additions and 2062 deletions

View file

@ -213,3 +213,12 @@ endif ()
if (ANDROID OR DESKTOP_GVR)
add_subdirectory(gvr-interface)
endif ()
if (DEFINED ENV{HIFI_MEMORY_DEBUGGING})
SET( HIFI_MEMORY_DEBUGGING true )
endif ()
if (HIFI_MEMORY_DEBUGGING)
if (UNIX)
MESSAGE("-- Memory debugging is enabled")
endif (UNIX)
endif ()

View file

@ -1,6 +1,6 @@
set(TARGET_NAME assignment-client)
setup_hifi_project(Core Gui Network Script Widgets)
setup_hifi_project(Core Gui Network Script Widgets WebSockets)
add_dependency_external_projects(glm)
find_package(GLM REQUIRED)
@ -17,4 +17,8 @@ if (UNIX)
target_link_libraries(${TARGET_NAME} ${CMAKE_DL_LIBS})
endif (UNIX)
copy_dlls_beside_windows_executable()
include_application_version()
setup_memory_debugger()
copy_dlls_beside_windows_executable()

View file

@ -24,6 +24,7 @@
#include <SoundCache.h>
#include <UUID.h>
#include <WebSocketServerClass.h>
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h
#include "avatars/ScriptableAvatar.h"
@ -180,10 +181,17 @@ void Agent::run() {
// register ourselves to the script engine
_scriptEngine.registerGlobalObject("Agent", this);
if (!_payload.isEmpty()) {
_scriptEngine.setParentURL(_payload);
}
_scriptEngine.init(); // must be done before we set up the viewers
_scriptEngine.registerGlobalObject("SoundCache", DependencyManager::get<SoundCache>().data());
QScriptValue webSocketServerConstructorValue = _scriptEngine.newFunction(WebSocketServerClass::constructor);
_scriptEngine.globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
_scriptEngine.registerGlobalObject("EntityViewer", &_entityViewer);

View file

@ -129,6 +129,7 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
packetReceiver.registerListener(PacketType::CreateAssignment, this, "handleCreateAssignmentPacket");
packetReceiver.registerListener(PacketType::StopNode, this, "handleStopNodePacket");
}
void AssignmentClient::stopAssignmentClient() {
qDebug() << "Forced stop of assignment-client.";
@ -172,7 +173,6 @@ void AssignmentClient::aboutToQuit() {
qInstallMessageHandler(0);
}
void AssignmentClient::setUpStatusToMonitor() {
// send a stats packet every 1 seconds
connect(&_statsTimerACM, &QTimer::timeout, this, &AssignmentClient::sendStatusPacketToACM);
@ -217,7 +217,6 @@ void AssignmentClient::sendAssignmentRequest() {
qDebug() << "Failed to read local assignment server port from shared memory"
<< "- will send assignment request to previous assignment server socket.";
}
}
nodeList->sendAssignment(_requestAssignment);

View file

@ -12,6 +12,7 @@
#include <QCommandLineParser>
#include <QThread>
#include <ApplicationVersion.h>
#include <LogHandler.h>
#include <SharedUtil.h>
#include <HifiConfigVariantMap.h>
@ -40,6 +41,7 @@ AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
setOrganizationName("High Fidelity");
setOrganizationDomain("highfidelity.io");
setApplicationName("assignment-client");
setApplicationVersion(BUILD_VERSION);
// use the verbose message handler in Logging
qInstallMessageHandler(LogHandler::verboseMessageHandler);
@ -93,10 +95,8 @@ AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
Q_UNREACHABLE();
}
const QVariantMap argumentVariantMap = HifiConfigVariantMap::mergeCLParametersWithJSONConfig(arguments());
unsigned int numForks = 0;
if (parser.isSet(numChildsOption)) {
numForks = parser.value(numChildsOption).toInt();
@ -139,7 +139,6 @@ AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
assignmentPool = parser.value(poolOption);
}
QUuid walletUUID;
if (argumentVariantMap.contains(ASSIGNMENT_WALLET_DESTINATION_ID_OPTION)) {
walletUUID = argumentVariantMap.value(ASSIGNMENT_WALLET_DESTINATION_ID_OPTION).toString();

View file

@ -203,7 +203,7 @@ void AssignmentClientMonitor::checkSpares() {
void AssignmentClientMonitor::handleChildStatusPacket(QSharedPointer<NLPacket> packet) {
// read out the sender ID
QUuid senderID = QUuid::fromRfc4122(packet->read(NUM_BYTES_RFC4122_UUID));
QUuid senderID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
auto nodeList = DependencyManager::get<NodeList>();

View file

@ -90,7 +90,7 @@ int AudioMixerClientData::parseData(NLPacket& packet) {
// grab the stream identifier for this injected audio
packet.seek(sizeof(quint16));
QUuid streamIdentifier = QUuid::fromRfc4122(packet.read(NUM_BYTES_RFC4122_UUID));
QUuid streamIdentifier = QUuid::fromRfc4122(packet.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
bool isStereo;
packet.readPrimitive(&isStereo);

View file

@ -15,7 +15,7 @@
int AvatarMixerClientData::parseData(NLPacket& packet) {
// compute the offset to the data payload
return _avatar.parseDataFromBuffer(packet.read(packet.bytesLeftToRead()));
return _avatar.parseDataFromBuffer(packet.readWithoutCopy(packet.bytesLeftToRead()));
}
bool AvatarMixerClientData::checkAndSetHasReceivedFirstPackets() {

View file

@ -5,6 +5,21 @@ set(EXTERNAL_NAME LibOVR)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
# These are all provided in order to allow easier testing of both
# the legacy display plugin and the new windows only plugin on
# various versions of the SDK, all on windows
#
# 0.5 public
# URL http://static.oculus.com/sdk-downloads/ovr_sdk_win_0.5.0.1.zip
# URL_MD5 d3fc4c02db9be5ff08af4ef4c97b32f9
# 0.6 public
# URL http://static.oculus.com/sdk-downloads/0.6.0.1/Public/1435190862/ovr_sdk_win_0.6.0.1.zip
# URL_MD5 4b3ef825f9a1d6d3035c9f6820687da9
# 0.7 alpha
# URL https://s3.amazonaws.com/static.oculus.com/sdk-downloads/0.7.0.0/Public/Alpha/ovr_sdk_win_0.7.0.0_RC1.zip
# URL_MD5 a562bb9d117087b2cf9d86653ea70fd8
if (WIN32)
ExternalProject_Add(

View file

@ -1,9 +1,12 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// ApplicationVersion.h.in
// cmake/macros
//
// Created by Leonardo Murillo on 8/13/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MainWindowOpenGLDisplayPlugin.h"
const QString BUILD_VERSION = "@BUILD_SEQ@";

View file

@ -0,0 +1,22 @@
#
# IncludeApplicationVersion.cmake
# cmake/macros
#
# Created by Leonardo Murillo on 07/14/2015.
# Copyright 2015 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(INCLUDE_APPLICATION_VERSION)
if (DEFINED ENV{JOB_ID})
set (BUILD_SEQ $ENV{JOB_ID})
elseif (DEFINED ENV{ghprbPullId})
set (BUILD_SEQ "PR: $ENV{ghprbPullId} - Commit: $ENV{ghprbActualCommit}")
else ()
set(BUILD_SEQ "dev")
endif ()
configure_file("${MACRO_DIR}/ApplicationVersion.h.in" "${PROJECT_BINARY_DIR}/includes/ApplicationVersion.h")
include_directories("${PROJECT_BINARY_DIR}/includes")
endmacro(INCLUDE_APPLICATION_VERSION)

View file

@ -0,0 +1,21 @@
#
# MemoryDebugger.cmake
#
# Copyright 2015 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(SETUP_MEMORY_DEBUGGER)
if (DEFINED ENV{HIFI_MEMORY_DEBUGGING})
SET( HIFI_MEMORY_DEBUGGING true )
endif ()
if (HIFI_MEMORY_DEBUGGING)
if (UNIX)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address -fno-omit-frame-pointer")
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libasan -static-libstdc++ -fsanitize=address")
endif (UNIX)
endif ()
endmacro(SETUP_MEMORY_DEBUGGER)

View file

@ -1,5 +1,7 @@
set(TARGET_NAME domain-server)
setup_memory_debugger()
if (UPPER_CMAKE_BUILD_TYPE MATCHES DEBUG AND NOT WIN32)
set(_SHOULD_SYMLINK_RESOURCES TRUE)
else ()
@ -31,4 +33,5 @@ include_directories(SYSTEM "${OPENSSL_INCLUDE_DIR}")
# append OpenSSL to our list of libraries to link
target_link_libraries(${TARGET_NAME} ${OPENSSL_LIBRARIES})
include_application_version()
copy_dlls_beside_windows_executable()

View file

@ -9,6 +9,7 @@
<thead>
<tr>
<th>Type</th>
<th>Version</th>
<th>UUID</th>
<th>Pool</th>
<th>Username</th>
@ -24,12 +25,13 @@
<% _.each(nodes, function(node, node_index){ %>
<tr>
<td><%- node.type %></td>
<td><%- node.version %></td>
<td><a href="stats/?uuid=<%- node.uuid %>"><%- node.uuid %></a></td>
<td><%- node.pool %></td>
<td><%- node.username %></td>
<td><%- node.public.ip %><span class='port'>:<%- node.public.port %></span></td>
<td><%- node.local.ip %><span class='port'>:<%- node.local.port %></span></td>
<td><%- ((Date.now() - node.wake_timestamp) / 1000).toLocaleString() %></td>
<td><%- node.uptime %></td>
<td><%- (typeof node.pending_credits == 'number' ? node.pending_credits.toLocaleString() : 'N/A') %></td>
<td><span class='glyphicon glyphicon-remove' data-uuid="<%- node.uuid %>"></span></td>
</tr>
@ -75,4 +77,4 @@
<!--#include file="footer.html"-->
<script src='js/underscore-min.js'></script>
<script src='js/tables.js'></script>
<!--#include file="page-end.html"-->
<!--#include file="page-end.html"-->

View file

@ -9,9 +9,9 @@ $(document).ready(function(){
json.nodes.sort(function(a, b){
if (a.type === b.type) {
if (a.wake_timestamp < b.wake_timestamp) {
if (a.uptime < b.uptime) {
return 1;
} else if (a.wake_timestamp > b.wake_timestamp) {
} else if (a.uptime > b.uptime) {
return -1;
} else {
return 0;

View file

@ -24,6 +24,7 @@
#include <QUrlQuery>
#include <AccountManager.h>
#include <ApplicationVersion.h>
#include <HifiConfigVariantMap.h>
#include <HTTPConnection.h>
#include <JSONBreakableMarshal.h>
@ -75,6 +76,7 @@ DomainServer::DomainServer(int argc, char* argv[]) :
setOrganizationName("High Fidelity");
setOrganizationDomain("highfidelity.io");
setApplicationName("domain-server");
setApplicationVersion(BUILD_VERSION);
QSettings::setDefaultFormat(QSettings::IniFormat);
// make sure we have a fresh AccountManager instance
@ -738,6 +740,7 @@ void DomainServer::processConnectRequestPacket(QSharedPointer<NLPacket> packet)
if (isAssignment) {
nodeData->setAssignmentUUID(matchingQueuedAssignment->getUUID());
nodeData->setWalletUUID(pendingAssigneeData->getWalletUUID());
nodeData->setNodeVersion(pendingAssigneeData->getNodeVersion());
// always allow assignment clients to create and destroy entities
newNode->setCanAdjustLocks(true);
@ -1168,7 +1171,8 @@ void DomainServer::processRequestAssignmentPacket(QSharedPointer<NLPacket> packe
// add the information for that deployed assignment to the hash of pending assigned nodes
PendingAssignedNodeData* pendingNodeData = new PendingAssignedNodeData(assignmentToDeploy->getUUID(),
requestAssignment.getWalletUUID());
requestAssignment.getWalletUUID(),
requestAssignment.getNodeVersion());
_pendingAssignedNodes.insert(uniqueAssignment.getUUID(), pendingNodeData);
} else {
if (requestAssignment.getType() != Assignment::AgentType
@ -1476,9 +1480,9 @@ const char JSON_KEY_PUBLIC_SOCKET[] = "public";
const char JSON_KEY_LOCAL_SOCKET[] = "local";
const char JSON_KEY_POOL[] = "pool";
const char JSON_KEY_PENDING_CREDITS[] = "pending_credits";
const char JSON_KEY_WAKE_TIMESTAMP[] = "wake_timestamp";
const char JSON_KEY_UPTIME[] = "uptime";
const char JSON_KEY_USERNAME[] = "username";
const char JSON_KEY_VERSION[] = "version";
QJsonObject DomainServer::jsonObjectForNode(const SharedNodePointer& node) {
QJsonObject nodeJson;
@ -1498,13 +1502,14 @@ QJsonObject DomainServer::jsonObjectForNode(const SharedNodePointer& node) {
nodeJson[JSON_KEY_LOCAL_SOCKET] = jsonForSocket(node->getLocalSocket());
// add the node uptime in our list
nodeJson[JSON_KEY_WAKE_TIMESTAMP] = QString::number(node->getWakeTimestamp());
nodeJson[JSON_KEY_UPTIME] = QString::number(double(QDateTime::currentMSecsSinceEpoch() - node->getWakeTimestamp()) / 1000.0);
// if the node has pool information, add it
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(node->getLinkedData());
// add the node username, if it exists
nodeJson[JSON_KEY_USERNAME] = nodeData->getUsername();
nodeJson[JSON_KEY_VERSION] = nodeData->getNodeVersion();
SharedAssignmentPointer matchingAssignment = _allAssignments.value(nodeData->getAssignmentUUID());
if (matchingAssignment) {
@ -1527,7 +1532,6 @@ QJsonObject DomainServer::jsonObjectForNode(const SharedNodePointer& node) {
}
const char ASSIGNMENT_SCRIPT_HOST_LOCATION[] = "resources/web/assignment";
QString pathForAssignmentScript(const QUuid& assignmentUUID) {
QString newPath(ASSIGNMENT_SCRIPT_HOST_LOCATION);
newPath += "/scripts/";
@ -1537,7 +1541,6 @@ QString pathForAssignmentScript(const QUuid& assignmentUUID) {
}
const QString URI_OAUTH = "/oauth";
bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url, bool skipSubHandler) {
const QString JSON_MIME_TYPE = "application/json";
@ -2024,8 +2027,6 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
}
const QString OAUTH_JSON_ACCESS_TOKEN_KEY = "access_token";
QNetworkReply* DomainServer::profileRequestGivenTokenReply(QNetworkReply* tokenReply) {
// pull the access token from the returned JSON and store it with the matching session UUID
QJsonDocument returnedJSON = QJsonDocument::fromJson(tokenReply->readAll());
@ -2042,7 +2043,6 @@ QNetworkReply* DomainServer::profileRequestGivenTokenReply(QNetworkReply* tokenR
}
const QString DS_SETTINGS_SESSIONS_GROUP = "web-sessions";
Headers DomainServer::setupCookieHeadersFromProfileReply(QNetworkReply* profileReply) {
Headers cookieHeaders;

View file

@ -50,6 +50,10 @@ public:
const NodeSet& getNodeInterestSet() const { return _nodeInterestSet; }
void setNodeInterestSet(const NodeSet& nodeInterestSet) { _nodeInterestSet = nodeInterestSet; }
void setNodeVersion(const QString& nodeVersion) { _nodeVersion = nodeVersion; }
const QString& getNodeVersion() { return _nodeVersion; }
private:
QJsonObject mergeJSONStatsFromNewObject(const QJsonObject& newObject, QJsonObject destinationObject);
@ -62,6 +66,7 @@ private:
HifiSockAddr _sendingSockAddr;
bool _isAuthenticated;
NodeSet _nodeInterestSet;
QString _nodeVersion;
};
#endif // hifi_DomainServerNodeData_h

View file

@ -11,9 +11,10 @@
#include "PendingAssignedNodeData.h"
PendingAssignedNodeData::PendingAssignedNodeData(const QUuid& assignmentUUID, const QUuid& walletUUID) :
PendingAssignedNodeData::PendingAssignedNodeData(const QUuid& assignmentUUID, const QUuid& walletUUID, const QString& nodeVersion) :
_assignmentUUID(assignmentUUID),
_walletUUID(walletUUID)
_walletUUID(walletUUID),
_nodeVersion(nodeVersion)
{
}

View file

@ -18,16 +18,20 @@
class PendingAssignedNodeData : public QObject {
Q_OBJECT
public:
PendingAssignedNodeData(const QUuid& assignmentUUID, const QUuid& walletUUID);
PendingAssignedNodeData(const QUuid& assignmentUUID, const QUuid& walletUUID, const QString& nodeVersion);
void setAssignmentUUID(const QUuid& assignmentUUID) { _assignmentUUID = assignmentUUID; }
const QUuid& getAssignmentUUID() const { return _assignmentUUID; }
void setWalletUUID(const QUuid& walletUUID) { _walletUUID = walletUUID; }
const QUuid& getWalletUUID() const { return _walletUUID; }
const QString& getNodeVersion() const { return _nodeVersion; }
private:
QUuid _assignmentUUID;
QUuid _walletUUID;
QString _nodeVersion;
};
#endif // hifi_PendingAssignedNodeData_h

View file

@ -16,6 +16,4 @@ Script.load("notifications.js");
Script.load("users.js");
Script.load("grab.js");
Script.load("directory.js");
Script.load("mouseLook.js");
Script.load("hmdControls.js");
Script.load("dialTone.js");

View file

@ -1043,17 +1043,23 @@ function getPositionToCreateEntity() {
var placementPosition = Vec3.sum(Camera.position, offset);
var cameraPosition = Camera.position;
var HALF_TREE_SCALE = 16384;
var cameraOutOfBounds = cameraPosition.x < 0 || cameraPosition.y < 0 || cameraPosition.z < 0;
var placementOutOfBounds = placementPosition.x < 0 || placementPosition.y < 0 || placementPosition.z < 0;
var cameraOutOfBounds = Math.abs(cameraPosition.x) > HALF_TREE_SCALE
|| Math.abs(cameraPosition.y) > HALF_TREE_SCALE
|| Math.abs(cameraPosition.z) > HALF_TREE_SCALE;
var placementOutOfBounds = Math.abs(placementPosition.x) > HALF_TREE_SCALE
|| Math.abs(placementPosition.y) > HALF_TREE_SCALE
|| Math.abs(placementPosition.z) > HALF_TREE_SCALE;
if (cameraOutOfBounds && placementOutOfBounds) {
return null;
}
placementPosition.x = Math.max(0, placementPosition.x);
placementPosition.y = Math.max(0, placementPosition.y);
placementPosition.z = Math.max(0, placementPosition.z);
placementPosition.x = Math.min(HALF_TREE_SCALE, Math.max(-HALF_TREE_SCALE, placementPosition.x));
placementPosition.y = Math.min(HALF_TREE_SCALE, Math.max(-HALF_TREE_SCALE, placementPosition.y));
placementPosition.z = Math.min(HALF_TREE_SCALE, Math.max(-HALF_TREE_SCALE, placementPosition.z));
return placementPosition;
}
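For illustration, with HALF_TREE_SCALE = 16384 the new clamp behaves like this (the sample coordinates below are made up):
// A proposed placement just outside the domain is pulled back onto the boundary:
//   { x: 20000, y: 5, z: -20000 }  ->  { x: 16384, y: 5, z: -16384 }
// Only when both the camera and the placement are out of bounds does the function return null,
// instead of the old behaviour of flooring every coordinate at 0.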

View file

@ -8,6 +8,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var toolIconUrl = HIFI_PUBLIC_BUCKET + "images/tools/";
progressDialog = (function () {
var that = {},
@ -142,4 +144,4 @@ progressDialog = (function () {
that.cleanup = cleanup;
return that;
}());
}());

View file

@ -11,17 +11,14 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var test = function(name, func) {
test = function(name, func, timeout) {
print("Running test: " + name);
var unitTest = new UnitTest(name, func);
try {
unitTest.run();
var unitTest = new UnitTest(name, func, timeout);
unitTest.run(function(unitTest) {
print(" Success: " + unitTest.numAssertions + " assertions passed");
} catch (error) {
}, function(unitTest, error) {
print(" Failure: " + error.name + " " + error.message);
}
});
};
AssertionException = function(expected, actual, message) {
@ -36,13 +33,86 @@ UnthrownException = function(message) {
this.name = 'UnthrownException';
};
UnitTest = function(name, func) {
this.numAssertions = 0;
this.func = func;
TimeoutException = function() {
print("Creating exception");
this.message = "UnitTest timed out\n";
this.name = 'TimeoutException';
};
UnitTest.prototype.run = function() {
this.func();
SequentialUnitTester = function() {
this.tests = [];
this.testIndex = -1;
};
SequentialUnitTester.prototype.addTest = function(name, func, timeout) {
var _this = this;
this.tests.push(function() {
print("Running test: " + name);
var unitTest = new UnitTest(name, func, timeout);
unitTest.run(function(unitTest) {
print(" Success: " + unitTest.numAssertions + " assertions passed");
_this._nextTest();
}, function(unitTest, error) {
print(" Failure: " + error.name + " " + error.message);
_this._nextTest();
});
});
};
SequentialUnitTester.prototype._nextTest = function() {
this.testIndex++;
if (this.testIndex < this.tests.length) {
this.tests[this.testIndex]();
return;
}
print("Completed all UnitTests");
};
SequentialUnitTester.prototype.run = function() {
this._nextTest();
};
UnitTest = function(name, func, timeout) {
this.numAssertions = 0;
this.func = func;
this.timeout = timeout;
};
UnitTest.prototype.run = function(successCallback, failureCallback) {
var _this = this;
this.successCallback = successCallback;
this.failureCallback = failureCallback;
if (this.timeout !== undefined) {
this.timeoutTimer = Script.setTimeout(function() {
_this.failureCallback(this, new TimeoutException());
}, this.timeout);
}
try {
this.func();
if (this.timeout === undefined) {
successCallback(this);
}
} catch (exception) {
this.handleException(exception);
}
};
UnitTest.prototype.registerCallbackFunction = function(func) {
var _this = this;
return function(one, two, three, four, five, six) {
try {
func(one, two, three, four, five, six);
} catch (exception) {
_this.handleException(exception);
}
};
};
UnitTest.prototype.handleException = function(exception) {
if (this.timeout !== undefined) {
Script.clearTimeout(this.timeoutTimer);
}
this.failureCallback(this, exception);
};
UnitTest.prototype.assertNotEquals = function(expected, actual, message) {
@ -83,7 +153,7 @@ UnitTest.prototype.assertNull = function(value, message) {
UnitTest.prototype.arrayEqual = function(array1, array2, message) {
this.numAssertions++;
if (array1.length !== array2.length) {
throw new AssertionException(array1.length , array2.length , message);
throw new AssertionException(array1.length, array2.length , message);
}
for (var i = 0; i < array1.length; ++i) {
if (array1[i] !== array2[i]) {
@ -101,4 +171,11 @@ UnitTest.prototype.raises = function(func, message) {
}
throw new UnthrownException(message);
}
}
UnitTest.prototype.done = function() {
if (this.timeout !== undefined) {
Script.clearTimeout(this.timeoutTimer);
this.successCallback(this);
}
}
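As a quick illustration of the new asynchronous flow (the test name and body below are made up; the calls themselves are the API added above):
var tester = new SequentialUnitTester();
tester.addTest("Example async test", function() {
    var _this = this;  // inside the test body, 'this' is the UnitTest instance
    Script.setTimeout(_this.registerCallbackFunction(function() {
        _this.assertEquals(4, 2 + 2, "2 + 2 should be 4");
        _this.done();   // clears the timeout timer and reports success
    }), 100);
}, 1000);               // reports a TimeoutException if done() is not reached within 1000 ms
tester.run();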

View file

@ -0,0 +1,82 @@
//
// basketball.js
// examples
//
// Created by Philip Rosedale on August 20, 2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var basketballURL = HIFI_PUBLIC_BUCKET + "models/content/basketball2.fbx";
var collisionSoundURL = HIFI_PUBLIC_BUCKET + "sounds/basketball/basketball.wav";
var basketball = null;
var originalPosition = null;
var hasMoved = false;
var GRAVITY = -9.8;
var DISTANCE_IN_FRONT_OF_ME = 1.0;
var START_MOVE = 0.01;
var DIAMETER = 0.30;
function makeBasketball() {
var position = Vec3.sum(MyAvatar.position,
Vec3.multiplyQbyV(MyAvatar.orientation,
{ x: 0, y: 0.0, z: -DISTANCE_IN_FRONT_OF_ME }));
var rotation = Quat.multiply(MyAvatar.orientation,
Quat.fromPitchYawRollDegrees(0, -90, 0));
basketball = Entities.addEntity({
type: "Model",
position: position,
rotation: rotation,
dimensions: { x: DIAMETER,
y: DIAMETER,
z: DIAMETER },
collisionsWillMove: true,
collisionSoundURL: collisionSoundURL,
modelURL: basketballURL,
restitution: 1.0,
linearDamping: 0.00001,
shapeType: "sphere"
});
originalPosition = position;
}
function update() {
if (!basketball) {
makeBasketball();
} else {
var newProperties = Entities.getEntityProperties(basketball);
var moved = Vec3.length(Vec3.subtract(originalPosition, newProperties.position));
if (!hasMoved && (moved > START_MOVE)) {
hasMoved = true;
Entities.editEntity(basketball, { gravity: {x: 0, y: GRAVITY, z: 0 }});
}
var MAX_DISTANCE = 10.0;
var distance = Vec3.length(Vec3.subtract(MyAvatar.position, newProperties.position));
if (distance > MAX_DISTANCE) {
deleteStuff();
}
}
}
function scriptEnding() {
deleteStuff();
}
function deleteStuff() {
if (basketball != null) {
Entities.deleteEntity(basketball);
basketball = null;
hasMoved = false;
}
}
Script.update.connect(update);
Script.scriptEnding.connect(scriptEnding);

examples/toys/grenade.js (Normal file, 201 lines added)
View file

@ -0,0 +1,201 @@
//
// Grenade.js
// examples
//
// Created by Philip Rosedale on August 20, 2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var grenadeURL = HIFI_PUBLIC_BUCKET + "models/props/grenade/grenade.fbx";
var fuseSoundURL = HIFI_PUBLIC_BUCKET + "sounds/burningFuse.wav";
var boomSoundURL = HIFI_PUBLIC_BUCKET + "sounds/explosion.wav";
var AudioRotationOffset = Quat.fromPitchYawRollDegrees(0, -90, 0);
var audioOptions = {
volume: 0.5,
loop: true
}
var injector = null;
var fuseSound = SoundCache.getSound(fuseSoundURL, audioOptions.isStereo);
var boomSound = SoundCache.getSound(boomSoundURL, audioOptions.isStereo);
var grenade = null;
var particles = null;
var properties = null;
var originalPosition = null;
var isGrenade = false;
var isBurning = false;
var animationSettings = JSON.stringify({
running: true,
loop: true
});
var explodeAnimationSettings = JSON.stringify({
running: true,
loop: false
});
var GRAVITY = -9.8;
var TIME_TO_EXPLODE = 2500;
var DISTANCE_IN_FRONT_OF_ME = 1.0;
function makeGrenade() {
var position = Vec3.sum(MyAvatar.position,
Vec3.multiplyQbyV(MyAvatar.orientation,
{ x: 0, y: 0.0, z: -DISTANCE_IN_FRONT_OF_ME }));
var rotation = Quat.multiply(MyAvatar.orientation,
Quat.fromPitchYawRollDegrees(0, -90, 0));
grenade = Entities.addEntity({
type: "Model",
position: position,
rotation: rotation,
dimensions: { x: 0.09,
y: 0.20,
z: 0.09 },
collisionsWillMove: true,
modelURL: grenadeURL,
shapeType: "box"
});
properties = Entities.getEntityProperties(grenade);
audioOptions.position = position;
audioOptions.orientation = rotation;
originalPosition = position;
}
function update() {
if (!grenade) {
makeGrenade();
} else {
var newProperties = Entities.getEntityProperties(grenade);
if (!isBurning) {
// If moved, start fuse
var FUSE_START_MOVE = 0.01;
if (Vec3.length(Vec3.subtract(newProperties.position, originalPosition)) > FUSE_START_MOVE) {
isBurning = true;
// Create fuse particles
particles = Entities.addEntity({
type: "ParticleEffect",
animationSettings: animationSettings,
position: newProperties.position,
textures: 'https://raw.githubusercontent.com/ericrius1/SantasLair/santa/assets/smokeparticle.png',
emitRate: 100,
emitStrength: 2.0,
emitDirection: { x: 0.0, y: 1.0, z: 0.0 },
color: { red: 200, green: 0, blue: 0 },
lifespan: 10.0,
visible: true,
locked: false
});
// Start fuse sound
injector = Audio.playSound(fuseSound, audioOptions);
// Start explosion timer
Script.setTimeout(boom, TIME_TO_EXPLODE);
originalPosition = newProperties.position;
// Add gravity
Entities.editEntity(grenade, { gravity: {x: 0, y: GRAVITY, z: 0 }});
}
}
if (newProperties.type === "Model") {
if (newProperties.position != properties.position) {
audioOptions.position = newProperties.position;
}
if (newProperties.orientation != properties.orientation) {
audioOptions.orientation = newProperties.orientation;
}
properties = newProperties;
// Update sound location if playing
if (injector) {
injector.options = audioOptions;
}
if (particles) {
Entities.editEntity(particles, { position: newProperties.position });
}
} else {
grenade = null;
Script.update.disconnect(update);
Script.scriptEnding.connect(scriptEnding);
scriptEnding();
Script.stop();
}
}
}
function boom() {
injector.stop();
isBurning = false;
var audioOptions = {
position: properties.position,
volume: 0.75,
loop: false
}
Audio.playSound(boomSound, audioOptions);
Entities.addEntity({
type: "ParticleEffect",
animationSettings: explodeAnimationSettings,
position: properties.position,
textures: 'https://raw.githubusercontent.com/ericrius1/SantasLair/santa/assets/smokeparticle.png',
emitRate: 200,
emitStrength: 3.0,
emitDirection: { x: 0.0, y: 1.0, z: 0.0 },
color: { red: 255, green: 255, blue: 0 },
lifespan: 2.0,
visible: true,
lifetime: 2,
locked: false
});
var BLAST_RADIUS = 20.0;
var LIFT_DEPTH = 2.0;
var epicenter = properties.position;
epicenter.y -= LIFT_DEPTH;
blowShitUp(epicenter, BLAST_RADIUS);
deleteStuff();
}
function blowShitUp(position, radius) {
var stuff = Entities.findEntities(position, radius);
var numMoveable = 0;
var STRENGTH = 3.5;
var SPIN_RATE = 20.0;
for (var i = 0; i < stuff.length; i++) {
var properties = Entities.getEntityProperties(stuff[i]);
if (properties.collisionsWillMove) {
var diff = Vec3.subtract(properties.position, position);
var distance = Vec3.length(diff);
var velocity = Vec3.sum(properties.velocity, Vec3.multiply(STRENGTH * 1.0 / distance, Vec3.normalize(diff)));
var angularVelocity = { x: Math.random() * SPIN_RATE, y: Math.random() * SPIN_RATE, z: Math.random() * SPIN_RATE };
angularVelocity = Vec3.multiply( 1.0 / distance, angularVelocity);
Entities.editEntity(stuff[i], { velocity: velocity,
angularVelocity: angularVelocity });
}
}
}
function scriptEnding() {
deleteStuff();
}
function deleteStuff() {
if (grenade != null) {
Entities.deleteEntity(grenade);
grenade = null;
}
if (particles != null) {
Entities.deleteEntity(particles);
particles = null;
}
}
Script.update.connect(update);
Script.scriptEnding.connect(scriptEnding);

View file

@ -0,0 +1,102 @@
//
// testWebSocket.js
// examples
//
// Created by Thijs Wenker on 8/18/15
// Copyright 2015 High Fidelity, Inc.
//
// WebSocket and WebSocketServer Tests
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
Script.include("../../libraries/unitTest.js");
// We set the unit testing timeout to 1000 milliseconds by default. Please increase if the test fails due to a slow connection.
const UNITTEST_TIMEOUT = 1000;
const WEBSOCKET_PING_URL = "ws://echo.websocket.org";
// Please do not register the following domain + gTLD:
const WEBSOCKET_INVALID_URL = "ws://thisisnotavaliddomainname.invalid";
const TEST_MESSAGE = "This is a test message.";
var unitTests = new SequentialUnitTester();
unitTests.addTest("Test default WebSocket values", function(finished) {
var _this = this;
var webSocket = new WebSocket(WEBSOCKET_PING_URL);
webSocket.onmessage = this.registerCallbackFunction(function(event) {
_this.assertEquals(TEST_MESSAGE, event.data, "event.data should be '" + TEST_MESSAGE + "'");
webSocket.close();
});
webSocket.onopen = this.registerCallbackFunction(function(event) {
_this.assertEquals(webSocket.OPEN, webSocket.readyState, "readyState should be OPEN");
webSocket.send(TEST_MESSAGE);
});
webSocket.onclose = this.registerCallbackFunction(function(event) {
_this.assertEquals(webSocket.CLOSED, webSocket.readyState, "readyState should be CLOSED");
_this.done();
});
this.assertEquals(webSocket.CONNECTING, webSocket.readyState, "readyState should be CONNECTING");
this.assertEquals("blob", webSocket.binaryType, "binaryType should be 'blob'");
this.assertEquals(0, webSocket.bufferedAmount, "bufferedAmount should be 0");
this.assertEquals("", webSocket.extensions, "extensions should be an empty string by default");
this.assertEquals("", webSocket.protocol, "protocol should be an empty string by default");
this.assertEquals(WEBSOCKET_PING_URL, webSocket.url, "url should be '" + WEBSOCKET_PING_URL + "'");
}, UNITTEST_TIMEOUT);
unitTests.addTest("Test WebSocket invalid URL", function(finished) {
var _this = this;
var webSocket = new WebSocket(WEBSOCKET_INVALID_URL);
var hadError = false;
webSocket.onerror = this.registerCallbackFunction(function() {
hadError = true;
_this.done();
});
webSocket.onclose = this.registerCallbackFunction(function(event) {
_this.assertEquals(webSocket.CLOSED, webSocket.readyState, "readyState should be CLOSED");
});
this.assertEquals(webSocket.CONNECTING, webSocket.readyState, "readyState should be CONNECTING");
this.assertEquals(WEBSOCKET_INVALID_URL, webSocket.url, "url should be '" + WEBSOCKET_INVALID_URL + "'");
}, UNITTEST_TIMEOUT);
if (this.WebSocketServer === undefined) {
print("Skipping WebSocketServer tests.");
} else {
unitTests.addTest("Test WebSocketServer with three clients", function(finished) {
var _this = this;
const NUMBER_OF_CLIENTS = 3;
var connectedClients = 0;
var respondedClients = 0;
var webSocketServer = new WebSocketServer();
_this.assertEquals(true, webSocketServer.listening, "listening should be true");
webSocketServer.newConnection.connect(this.registerCallbackFunction(function(newClient) {
connectedClients++;
newClient.onmessage = _this.registerCallbackFunction(function(event) {
var data = JSON.parse(event.data);
_this.assertEquals(TEST_MESSAGE, data.message, "data.message should be '" + TEST_MESSAGE + "'");
respondedClients++;
if (respondedClients === NUMBER_OF_CLIENTS) {
webSocketServer.close();
_this.assertEquals(false, webSocketServer.listening, "listening should be false");
_this.done();
}
});
newClient.send(JSON.stringify({message: TEST_MESSAGE, client: connectedClients}));
}));
var newSocket1 = new WebSocket(webSocketServer.url);
newSocket1.onmessage = this.registerCallbackFunction(function(event) {
newSocket1.send(event.data);
});
var newSocket2 = new WebSocket(webSocketServer.url);
newSocket2.onmessage = this.registerCallbackFunction(function(event) {
newSocket2.send(event.data);
});
var newSocket3 = new WebSocket(webSocketServer.url);
newSocket3.onmessage = this.registerCallbackFunction(function(event) {
newSocket3.send(event.data);
});
}, UNITTEST_TIMEOUT);
}
unitTests.run();

View file

@ -183,6 +183,24 @@ var CHECK_MARK_COLOR = {
this.onValueChanged(resetValue);
};
Slider.prototype.setMinValue = function(minValue) {
var currentValue = this.getValue();
this.minValue = minValue;
this.setValue(currentValue);
};
Slider.prototype.getMinValue = function() {
return this.minValue;
};
Slider.prototype.setMaxValue = function(maxValue) {
var currentValue = this.getValue();
this.maxValue = maxValue;
this.setValue(currentValue);
};
Slider.prototype.getMaxValue = function() {
return this.maxValue;
};
Slider.prototype.onValueChanged = function(value) {};
Slider.prototype.getHeight = function() {
@ -1396,6 +1414,14 @@ var CHECK_MARK_COLOR = {
return null;
};
Panel.prototype.getWidget = function(name) {
var item = this.items[name];
if (item != null) {
return item.widget;
}
return null;
};
Panel.prototype.update = function(name) {
var item = this.items[name];
if (item != null) {

View file

@ -12,59 +12,55 @@ Script.include("cookies.js");
var panel = new Panel(10, 100);
panel.newSlider("Num Feed Opaques", 0, 1000,
function(value) { },
function() { return Scene.getEngineNumFeedOpaqueItems(); },
function(value) { return (value); }
function CounterWidget(parentPanel, name, feedGetter, drawGetter, capSetter, capGetter) {
this.subPanel = panel.newSubPanel(name);
this.subPanel.newSlider("Num Feed", 0, 1,
function(value) { },
feedGetter,
function(value) { return (value); });
this.subPanel.newSlider("Num Drawn", 0, 1,
function(value) { },
drawGetter,
function(value) { return (value); });
this.subPanel.newSlider("Max Drawn", -1, 1,
capSetter,
capGetter,
function(value) { return (value); });
this.update = function () {
var numFeed = this.subPanel.get("Num Feed");
this.subPanel.set("Num Feed", numFeed);
this.subPanel.set("Num Drawn", this.subPanel.get("Num Drawn"));
var numMax = Math.max(numFeed, 1);
this.subPanel.getWidget("Num Feed").setMaxValue(numMax);
this.subPanel.getWidget("Num Drawn").setMaxValue(numMax);
this.subPanel.getWidget("Max Drawn").setMaxValue(numMax);
};
};
var opaquesCounter = new CounterWidget(panel, "Opaques",
function () { return Scene.getEngineNumFeedOpaqueItems(); },
function () { return Scene.getEngineNumDrawnOpaqueItems(); },
function(value) { Scene.setEngineMaxDrawnOpaqueItems(value); },
function () { return Scene.getEngineMaxDrawnOpaqueItems(); }
);
panel.newSlider("Num Drawn Opaques", 0, 1000,
function(value) { },
function() { return Scene.getEngineNumDrawnOpaqueItems(); },
function(value) { return (value); }
var transparentsCounter = new CounterWidget(panel, "Transparents",
function () { return Scene.getEngineNumFeedTransparentItems(); },
function () { return Scene.getEngineNumDrawnTransparentItems(); },
function(value) { Scene.setEngineMaxDrawnTransparentItems(value); },
function () { return Scene.getEngineMaxDrawnTransparentItems(); }
);
panel.newSlider("Max Drawn Opaques", -1, 1000,
function(value) { Scene.setEngineMaxDrawnOpaqueItems(value); },
function() { return Scene.getEngineMaxDrawnOpaqueItems(); },
function(value) { return (value); }
var overlaysCounter = new CounterWidget(panel, "Overlays",
function () { return Scene.getEngineNumFeedOverlay3DItems(); },
function () { return Scene.getEngineNumDrawnOverlay3DItems(); },
function(value) { Scene.setEngineMaxDrawnOverlay3DItems(value); },
function () { return Scene.getEngineMaxDrawnOverlay3DItems(); }
);
panel.newSlider("Num Feed Transparents", 0, 100,
function(value) { },
function() { return Scene.getEngineNumFeedTransparentItems(); },
function(value) { return (value); }
);
panel.newSlider("Num Drawn Transparents", 0, 100,
function(value) { },
function() { return Scene.getEngineNumDrawnTransparentItems(); },
function(value) { return (value); }
);
panel.newSlider("Max Drawn Transparents", -1, 100,
function(value) { Scene.setEngineMaxDrawnTransparentItems(value); },
function() { return Scene.getEngineMaxDrawnTransparentItems(); },
function(value) { return (value); }
);
panel.newSlider("Num Feed Overlay3Ds", 0, 100,
function(value) { },
function() { return Scene.getEngineNumFeedOverlay3DItems(); },
function(value) { return (value); }
);
panel.newSlider("Num Drawn Overlay3Ds", 0, 100,
function(value) { },
function() { return Scene.getEngineNumDrawnOverlay3DItems(); },
function(value) { return (value); }
);
panel.newSlider("Max Drawn Overlay3Ds", -1, 100,
function(value) { Scene.setEngineMaxDrawnOverlay3DItems(value); },
function() { return Scene.getEngineMaxDrawnOverlay3DItems(); },
function(value) { return (value); }
);
panel.newCheckbox("Display status",
function(value) { Scene.setEngineDisplayItemStatus(value); },
@ -75,31 +71,9 @@ panel.newCheckbox("Display status",
var tickTackPeriod = 500;
function updateCounters() {
var numFeedOpaques = panel.get("Num Feed Opaques");
var numFeedTransparents = panel.get("Num Feed Transparents");
var numFeedOverlay3Ds = panel.get("Num Feed Overlay3Ds");
panel.set("Num Feed Opaques", numFeedOpaques);
panel.set("Num Drawn Opaques", panel.get("Num Drawn Opaques"));
panel.set("Num Feed Transparents", numFeedTransparents);
panel.set("Num Drawn Transparents", panel.get("Num Drawn Transparents"));
panel.set("Num Feed Overlay3Ds", numFeedOverlay3Ds);
panel.set("Num Drawn Overlay3Ds", panel.get("Num Drawn Overlay3Ds"));
var numMax = Math.max(numFeedOpaques * 1.2, 1);
panel.getWidget("Num Feed Opaques").setMaxValue(numMax);
panel.getWidget("Num Drawn Opaques").setMaxValue(numMax);
panel.getWidget("Max Drawn Opaques").setMaxValue(numMax);
numMax = Math.max(numFeedTransparents * 1.2, 1);
panel.getWidget("Num Feed Transparents").setMaxValue(numMax);
panel.getWidget("Num Drawn Transparents").setMaxValue(numMax);
panel.getWidget("Max Drawn Transparents").setMaxValue(numMax);
numMax = Math.max(numFeedOverlay3Ds * 1.2, 1);
panel.getWidget("Num Feed Overlay3Ds").setMaxValue(numMax);
panel.getWidget("Num Drawn Overlay3Ds").setMaxValue(numMax);
panel.getWidget("Max Drawn Overlay3Ds").setMaxValue(numMax);
opaquesCounter.update();
transparentsCounter.update();
overlaysCounter.update();
}
Script.setInterval(updateCounters, tickTackPeriod);
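To make the shape of the new helper explicit, a hypothetical fourth counter would be wired up like this (the Scene.*Shadow* calls are made-up placeholders, not real Scene API):
var shadowsCounter = new CounterWidget(panel, "Shadows",
    function () { return Scene.getEngineNumFeedShadowItems(); },     // feed getter (hypothetical)
    function () { return Scene.getEngineNumDrawnShadowItems(); },    // drawn getter (hypothetical)
    function (value) { Scene.setEngineMaxDrawnShadowItems(value); }, // cap setter (hypothetical)
    function () { return Scene.getEngineMaxDrawnShadowItems(); }     // cap getter (hypothetical)
);
// ...and shadowsCounter.update() would be added to updateCounters() so its sliders rescale each tick.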

View file

@ -88,4 +88,6 @@ if (ANDROID)
endif (ANDROID)
copy_dlls_beside_windows_executable()
setup_memory_debugger()
copy_dlls_beside_windows_executable()

View file

@ -1,9 +1,11 @@
set(TARGET_NAME ice-server)
setup_memory_debugger()
# setup the project and link required Qt modules
setup_hifi_project(Network)
# link the shared hifi libraries
link_hifi_libraries(embedded-webserver networking shared)
copy_dlls_beside_windows_executable()
copy_dlls_beside_windows_executable()

View file

@ -14,20 +14,12 @@ endforeach()
find_package(Qt5LinguistTools REQUIRED)
find_package(Qt5LinguistToolsMacros)
if (DEFINED ENV{JOB_ID})
set(BUILD_SEQ $ENV{JOB_ID})
elseif (DEFINED ENV{ghprbPullId})
set(BUILD_SEQ "PR: $ENV{ghprbPullId} - Commit: $ENV{ghprbActualCommit}")
else ()
set(BUILD_SEQ "dev")
endif ()
if (WIN32)
add_definitions(-D_USE_MATH_DEFINES) # apparently needed to get M_PI and other defines from cmath/math.h
add_definitions(-DWINDOWS_LEAN_AND_MEAN) # needed to make sure windows doesn't go to crazy with its defines
endif()
configure_file(InterfaceVersion.h.in "${PROJECT_BINARY_DIR}/includes/InterfaceVersion.h")
include_application_version()
# grab the implementation and header files from src dirs
file(GLOB_RECURSE INTERFACE_SRCS "src/*.cpp" "src/*.h")
@ -48,7 +40,7 @@ else ()
list(REMOVE_ITEM INTERFACE_SRCS ${SPEECHRECOGNIZER_CPP})
endif ()
find_package(Qt5 COMPONENTS Gui Multimedia Network OpenGL Qml Quick Script Svg WebKitWidgets)
find_package(Qt5 COMPONENTS Gui Multimedia Network OpenGL Qml Quick Script Svg WebKitWidgets WebSockets)
# grab the ui files in resources/ui
file (GLOB_RECURSE QT_UI_FILES ui/*.ui)
@ -174,7 +166,7 @@ if (RTMIDI_FOUND AND NOT DISABLE_RTMIDI AND APPLE)
endif ()
# include headers for interface and InterfaceConfig.
include_directories("${PROJECT_SOURCE_DIR}/src" "${PROJECT_BINARY_DIR}/includes")
include_directories("${PROJECT_SOURCE_DIR}/src")
target_link_libraries(
${TARGET_NAME}
@ -213,4 +205,6 @@ else (APPLE)
endif()
endif (APPLE)
setup_memory_debugger()
copy_dlls_beside_windows_executable()

View file

@ -1,14 +0,0 @@
//
// InterfaceVersion.h
// interface/src
//
// Created by Leonardo Murillo on 12/16/13.
// Copyright 2013 High Fidelity, Inc.
//
// Declaration of version and build data
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
const QString BUILD_VERSION = "@BUILD_SEQ@";

interface/interface_da.ts (Normal file, 300 lines added)
View file

@ -0,0 +1,300 @@
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="da">
<context>
<name>Application</name>
<message>
<location filename="src/Application.cpp" line="1482"/>
<source>Sparse Voxel Octree Files (*.svo)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Application.cpp" line="3465"/>
<source>Open Script</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Application.cpp" line="3466"/>
<source>JavaScript Files (*.js)</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ChatWindow</name>
<message>
<location filename="ui/chatWindow.ui" line="20"/>
<location filename="../build/interface/ui_chatWindow.h" line="143"/>
<source>Chat</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/chatWindow.ui" line="50"/>
<location filename="../build/interface/ui_chatWindow.h" line="144"/>
<source>Connecting to XMPP...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/chatWindow.ui" line="71"/>
<location filename="../build/interface/ui_chatWindow.h" line="145"/>
<source> online now:</source>
<translation type="unfinished"></translation>
</message>
<message numerus="yes">
<location filename="src/ui/ChatWindow.cpp" line="128"/>
<source>day</source>
<translation>
<numerusform>%n dag</numerusform>
<numerusform>%n dage</numerusform>
</translation>
</message>
<message numerus="yes">
<location filename="src/ui/ChatWindow.cpp" line="128"/>
<source>hour</source>
<translation>
<numerusform>%n time</numerusform>
<numerusform>%n timer</numerusform>
</translation>
</message>
<message numerus="yes">
<location filename="src/ui/ChatWindow.cpp" line="128"/>
<source>minute</source>
<translation>
<numerusform>%n minut</numerusform>
<numerusform>%n minutter</numerusform>
</translation>
</message>
<message numerus="yes">
<source>second</source>
<translation type="vanished">
<numerusform>%n second</numerusform>
<numerusform>%n seconds</numerusform>
</translation>
</message>
<message>
<location filename="src/ui/ChatWindow.cpp" line="183"/>
<source>%1 online now:</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>Dialog</name>
<message>
<location filename="ui/updateDialog.ui" line="20"/>
<location filename="ui/updateDialog.ui" line="73"/>
<location filename="../build/interface/ui_updateDialog.h" line="137"/>
<location filename="../build/interface/ui_updateDialog.h" line="138"/>
<source>Update Required</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/updateDialog.ui" line="129"/>
<location filename="../build/interface/ui_updateDialog.h" line="140"/>
<source>Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/updateDialog.ui" line="151"/>
<location filename="../build/interface/ui_updateDialog.h" line="141"/>
<source>Skip Version</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/updateDialog.ui" line="173"/>
<location filename="../build/interface/ui_updateDialog.h" line="142"/>
<source>Close</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>Menu</name>
<message>
<location filename="src/Menu.cpp" line="554"/>
<source>Open .ini config file</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Menu.cpp" line="556"/>
<location filename="src/Menu.cpp" line="568"/>
<source>Text files (*.ini)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/Menu.cpp" line="566"/>
<source>Save .ini config file</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PreferencesDialog</name>
<message>
<location filename="ui/preferencesDialog.ui" line="90"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="618"/>
<source>Cancel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="125"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="619"/>
<source>Save all changes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="196"/>
<location filename="ui/preferencesDialog.ui" line="573"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="620"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="629"/>
<source>Avatar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="230"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="621"/>
<source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Avatar display name &lt;span style=&quot; color:#909090;&quot;&gt;(optional)&lt;/span&gt;&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="266"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="622"/>
<source>Not showing a name</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="294"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="623"/>
<source>Head</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="395"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="625"/>
<source>Body</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="506"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="627"/>
<source>Advanced Tuning</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="537"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="628"/>
<source>It&apos;s not recomended that you play with these settings unless you&apos;ve looked into exactly what they do.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="605"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="630"/>
<source>Vertical field of view</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="708"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="631"/>
<source>Lean scale (applies to Faceshift users)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="793"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="632"/>
<source>Avatar scale &lt;span style=&quot; color:#909090;&quot;&gt;(default is 1.0)&lt;/span&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="875"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="633"/>
<source>Pupil dillation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="954"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="634"/>
<source>Audio Jitter Buffer Samples (0 for automatic)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="1045"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="635"/>
<source>Faceshift eye detection</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="1125"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="636"/>
<source>Octree</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/preferencesDialog.ui" line="1236"/>
<location filename="../build/interface/ui_preferencesDialog.h" line="638"/>
<source>Max packets sent each second</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="src/ui/VoxelImportDialog.cpp" line="24"/>
<source>Loading ...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/VoxelImportDialog.cpp" line="27"/>
<source>Cancel</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>RunningScriptsWidget</name>
<message>
<location filename="ui/runningScriptsWidget.ui" line="14"/>
<location filename="../build/interface/ui_runningScriptsWidget.h" line="140"/>
<source>Form</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/runningScriptsWidget.ui" line="39"/>
<location filename="../build/interface/ui_runningScriptsWidget.h" line="141"/>
<source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;&lt;span style=&quot; font-size:18pt;&quot;&gt;Running Scripts&lt;/span&gt;&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/runningScriptsWidget.ui" line="63"/>
<location filename="../build/interface/ui_runningScriptsWidget.h" line="142"/>
<source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;&lt;span style=&quot; font-weight:600;&quot;&gt;Currently running&lt;/span&gt;&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/runningScriptsWidget.ui" line="89"/>
<location filename="../build/interface/ui_runningScriptsWidget.h" line="143"/>
<source>Reload all</source>
<oldsource>Reload All</oldsource>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/runningScriptsWidget.ui" line="116"/>
<location filename="../build/interface/ui_runningScriptsWidget.h" line="144"/>
<source>Stop all</source>
<oldsource>Stop All</oldsource>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/runningScriptsWidget.ui" line="137"/>
<location filename="../build/interface/ui_runningScriptsWidget.h" line="145"/>
<source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;&lt;span style=&quot; font-weight:600;&quot;&gt;Recently loaded&lt;/span&gt;&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/runningScriptsWidget.ui" line="154"/>
<location filename="../build/interface/ui_runningScriptsWidget.h" line="146"/>
<source>(click a script or use the 1-9 keys to load and run it)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="ui/runningScriptsWidget.ui" line="202"/>
<location filename="../build/interface/ui_runningScriptsWidget.h" line="148"/>
<source>There are no scripts currently running.</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS>

View file

@ -52,11 +52,14 @@
#include <AccountManager.h>
#include <AddressManager.h>
#include <ApplicationVersion.h>
#include <CursorManager.h>
#include <AudioInjector.h>
#include <AutoUpdater.h>
#include <DeferredLightingEffect.h>
#include <DependencyManager.h>
#include <plugins/PluginContainer.h>
#include <plugins/PluginManager.h>
#include <display-plugins/DisplayPlugin.h>
#include <EntityScriptingInterface.h>
@ -103,7 +106,6 @@
#include "AudioClient.h"
#include "DiscoverabilityManager.h"
#include "GLCanvas.h"
#include "InterfaceVersion.h"
#include "LODManager.h"
#include "Menu.h"
#include "ModelPackager.h"
@ -148,6 +150,8 @@
#include "ui/overlays/Cube3DOverlay.h"
#include "PluginContainerProxy.h"
// ON WIndows PC, NVidia Optimus laptop, we want to enable NVIDIA GPU
// FIXME seems to be broken.
#if defined(Q_OS_WIN)
@ -175,8 +179,6 @@ public:
using namespace std;
// Starfield information
static uint8_t THROTTLED_IDLE_TIMER_DELAY = 10;
const qint64 MAXIMUM_CACHE_SIZE = 10 * BYTES_PER_GIGABYTES; // 10GB
static QTimer* locationUpdateTimer = NULL;
@ -303,7 +305,7 @@ bool setupEssentials(int& argc, char** argv) {
// continuing to overburden Application.cpp
Cube3DOverlay* _keyboardFocusHighlight{ nullptr };
int _keyboardFocusHighlightID{ -1 };
PluginContainer* _pluginContainer;
Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
QApplication(argc, argv),
@ -353,7 +355,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_applicationOverlay()
{
setInstance(this);
Plugin::setContainer(this);
_pluginContainer = new PluginContainerProxy();
Plugin::setContainer(_pluginContainer);
#ifdef Q_OS_WIN
installNativeEventFilter(&MyNativeEventFilter::getInstance());
#endif
@ -737,6 +741,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_keyboardFocusHighlight->setVisible(false);
}
});
connect(this, &Application::applicationStateChanged, this, &Application::activeChanged);
setVSyncEnabled(); // make sure VSync is set properly at startup
}
void Application::aboutToQuit() {
@ -993,6 +1001,8 @@ void Application::paintGL() {
auto displayPlugin = getActiveDisplayPlugin();
displayPlugin->preRender();
_offscreenContext->makeCurrent();
// update the avatar with a fresh HMD pose
_myAvatar->updateFromHMDSensorMatrix(getHMDSensorPose());
auto lodManager = DependencyManager::get<LODManager>();
@ -1089,7 +1099,6 @@ void Application::paintGL() {
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
}
// Update camera position
if (!isHMDMode()) {
_myCamera.update(1.0f / _fps);
@ -1098,64 +1107,46 @@ void Application::paintGL() {
// Primary rendering pass
auto framebufferCache = DependencyManager::get<FramebufferCache>();
QSize size = framebufferCache->getFrameBufferSize();
const QSize size = framebufferCache->getFrameBufferSize();
{
PROFILE_RANGE(__FUNCTION__ "/mainRender");
// Viewport is assigned to the size of the framebuffer
QSize size = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();
renderArgs._viewport = glm::ivec4(0, 0, size.width(), size.height());
{
PROFILE_RANGE(__FUNCTION__ "/clear");
doInBatch(&renderArgs, [&](gpu::Batch& batch) {
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
batch.setFramebuffer(primaryFbo);
// clear the normal and specular buffers
batch.clearFramebuffer(
gpu::Framebuffer::BUFFER_COLOR0 |
gpu::Framebuffer::BUFFER_COLOR1 |
gpu::Framebuffer::BUFFER_COLOR2 |
gpu::Framebuffer::BUFFER_DEPTH,
vec4(vec3(0), 1), 1.0, 0.0);
});
}
renderArgs._viewport = ivec4(0, 0, size.width(), size.height());
if (displayPlugin->isStereo()) {
PROFILE_RANGE(__FUNCTION__ "/stereoRender");
QRect currentViewport(QPoint(0, 0), QSize(size.width() / 2, size.height()));
glEnable(GL_SCISSOR_TEST);
for_each_eye([&](Eye eye){
// Load the view frustum, used by meshes
Camera eyeCamera;
if (qApp->isHMDMode()) {
// Allow the displayPlugin to compose the final eye transform, based on the most up-to-date head motion.
eyeCamera.setTransform(displayPlugin->getModelview(eye, _myAvatar->getSensorToWorldMatrix()));
} else {
eyeCamera.setTransform(displayPlugin->getModelview(eye, _myCamera.getTransform()));
}
eyeCamera.setProjection(displayPlugin->getProjection(eye, _myCamera.getProjection()));
renderArgs._viewport = toGlm(currentViewport);
doInBatch(&renderArgs, [&](gpu::Batch& batch) {
batch.setViewportTransform(renderArgs._viewport);
batch.setStateScissorRect(renderArgs._viewport);
});
displaySide(&renderArgs, eyeCamera);
}, [&] {
currentViewport.moveLeft(currentViewport.width());
// Stereo modes will typically have a larger projection matrix overall,
// so we ask for the 'mono' projection matrix, which for stereo and HMD
// plugins will imply the combined projection for both eyes.
//
// This is properly implemented for the Oculus plugins, but for OpenVR
// and Stereo displays I'm not sure how to get / calculate it, so we're
// just relying on the left FOV in each case and hoping that the
// overall culling margin of error doesn't cause popping in the
// right eye. There are FIXMEs in the relevant plugins
_myCamera.setProjection(displayPlugin->getProjection(Mono, _myCamera.getProjection()));
renderArgs._context->enableStereo(true);
mat4 eyeViews[2];
mat4 eyeProjections[2];
auto baseProjection = renderArgs._viewFrustum->getProjection();
// FIXME we probably don't need to set the projection matrix every frame,
// only when the display plugin changes (or in non-HMD modes when the user
// changes the FOV manually, which right now I don't think they can).
for_each_eye([&](Eye eye) {
// For providing the stereo eye views, the HMD head pose has already been
// applied to the avatar, so we need to get the difference between the head
// pose applied to the avatar and the per eye pose, and use THAT as
// the per-eye stereo matrix adjustment.
mat4 eyePose = displayPlugin->getEyePose(eye);
mat4 headPose = displayPlugin->getHeadPose();
mat4 eyeView = glm::inverse(eyePose) * headPose;
eyeViews[eye] = eyeView;
eyeProjections[eye] = displayPlugin->getProjection(eye, baseProjection);
});
glDisable(GL_SCISSOR_TEST);
} else {
PROFILE_RANGE(__FUNCTION__ "/monoRender");
renderArgs._viewport = gpu::Vec4i(0, 0, size.width(), size.height());
// Viewport is assigned to the size of the framebuffer
doInBatch(&renderArgs, [&](gpu::Batch& batch) {
batch.setViewportTransform(renderArgs._viewport);
batch.setStateScissorRect(renderArgs._viewport);
});
displaySide(&renderArgs, _myCamera);
renderArgs._context->setStereoProjections(eyeProjections);
renderArgs._context->setStereoViews(eyeViews);
}
doInBatch(&renderArgs, [](gpu::Batch& batch){
displaySide(&renderArgs, _myCamera);
renderArgs._context->enableStereo(false);
doInBatch(&renderArgs, [](gpu::Batch& batch) {
batch.setFramebuffer(nullptr);
});
}
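For reference, a minimal sketch (not part of this change; it only assumes the glm types and the eyePose/headPose values already used above) of the per-eye view relationship fed into setStereoViews: the head pose is already applied to the avatar, so each eye only needs the residual head-to-eye transform, inverted so it acts as a view matrix.
#include <glm/glm.hpp>
// Sketch only: the per-eye view offset built in the loop above.
// eyePose and headPose come from the display plugin, as in the diff.
glm::mat4 eyeViewOffset(const glm::mat4& eyePose, const glm::mat4& headPose) {
    // the avatar already carries headPose, so the remaining per-eye view is
    // the head-to-eye transform applied as a view (hence the inverse)
    return glm::inverse(eyePose) * headPose;
}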
@ -1189,7 +1180,6 @@ void Application::paintGL() {
PROFILE_RANGE(__FUNCTION__ "/pluginOutput");
auto primaryFbo = framebufferCache->getPrimaryFramebuffer();
GLuint finalTexture = gpu::GLBackend::getTextureID(primaryFbo->getRenderBuffer(0));
uvec2 finalSize = toGlm(size);
// Ensure the rendering context commands are completed when rendering
GLsync sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
// Ensure the sync object is flushed to the driver thread before releasing the context
@ -1205,7 +1195,7 @@ void Application::paintGL() {
{
PROFILE_RANGE(__FUNCTION__ "/pluginDisplay");
displayPlugin->display(finalTexture, finalSize);
displayPlugin->display(finalTexture, toGlm(size));
}
{
@ -1278,6 +1268,7 @@ void Application::resizeGL() {
// Possible change in aspect ratio
loadViewFrustum(_myCamera, _viewFrustum);
float fov = glm::radians(DEFAULT_FIELD_OF_VIEW_DEGREES);
// FIXME the aspect ratio for stereo displays is incorrect based on this.
float aspectRatio = aspect(_renderResolution);
_myCamera.setProjection(glm::perspective(fov, aspectRatio, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
}
@ -1437,7 +1428,15 @@ void Application::keyPressEvent(QKeyEvent* event) {
break;
case Qt::Key_Enter:
case Qt::Key_Return:
Menu::getInstance()->triggerOption(MenuOption::AddressBar);
if (isOption) {
if (_window->isFullScreen()) {
_pluginContainer->unsetFullscreen();
} else {
_pluginContainer->setFullscreen(nullptr);
}
} else {
Menu::getInstance()->triggerOption(MenuOption::AddressBar);
}
break;
case Qt::Key_B:
@ -1744,6 +1743,27 @@ void Application::mouseMoveEvent(QMouseEvent* event, unsigned int deviceID) {
return;
}
#ifndef Q_OS_MAC
// If in full screen, and our main window's menu bar is hidden, and we're close to the top of the QMainWindow
// then show the menubar.
if (_window->isFullScreen()) {
QMenuBar* menuBar = _window->menuBar();
if (menuBar) {
static const int MENU_TOGGLE_AREA = 10;
if (!menuBar->isVisible()) {
if (event->pos().y() <= MENU_TOGGLE_AREA) {
menuBar->setVisible(true);
}
} else {
if (event->pos().y() > MENU_TOGGLE_AREA) {
menuBar->setVisible(false);
}
}
}
}
#endif
_entities.mouseMoveEvent(event, deviceID);
_controllerScriptingInterface.emitMouseMoveEvent(event, deviceID); // send events to any registered scripts
@ -2108,8 +2128,11 @@ void Application::idle() {
// Once rendering is off on another thread we should be able to have Application::idle run at start(0) in
// perpetuity and not expect events to get backed up.
bool isThrottled = getActiveDisplayPlugin()->isThrottled();
static const int THROTTLED_IDLE_TIMER_DELAY = MSECS_PER_SECOND / 15;
static const int IDLE_TIMER_DELAY_MS = 2;
int desiredInterval = getActiveDisplayPlugin()->isThrottled() ? THROTTLED_IDLE_TIMER_DELAY : IDLE_TIMER_DELAY_MS;
int desiredInterval = isThrottled ? THROTTLED_IDLE_TIMER_DELAY : IDLE_TIMER_DELAY_MS;
//qDebug() << "isThrottled:" << isThrottled << "desiredInterval:" << desiredInterval;
if (idleTimer->interval() != desiredInterval) {
idleTimer->start(desiredInterval);
@ -2679,9 +2702,6 @@ void Application::update(float deltaTime) {
updateLOD();
updateMouseRay(); // check what's under the mouse and update the mouse voxel
// update the avatar with a fresh HMD pose
_myAvatar->updateFromHMDSensorMatrix(getHMDSensorPose());
{
PerformanceTimer perfTimer("devices");
DeviceTracker::updateAll();
@ -3314,7 +3334,7 @@ namespace render {
template <> const ItemKey payloadGetKey(const WorldBoxRenderData::Pointer& stuff) { return ItemKey::Builder::opaqueShape(); }
template <> const Item::Bound payloadGetBound(const WorldBoxRenderData::Pointer& stuff) { return Item::Bound(); }
template <> void payloadRender(const WorldBoxRenderData::Pointer& stuff, RenderArgs* args) {
if (args->_renderMode != RenderArgs::MIRROR_RENDER_MODE && Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
if (args->_renderMode != RenderArgs::MIRROR_RENDER_MODE && Menu::getInstance()->isOptionChecked(MenuOption::WorldAxes)) {
PerformanceTimer perfTimer("worldBox");
auto& batch = *args->_batch;
@ -4612,6 +4632,20 @@ void Application::checkSkeleton() {
}
}
void Application::activeChanged(Qt::ApplicationState state) {
switch (state) {
case Qt::ApplicationActive:
_isForeground = true;
break;
case Qt::ApplicationSuspended:
case Qt::ApplicationHidden:
case Qt::ApplicationInactive:
default:
_isForeground = false;
break;
}
}
void Application::showFriendsWindow() {
const QString FRIENDS_WINDOW_TITLE = "Add/Remove Friends";
const QString FRIENDS_WINDOW_URL = "https://metaverse.highfidelity.com/user/friends";
@ -4703,6 +4737,10 @@ const DisplayPlugin * Application::getActiveDisplayPlugin() const {
return ((Application*)this)->getActiveDisplayPlugin();
}
bool _activatingDisplayPlugin{ false };
QVector<QPair<QString, QString>> _currentDisplayPluginActions;
QVector<QPair<QString, QString>> _currentInputPluginActions;
static void addDisplayPluginToMenu(DisplayPluginPointer displayPlugin, bool active = false) {
auto menu = Menu::getInstance();
@ -4724,8 +4762,6 @@ static void addDisplayPluginToMenu(DisplayPluginPointer displayPlugin, bool acti
Q_ASSERT(menu->menuItemExists(MenuOption::OutputMenu, name));
}
static QVector<QPair<QString, QString>> _currentDisplayPluginActions;
void Application::updateDisplayMode() {
auto menu = Menu::getInstance();
auto displayPlugins = PluginManager::getInstance()->getDisplayPlugins();
@ -4771,7 +4807,9 @@ void Application::updateDisplayMode() {
if (newDisplayPlugin) {
_offscreenContext->makeCurrent();
_activatingDisplayPlugin = true;
newDisplayPlugin->activate();
_activatingDisplayPlugin = false;
_offscreenContext->makeCurrent();
offscreenUi->resize(fromGlm(newDisplayPlugin->getRecommendedUiSize()));
_offscreenContext->makeCurrent();
@ -4779,11 +4817,18 @@ void Application::updateDisplayMode() {
oldDisplayPlugin = _displayPlugin;
_displayPlugin = newDisplayPlugin;
// If the displayPlugin is a screen based HMD, then it will want the HMDTools displayed
// Direct Mode HMDs (like Windows Oculus) will be isHmd() but will have a screen of -1
bool newPluginWantsHMDTools = newDisplayPlugin ?
(newDisplayPlugin->isHmd() && (newDisplayPlugin->getHmdScreen() >= 0)) : false;
bool oldPluginWantedHMDTools = oldDisplayPlugin ?
(oldDisplayPlugin->isHmd() && (oldDisplayPlugin->getHmdScreen() >= 0)) : false;
// Only show the hmd tools after the correct plugin has
// been activated so that its UI is set up correctly
if (newDisplayPlugin->isHmd()) {
showDisplayPluginsTools();
if (newPluginWantsHMDTools) {
_pluginContainer->showDisplayPluginsTools();
}
if (oldDisplayPlugin) {
@ -4791,7 +4836,7 @@ void Application::updateDisplayMode() {
_offscreenContext->makeCurrent();
// if the old plugin was HMD and the new plugin is not HMD, then hide our hmdtools
if (oldDisplayPlugin->isHmd() && !newDisplayPlugin->isHmd()) {
if (oldPluginWantedHMDTools && !newPluginWantsHMDTools) {
DependencyManager::get<DialogsManager>()->hmdTools(false);
}
}
@ -4801,9 +4846,6 @@ void Application::updateDisplayMode() {
Q_ASSERT_X(_displayPlugin, "Application::updateDisplayMode", "could not find an activated display plugin");
}
static QVector<QPair<QString, QString>> _currentInputPluginActions;
static void addInputPluginToMenu(InputPluginPointer inputPlugin, bool active = false) {
auto menu = Menu::getInstance();
QString name = inputPlugin->getName();
@ -4877,39 +4919,6 @@ void Application::updateInputModes() {
//}
}
void Application::addMenu(const QString& menuName) {
Menu::getInstance()->addMenu(menuName);
}
void Application::removeMenu(const QString& menuName) {
Menu::getInstance()->removeMenu(menuName);
}
void Application::addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable, bool checked, const QString& groupName) {
auto menu = Menu::getInstance();
MenuWrapper* parentItem = menu->getMenu(path);
QAction* action = parentItem->addAction(name);
connect(action, &QAction::triggered, [=] {
onClicked(action->isChecked());
});
action->setCheckable(checkable);
action->setChecked(checked);
_currentDisplayPluginActions.push_back({ path, name });
_currentInputPluginActions.push_back({ path, name });
}
void Application::removeMenuItem(const QString& menuName, const QString& menuItem) {
Menu::getInstance()->removeMenuItem(menuName, menuItem);
}
bool Application::isOptionChecked(const QString& name) {
return Menu::getInstance()->isOptionChecked(name);
}
void Application::setIsOptionChecked(const QString& path, bool checked) {
Menu::getInstance()->setIsOptionChecked(path, checked);
}
mat4 Application::getEyeProjection(int eye) const {
if (isHMDMode()) {
return getActiveDisplayPlugin()->getProjection((Eye)eye, _viewFrustum.getProjection());
@ -4929,7 +4938,7 @@ mat4 Application::getEyePose(int eye) const {
mat4 Application::getEyeOffset(int eye) const {
if (isHMDMode()) {
mat4 identity;
return getActiveDisplayPlugin()->getModelview((Eye)eye, identity);
return getActiveDisplayPlugin()->getView((Eye)eye, identity);
}
return mat4();
@ -4942,68 +4951,6 @@ mat4 Application::getHMDSensorPose() const {
return mat4();
}
void Application::setFullscreen(const QScreen* target) {
if (!_window->isFullScreen()) {
_savedGeometry = _window->geometry();
}
#ifdef Q_OS_MAC
_window->setGeometry(target->availableGeometry());
#endif
_window->windowHandle()->setScreen((QScreen*)target);
_window->showFullScreen();
}
void Application::unsetFullscreen(const QScreen* avoid) {
_window->showNormal();
QRect targetGeometry = _savedGeometry;
if (avoid != nullptr) {
QRect avoidGeometry = avoid->geometry();
if (avoidGeometry.contains(targetGeometry.topLeft())) {
QScreen* newTarget = primaryScreen();
if (newTarget == avoid) {
foreach(auto screen, screens()) {
if (screen != avoid) {
newTarget = screen;
break;
}
}
}
targetGeometry = newTarget->availableGeometry();
}
}
#ifdef Q_OS_MAC
QTimer* timer = new QTimer();
timer->singleShot(2000, [=] {
_window->setGeometry(targetGeometry);
timer->deleteLater();
});
#else
_window->setGeometry(targetGeometry);
#endif
}
void Application::showDisplayPluginsTools() {
DependencyManager::get<DialogsManager>()->hmdTools(true);
}
QGLWidget* Application::getPrimarySurface() {
return _glWidget;
}
void Application::setActiveDisplayPlugin(const QString& pluginName) {
auto menu = Menu::getInstance();
foreach(DisplayPluginPointer displayPlugin, PluginManager::getInstance()->getDisplayPlugins()) {
QString name = displayPlugin->getName();
QAction* action = menu->getActionForOption(name);
if (pluginName == name) {
action->setChecked(true);
}
}
updateDisplayMode();
}
void Application::setPalmData(Hand* hand, UserInputMapper::PoseValue pose, float deltaTime, int index) {
PalmData* palm;
bool foundHand = false;

View file

@ -26,18 +26,18 @@
#include <EntityEditPacketSender.h>
#include <EntityTreeRenderer.h>
#include <GeometryCache.h>
#include <input-plugins/KeyboardMouseDevice.h>
#include <NodeList.h>
#include <OctreeQuery.h>
#include <OffscreenUi.h>
#include <PhysicalEntitySimulation.h>
#include <PhysicsEngine.h>
#include <plugins/Forward.h>
#include <ScriptEngine.h>
#include <ShapeManager.h>
#include <StDev.h>
#include <udt/PacketHeaders.h>
#include <ViewFrustum.h>
#include <plugins/PluginContainer.h>
#include <plugins/PluginManager.h>
#include <SimpleMovingAverage.h>
#include "AudioClient.h"
@ -50,7 +50,6 @@
#include "Stars.h"
#include "avatar/Avatar.h"
#include "avatar/MyAvatar.h"
#include <input-plugins/KeyboardMouseDevice.h>
#include "scripting/ControllerScriptingInterface.h"
#include "scripting/DialogsManagerScriptingInterface.h"
#include "scripting/WebWindowClass.h"
@ -132,7 +131,7 @@ class Application;
typedef bool (Application::* AcceptURLMethod)(const QString &);
class Application : public QApplication, public AbstractViewStateInterface, public AbstractScriptingServicesInterface, PluginContainer {
class Application : public QApplication, public AbstractViewStateInterface, public AbstractScriptingServicesInterface {
Q_OBJECT
friend class OctreePacketProcessor;
@ -280,22 +279,10 @@ public:
virtual void endOverrideEnvironmentData() { _environment.endOverride(); }
virtual qreal getDevicePixelRatio();
// Plugin container support
virtual void addMenu(const QString& menuName);
virtual void removeMenu(const QString& menuName);
virtual void addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable, bool checked, const QString& groupName);
virtual void removeMenuItem(const QString& menuName, const QString& menuItem);
virtual bool isOptionChecked(const QString& name);
virtual void setIsOptionChecked(const QString& path, bool checked);
virtual void setFullscreen(const QScreen* target) override;
virtual void unsetFullscreen(const QScreen* avoid) override;
virtual void showDisplayPluginsTools() override;
virtual QGLWidget* getPrimarySurface() override;
void setActiveDisplayPlugin(const QString& pluginName);
DisplayPlugin * getActiveDisplayPlugin();
const DisplayPlugin * getActiveDisplayPlugin() const;
DisplayPlugin* getActiveDisplayPlugin();
const DisplayPlugin* getActiveDisplayPlugin() const;
public:
@ -476,6 +463,7 @@ private slots:
void faceTrackerMuteToggled();
void setCursorVisible(bool visible);
void activeChanged(Qt::ApplicationState state);
private:
void resetCameras(Camera& camera, const glm::uvec2& size);
@ -688,6 +676,9 @@ private:
SimpleMovingAverage _simsPerSecond{10};
int _simsPerSecondReport = 0;
quint64 _lastSimsPerSecondUpdate = 0;
bool _isForeground = true; // starts out assumed to be in foreground
friend class PluginContainerProxy;
};
#endif // hifi_Application_h

View file

@ -18,8 +18,6 @@
#include "MainWindow.h"
const int MSECS_PER_FRAME_WHEN_THROTTLED = 66;
static QGLFormat& getDesiredGLFormat() {
// Specify an OpenGL 3.3 format using the Core profile.
// That is, no old-school fixed pipeline functionality
@ -35,10 +33,7 @@ static QGLFormat& getDesiredGLFormat() {
return glFormat;
}
GLCanvas::GLCanvas() : QGLWidget(getDesiredGLFormat()),
_throttleRendering(false),
_idleRenderInterval(MSECS_PER_FRAME_WHEN_THROTTLED)
{
GLCanvas::GLCanvas() : QGLWidget(getDesiredGLFormat()) {
#ifdef Q_OS_LINUX
// Cause GLCanvas::eventFilter to be called.
// It wouldn't hurt to do this on Mac and PC too; but apparently it's only needed on linux.
@ -46,15 +41,6 @@ GLCanvas::GLCanvas() : QGLWidget(getDesiredGLFormat()),
#endif
}
void GLCanvas::stopFrameTimer() {
_frameTimer.stop();
}
bool GLCanvas::isThrottleRendering() const {
return (_throttleRendering
|| (Application::getInstance()->getWindow()->isMinimized() && Application::getInstance()->isThrottleFPSEnabled()));
}
int GLCanvas::getDeviceWidth() const {
return width() * (windowHandle() ? (float)windowHandle()->devicePixelRatio() : 1.0f);
}
@ -66,17 +52,17 @@ int GLCanvas::getDeviceHeight() const {
void GLCanvas::initializeGL() {
setAttribute(Qt::WA_AcceptTouchEvents);
setAcceptDrops(true);
connect(Application::getInstance(), SIGNAL(applicationStateChanged(Qt::ApplicationState)), this, SLOT(activeChanged(Qt::ApplicationState)));
connect(&_frameTimer, SIGNAL(timeout()), this, SLOT(throttleRender()));
// Note, we *DO NOT* want Qt to automatically swap buffers for us. This results in the "ringing" bug mentioned in WL#19514 when we're throttling the framerate.
setAutoBufferSwap(false);
}
void GLCanvas::paintGL() {
PROFILE_RANGE(__FUNCTION__);
if (!_throttleRendering &&
(!Application::getInstance()->getWindow()->isMinimized() || !Application::getInstance()->isThrottleFPSEnabled())) {
// FIXME - I'm not sure why this still remains; it appears as if this GLCanvas gets a single paintGL call near
// the beginning of application startup. I'm not sure if we really need to call Application::paintGL()
// in this case, since the display plugins eventually handle all the painting
if ((!Application::getInstance()->getWindow()->isMinimized() || !Application::getInstance()->isThrottleFPSEnabled())) {
Application::getInstance()->paintGL();
}
}
@ -85,39 +71,6 @@ void GLCanvas::resizeGL(int width, int height) {
Application::getInstance()->resizeGL();
}
void GLCanvas::activeChanged(Qt::ApplicationState state) {
switch (state) {
case Qt::ApplicationActive:
// If we're active, stop the frame timer and the throttle.
_frameTimer.stop();
_throttleRendering = false;
break;
case Qt::ApplicationSuspended:
case Qt::ApplicationHidden:
// If we're hidden or are about to suspend, don't render anything.
_throttleRendering = false;
_frameTimer.stop();
break;
default:
// Otherwise, throttle.
if (!_throttleRendering && !Application::getInstance()->isAboutToQuit()
&& Application::getInstance()->isThrottleFPSEnabled()) {
_frameTimer.start(_idleRenderInterval);
_throttleRendering = true;
}
break;
}
}
void GLCanvas::throttleRender() {
_frameTimer.start(_idleRenderInterval);
if (!Application::getInstance()->getWindow()->isMinimized()) {
Application::getInstance()->paintGL();
}
}
int updateTime = 0;
bool GLCanvas::event(QEvent* event) {
switch (event->type()) {

View file

@ -23,28 +23,18 @@ class GLCanvas : public QGLWidget {
public:
GLCanvas();
void stopFrameTimer();
bool isThrottleRendering() const;
int getDeviceWidth() const;
int getDeviceHeight() const;
QSize getDeviceSize() const { return QSize(getDeviceWidth(), getDeviceHeight()); }
protected:
QTimer _frameTimer;
bool _throttleRendering;
int _idleRenderInterval;
virtual void initializeGL();
virtual void paintGL();
virtual void resizeGL(int width, int height);
virtual bool event(QEvent* event);
private slots:
void activeChanged(Qt::ApplicationState state);
void throttleRender();
bool eventFilter(QObject*, QEvent* event);
};

View file

@ -16,11 +16,13 @@
#include <AddressManager.h>
#include <AudioClient.h>
#include <DependencyManager.h>
#include <display-plugins/DisplayPlugin.h>
#include <PathUtils.h>
#include <SettingHandle.h>
#include <UserActivityLogger.h>
#include <VrMenu.h>
#include "Application.h"
#include "AccountManager.h"
#include "audio/AudioScope.h"
@ -220,7 +222,7 @@ Menu::Menu() {
addActionToQMenuAndActionHash(toolsMenu, MenuOption::PackageModel, 0,
qApp, SLOT(packageModel()));
MenuWrapper* displayMenu = addMenu("Display");
MenuWrapper* displayMenu = addMenu(DisplayPlugin::MENU_PATH);
{
MenuWrapper* displayModeMenu = addMenu(MenuOption::OutputMenu);
QActionGroup* displayModeGroup = new QActionGroup(displayModeMenu);
@ -294,6 +296,8 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::StandingHMDSensorMode, 0, false,
avatar, SLOT(updateStandingHMDModeFromMenu()));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::WorldAxes);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats);
addActionToQMenuAndActionHash(viewMenu, MenuOption::Log,
Qt::CTRL | Qt::SHIFT | Qt::Key_L,
@ -444,6 +448,7 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::FixGaze, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::DisableEyelidAdjustment, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu,
MenuOption::Connexion,
0, false,

View file

@ -168,6 +168,7 @@ namespace MenuOption {
const QString DecreaseAvatarSize = "Decrease Avatar Size";
const QString DeleteBookmark = "Delete Bookmark...";
const QString DisableActivityLogger = "Disable Activity Logger";
const QString DisableEyelidAdjustment = "Disable Eyelid Adjustment";
const QString DisableLightEntities = "Disable Light Entities";
const QString DisableNackPackets = "Disable Entity NACK Packets";
const QString DiskCacheEditor = "Disk Cache Editor";
@ -283,7 +284,7 @@ namespace MenuOption {
const QString TestPing = "Test Ping";
const QString ThirdPerson = "Third Person";
const QString ThreePointCalibration = "3 Point Calibration";
const QString ThrottleFPSIfNotFocus = "Throttle FPS If Not Focus";
const QString ThrottleFPSIfNotFocus = "Throttle FPS If Not Focus"; // FIXME - this value duplicated in Basic2DWindowOpenGLDisplayPlugin.cpp
const QString ToolWindow = "Tool Window";
const QString TransmitterDrive = "Transmitter Drive";
const QString TurnWithHead = "Turn using Head";
@ -293,6 +294,7 @@ namespace MenuOption {
const QString VisibleToEveryone = "Everyone";
const QString VisibleToFriends = "Friends";
const QString VisibleToNoOne = "No one";
const QString WorldAxes = "World Axes";
}
#endif // hifi_Menu_h

View file

@ -106,16 +106,17 @@ bool ModelPackager::loadModel() {
}
qCDebug(interfaceapp) << "Reading FBX file : " << _fbxInfo.filePath();
QByteArray fbxContents = fbx.readAll();
_geometry = readFBX(fbxContents, QVariantHash(), _fbxInfo.filePath());
_geometry.reset(readFBX(fbxContents, QVariantHash(), _fbxInfo.filePath()));
// make sure we have some basic mappings
populateBasicMapping(_mapping, _fbxInfo.filePath(), _geometry);
populateBasicMapping(_mapping, _fbxInfo.filePath(), *_geometry);
return true;
}
bool ModelPackager::editProperties() {
// open the dialog to configure the rest
ModelPropertiesDialog properties(_modelType, _mapping, _modelFile.path(), _geometry);
ModelPropertiesDialog properties(_modelType, _mapping, _modelFile.path(), *_geometry);
if (properties.exec() == QDialog::Rejected) {
return false;
}
@ -339,7 +340,7 @@ void ModelPackager::populateBasicMapping(QVariantHash& mapping, QString filename
void ModelPackager::listTextures() {
_textures.clear();
foreach (FBXMesh mesh, _geometry.meshes) {
foreach (FBXMesh mesh, _geometry->meshes) {
foreach (FBXMeshPart part, mesh.parts) {
if (!part.diffuseTexture.filename.isEmpty() && part.diffuseTexture.content.isEmpty() &&
!_textures.contains(part.diffuseTexture.filename)) {

View file

@ -39,11 +39,11 @@ private:
QString _texDir;
QVariantHash _mapping;
FBXGeometry _geometry;
std::unique_ptr<FBXGeometry> _geometry;
QStringList _textures;
};
#endif // hifi_ModelPackager_h
#endif // hifi_ModelPackager_h

View file

@ -0,0 +1,161 @@
#include "PluginContainerProxy.h"
#include <QScreen>
#include <QWindow>
#include <plugins/Plugin.h>
#include <plugins/PluginManager.h>
#include <display-plugins/DisplayPlugin.h>
#include "Application.h"
#include "MainWindow.h"
#include "GLCanvas.h"
#include "ui/DialogsManager.h"
PluginContainerProxy::PluginContainerProxy() {
Plugin::setContainer(this);
}
bool PluginContainerProxy::isForeground() {
return qApp->_isForeground && !qApp->getWindow()->isMinimized();
}
void PluginContainerProxy::addMenu(const QString& menuName) {
Menu::getInstance()->addMenu(menuName);
}
void PluginContainerProxy::removeMenu(const QString& menuName) {
Menu::getInstance()->removeMenu(menuName);
}
extern bool _activatingDisplayPlugin;
extern QVector<QPair<QString, QString>> _currentDisplayPluginActions;
extern QVector<QPair<QString, QString>> _currentInputPluginActions;
std::map<QString, QActionGroup*> _exclusiveGroups;
QAction* PluginContainerProxy::addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable, bool checked, const QString& groupName) {
auto menu = Menu::getInstance();
MenuWrapper* parentItem = menu->getMenu(path);
QAction* action = menu->addActionToQMenuAndActionHash(parentItem, name);
if (!groupName.isEmpty()) {
QActionGroup* group{ nullptr };
if (!_exclusiveGroups.count(groupName)) {
group = _exclusiveGroups[groupName] = new QActionGroup(menu);
group->setExclusive(true);
} else {
group = _exclusiveGroups[groupName];
}
group->addAction(action);
}
connect(action, &QAction::triggered, [=] {
onClicked(action->isChecked());
});
action->setCheckable(checkable);
action->setChecked(checked);
if (_activatingDisplayPlugin) {
_currentDisplayPluginActions.push_back({ path, name });
} else {
_currentInputPluginActions.push_back({ path, name });
}
return action;
}
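A compact sketch of the lazy, per-name group creation used above (illustrative only; it assumes the same std::map keyed by group name and the Qt headers the surrounding code already pulls in):
#include <map>
#include <QActionGroup>
// Sketch: return the exclusive QActionGroup for a group name, creating it on first use.
static QActionGroup* exclusiveGroupFor(std::map<QString, QActionGroup*>& groups,
                                       const QString& groupName, QObject* parent) {
    auto it = groups.find(groupName);
    if (it == groups.end()) {
        QActionGroup* group = new QActionGroup(parent); // parented so Qt cleans it up
        group->setExclusive(true);
        it = groups.emplace(groupName, group).first;
    }
    return it->second;
}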
void PluginContainerProxy::removeMenuItem(const QString& menuName, const QString& menuItem) {
Menu::getInstance()->removeMenuItem(menuName, menuItem);
}
bool PluginContainerProxy::isOptionChecked(const QString& name) {
return Menu::getInstance()->isOptionChecked(name);
}
void PluginContainerProxy::setIsOptionChecked(const QString& path, bool checked) {
Menu::getInstance()->setIsOptionChecked(path, checked);
}
// FIXME there is a bug in the fullscreen setting, where leaving
// fullscreen does not restore the window frame, making it difficult
// or impossible to move or size the window.
// Additionally, setting fullscreen isn't hiding the menu on Windows,
// making it useless for stereoscopic modes.
void PluginContainerProxy::setFullscreen(const QScreen* target, bool hideMenu) {
auto _window = qApp->_window;
if (!_window->isFullScreen()) {
_savedGeometry = _window->geometry();
}
if (nullptr == target) {
// FIXME target the screen where the window currently is
target = qApp->primaryScreen();
}
_window->setGeometry(target->availableGeometry());
_window->windowHandle()->setScreen((QScreen*)target);
_window->showFullScreen();
#ifndef Q_OS_MAC
// also hide the QMainWindow's menuBar
QMenuBar* menuBar = _window->menuBar();
if (menuBar && hideMenu) {
menuBar->setVisible(false);
}
#endif
}
void PluginContainerProxy::unsetFullscreen(const QScreen* avoid) {
auto _window = qApp->_window;
_window->showNormal();
QRect targetGeometry = _savedGeometry;
if (avoid != nullptr) {
QRect avoidGeometry = avoid->geometry();
if (avoidGeometry.contains(targetGeometry.topLeft())) {
QScreen* newTarget = qApp->primaryScreen();
if (newTarget == avoid) {
foreach(auto screen, qApp->screens()) {
if (screen != avoid) {
newTarget = screen;
break;
}
}
}
targetGeometry = newTarget->availableGeometry();
}
}
#ifdef Q_OS_MAC
QTimer* timer = new QTimer();
timer->singleShot(2000, [=] {
_window->setGeometry(targetGeometry);
timer->deleteLater();
});
#else
_window->setGeometry(targetGeometry);
#endif
#ifndef Q_OS_MAC
// also show the QMainWindow's menuBar
QMenuBar* menuBar = _window->menuBar();
if (menuBar) {
menuBar->setVisible(true);
}
#endif
}
void PluginContainerProxy::showDisplayPluginsTools() {
DependencyManager::get<DialogsManager>()->hmdTools(true);
}
QGLWidget* PluginContainerProxy::getPrimarySurface() {
return qApp->_glWidget;
}
void Application::setActiveDisplayPlugin(const QString& pluginName) {
auto menu = Menu::getInstance();
foreach(DisplayPluginPointer displayPlugin, PluginManager::getInstance()->getDisplayPlugins()) {
QString name = displayPlugin->getName();
QAction* action = menu->getActionForOption(name);
if (pluginName == name) {
action->setChecked(true);
}
}
updateDisplayMode();
}

View file

@ -0,0 +1,30 @@
#pragma once
#ifndef hifi_PluginContainerProxy_h
#define hifi_PluginContainerProxy_h
#include <QObject>
#include <QRect>
#include <plugins/Forward.h>
#include <plugins/PluginContainer.h>
class PluginContainerProxy : public QObject, PluginContainer {
Q_OBJECT
PluginContainerProxy();
virtual void addMenu(const QString& menuName) override;
virtual void removeMenu(const QString& menuName) override;
virtual QAction* addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable = false, bool checked = false, const QString& groupName = "") override;
virtual void removeMenuItem(const QString& menuName, const QString& menuItem) override;
virtual bool isOptionChecked(const QString& name) override;
virtual void setIsOptionChecked(const QString& path, bool checked);
virtual void setFullscreen(const QScreen* targetScreen, bool hideMenu = true) override;
virtual void unsetFullscreen(const QScreen* avoidScreen = nullptr) override;
virtual void showDisplayPluginsTools() override;
virtual QGLWidget* getPrimarySurface() override;
virtual bool isForeground() override;
QRect _savedGeometry{ 10, 120, 800, 600 };
friend class Application;
};
#endif

View file

@ -191,7 +191,7 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
auto geometryCache = DependencyManager::get<GeometryCache>();
auto textureCache = DependencyManager::get<TextureCache>();
gpu::Batch batch;
gpu::Batch& batch = *renderArgs->_batch;
batch.setViewTransform(Transform());
batch.setProjectionTransform(renderArgs->_viewFrustum->getProjection());
batch.setModelTransform(Transform().setRotation(glm::inverse(renderArgs->_viewFrustum->getOrientation()) *
@ -219,6 +219,4 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
batch.setInputBuffer(VERTICES_SLOT, posView);
batch.setInputBuffer(COLOR_SLOT, colView);
batch.draw(gpu::Primitive::POINTS, STARFIELD_NUM_STARS);
renderArgs->_context->render(batch);
}

View file

@ -35,41 +35,87 @@ using namespace std;
void renderWorldBox(gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// Show edge of world
static const glm::vec3 red(1.0f, 0.0f, 0.0f);
static const glm::vec3 green(0.0f, 1.0f, 0.0f);
static const glm::vec3 blue(0.0f, 0.0f, 1.0f);
static const glm::vec3 grey(0.5f, 0.5f, 0.5f);
// Show center of world
static const glm::vec3 RED(1.0f, 0.0f, 0.0f);
static const glm::vec3 GREEN(0.0f, 1.0f, 0.0f);
static const glm::vec3 BLUE(0.0f, 0.0f, 1.0f);
static const glm::vec3 GREY(0.5f, 0.5f, 0.5f);
static const glm::vec4 GREY4(0.5f, 0.5f, 0.5f, 1.0f);
static const glm::vec4 DASHED_RED(1.0f, 0.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_GREEN(0.0f, 1.0f, 0.0f, 1.0f);
static const glm::vec4 DASHED_BLUE(0.0f, 0.0f, 1.0f, 1.0f);
static const float DASH_LENGTH = 1.0f;
static const float GAP_LENGTH = 1.0f;
auto transform = Transform{};
batch.setModelTransform(transform);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(TREE_SCALE, 0.0f, 0.0f), red);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, TREE_SCALE, 0.0f), green);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, TREE_SCALE), blue);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, TREE_SCALE), glm::vec3(TREE_SCALE, 0.0f, TREE_SCALE), grey);
geometryCache->renderLine(batch, glm::vec3(TREE_SCALE, 0.0f, TREE_SCALE), glm::vec3(TREE_SCALE, 0.0f, 0.0f), grey);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(HALF_TREE_SCALE, 0.0f, 0.0f), RED);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(-HALF_TREE_SCALE, 0.0f, 0.0f), DASHED_RED,
DASH_LENGTH, GAP_LENGTH);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, HALF_TREE_SCALE, 0.0f), GREEN);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -HALF_TREE_SCALE, 0.0f), DASHED_GREEN,
DASH_LENGTH, GAP_LENGTH);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, HALF_TREE_SCALE), BLUE);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, -HALF_TREE_SCALE), DASHED_BLUE,
DASH_LENGTH, GAP_LENGTH);
// X center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), GREY);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY);
// Z center boundaries
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), GREY);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), GREY);
geometryCache->renderLine(batch, glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY);
// Center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), GREY);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY);
geometryCache->renderWireCube(batch, TREE_SCALE, GREY4);
// Draw meter markers along the 3 axes to help with measuring things
const float MARKER_DISTANCE = 1.0f;
const float MARKER_RADIUS = 0.05f;
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, red);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, RED);
transform.setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, red);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, RED);
transform.setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, green);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, GREEN);
transform.setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, blue);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, BLUE);
transform.setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE));
batch.setModelTransform(transform);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, grey);
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, GREY);
}
// Return a random vector of average length 1

View file

@ -196,7 +196,6 @@ void Avatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("hand");
getHand()->simulate(deltaTime, false);
}
_skeletonModel.setLODDistance(getLODDistance());
if (!_shouldRenderBillboard && inViewFrustum) {
{
@ -470,8 +469,8 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
* (1.0f - ((float)(now - getHead()->getLookingAtMeStarted()))
/ (LOOKING_AT_ME_DURATION * (float)USECS_PER_SECOND));
if (alpha > 0.0f) {
QSharedPointer<NetworkGeometry> geometry = getHead()->getFaceModel().getGeometry();
if (geometry) {
QSharedPointer<NetworkGeometry> geometry = _skeletonModel.getGeometry();
if (geometry && geometry->isLoaded()) {
const float DEFAULT_EYE_DIAMETER = 0.048f; // Typical human eye
const float RADIUS_INCREMENT = 0.005f;
Transform transform;
@ -562,24 +561,22 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
}
void Avatar::fixupModelsInScene() {
if (!(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
return;
}
// check to see if our models were ready when we added them to the scene; if they were not ready, then
// fix them up in the scene
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
if (_skeletonModel.needsFixupInScene()) {
if (_skeletonModel.isRenderable() && _skeletonModel.needsFixupInScene()) {
_skeletonModel.removeFromScene(scene, pendingChanges);
_skeletonModel.addToScene(scene, pendingChanges);
}
if (getHead()->getFaceModel().needsFixupInScene()) {
getHead()->getFaceModel().removeFromScene(scene, pendingChanges);
getHead()->getFaceModel().addToScene(scene, pendingChanges);
Model& faceModel = getHead()->getFaceModel();
if (faceModel.isRenderable() && faceModel.needsFixupInScene()) {
faceModel.removeFromScene(scene, pendingChanges);
faceModel.addToScene(scene, pendingChanges);
}
for (auto attachmentModel : _attachmentModels) {
if (attachmentModel->needsFixupInScene()) {
if (attachmentModel->isRenderable() && attachmentModel->needsFixupInScene()) {
attachmentModel->removeFromScene(scene, pendingChanges);
attachmentModel->addToScene(scene, pendingChanges);
}
@ -600,13 +597,12 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, floa
if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
// render the billboard until both models are loaded
renderBillboard(renderArgs);
return;
} else {
getHead()->render(renderArgs, 1.0f, renderFrustum);
}
getHand()->render(renderArgs, false);
}
getHead()->render(renderArgs, 1.0f, renderFrustum);
getHead()->renderLookAts(renderArgs);
}
@ -621,11 +617,8 @@ void Avatar::simulateAttachments(float deltaTime) {
int jointIndex = getJointIndex(attachment.jointName);
glm::vec3 jointPosition;
glm::quat jointRotation;
if (!isMyAvatar()) {
model->setLODDistance(getLODDistance());
}
if (_skeletonModel.getJointPositionInWorldFrame(jointIndex, jointPosition) &&
_skeletonModel.getJointCombinedRotation(jointIndex, jointRotation)) {
_skeletonModel.getJointCombinedRotation(jointIndex, jointRotation)) {
model->setTranslation(jointPosition + jointRotation * attachment.translation * _scale);
model->setRotation(jointRotation * attachment.rotation);
model->setScaleToFit(true, _scale * attachment.scale, true); // hack to force rescale
@ -754,11 +747,12 @@ Transform Avatar::calculateDisplayNameTransform(const ViewFrustum& frustum, floa
qDebug() << "ASSERT because isinf(scale)";
}
qDebug() << "textPosition =" << textPosition;
qDebug() << "projMat =" << projMat;
qDebug() << "viewMat =" << viewMat;
qDebug() << "viewProj =" << viewProj;
qDebug() << "windowSizeY =" << windowSizeY;
qDebug() << "p1.y =" << p1.y;
qDebug() << "p1.w =" << p1.w;
qDebug() << "p0.y =" << p0.y;
qDebug() << "p0.w =" << p0.w;
qDebug() << "p1 =" << p1;
qDebug() << "p0 =" << p0;
qDebug() << "qApp->getDevicePixelRatio() =" << qApp->getDevicePixelRatio();
qDebug() << "fontSize =" << fontSize;
qDebug() << "pixelHeight =" << pixelHeight;
@ -978,12 +972,12 @@ void Avatar::scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const {
void Avatar::setFaceModelURL(const QUrl& faceModelURL) {
AvatarData::setFaceModelURL(faceModelURL);
getHead()->getFaceModel().setURL(_faceModelURL, AvatarData::defaultFullAvatarModelUrl(), true, !isMyAvatar());
getHead()->getFaceModel().setURL(_faceModelURL);
}
void Avatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
AvatarData::setSkeletonModelURL(skeletonModelURL);
_skeletonModel.setURL(_skeletonModelURL, AvatarData::defaultFullAvatarModelUrl(), true, !isMyAvatar());
_skeletonModel.setURL(_skeletonModelURL);
}
void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {

View file

@ -233,9 +233,6 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
_saccade = glm::vec3();
}
if (!isMine) {
_faceModel.setLODDistance(static_cast<Avatar*>(_owningAvatar)->getLODDistance());
}
_leftEyePosition = _rightEyePosition = getPosition();
if (!billboard) {
_faceModel.simulate(deltaTime);
@ -277,6 +274,10 @@ void Head::calculateMouthShapes() {
void Head::applyEyelidOffset(glm::quat headOrientation) {
// Adjusts the eyelid blendshape coefficients so that the eyelid follows the iris as the head pitches.
if (Menu::getInstance()->isOptionChecked(MenuOption::DisableEyelidAdjustment)) {
return;
}
glm::quat eyeRotation = rotationBetween(headOrientation * IDENTITY_FRONT, getLookAtPosition() - _eyePosition);
eyeRotation = eyeRotation * glm::angleAxis(safeEulerAngles(headOrientation).y, IDENTITY_UP); // Rotation w.r.t. head
float eyePitch = safeEulerAngles(eyeRotation).x;

View file

@ -843,10 +843,11 @@ void MyAvatar::sendKillAvatar() {
DependencyManager::get<NodeList>()->broadcastToNodes(std::move(killPacket), NodeSet() << NodeType::AvatarMixer);
}
static int counter = 0;
void MyAvatar::updateLookAtTargetAvatar() {
//
// Look at the avatar whose eyes are closest to the ray in the direction of my avatar's head
//
// And set the correctedLookAt for all (nearby) avatars that are looking at me.
_lookAtTargetAvatar.reset();
_targetAvatarPosition = glm::vec3(0.0f);
@ -870,14 +871,51 @@ void MyAvatar::updateLookAtTargetAvatar() {
smallestAngleTo = angleTo;
}
if (Application::getInstance()->isLookingAtMyAvatar(avatar)) {
// Alter their gaze to look directly at my camera; this looks more natural than looking at my avatar's face.
// Offset their gaze according to whether they're looking at one of my eyes or my mouth.
glm::vec3 gazeOffset = avatar->getHead()->getLookAtPosition() - getHead()->getEyePosition();
const float HUMAN_EYE_SEPARATION = 0.065f;
float myEyeSeparation = glm::length(getHead()->getLeftEyePosition() - getHead()->getRightEyePosition());
gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ gazeOffset);
glm::vec3 lookAtPosition = avatar->getHead()->getLookAtPosition(); // A position, in world space, on my avatar.
// The camera isn't at the point midway between the avatar eyes. (Even without an HMD, the head can be offset a bit.)
// Let's get everything to world space:
glm::vec3 avatarLeftEye = getHead()->getLeftEyePosition();
glm::vec3 avatarRightEye = getHead()->getRightEyePosition();
// When not in an HMD, these might both answer identity (i.e., the bridge of the nose). That's ok.
// By my inspection of the code and live testing, getEyeOffset and getEyePose are the same. (Application hands identity as the offset matrix.)
// This might be more work than needed for any given use, but as we explore different formulations, we go mad if we don't work in world space.
glm::mat4 leftEye = Application::getInstance()->getEyeOffset(Eye::Left);
glm::mat4 rightEye = Application::getInstance()->getEyeOffset(Eye::Right);
glm::vec3 leftEyeHeadLocal = glm::vec3(leftEye[3]);
glm::vec3 rightEyeHeadLocal = glm::vec3(rightEye[3]);
auto humanSystem = Application::getInstance()->getViewFrustum();
glm::vec3 humanLeftEye = humanSystem->getPosition() + (humanSystem->getOrientation() * leftEyeHeadLocal);
glm::vec3 humanRightEye = humanSystem->getPosition() + (humanSystem->getOrientation() * rightEyeHeadLocal);
// First find out where (in world space) the person is looking relative to that bridge-of-the-avatar point.
// (We will be adding that offset to the camera position, after making some other adjustments.)
glm::vec3 gazeOffset = lookAtPosition - getHead()->getEyePosition();
// Scale by proportional differences between avatar and human.
float humanEyeSeparationInModelSpace = glm::length(humanLeftEye - humanRightEye);
float avatarEyeSeparation = glm::length(avatarLeftEye - avatarRightEye);
gazeOffset = gazeOffset * humanEyeSeparationInModelSpace / avatarEyeSeparation;
// If the camera is also not oriented with the head, adjust by getting the offset in head-space...
/* Not needed (i.e., code is a no-op), but I'm leaving the example code here in case something like this is needed someday.
glm::quat avatarHeadOrientation = getHead()->getOrientation();
glm::vec3 gazeOffsetLocalToHead = glm::inverse(avatarHeadOrientation) * gazeOffset;
// ... and treat that as though it were in camera space, bringing it back to world space.
// But camera is fudged to make the picture feel like the avatar's orientation.
glm::quat humanOrientation = humanSystem->getOrientation(); // or just avatar getOrientation() ?
gazeOffset = humanOrientation * gazeOffsetLocalToHead;
glm::vec3 corrected = humanSystem->getPosition() + gazeOffset;
*/
// And now we can finally add that offset to the camera.
glm::vec3 corrected = Application::getInstance()->getViewFrustum()->getPosition() + gazeOffset;
avatar->getHead()->setCorrectedLookAtPosition(corrected);
} else {
avatar->getHead()->clearCorrectedLookAtPosition();
}
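The scaling step above boils down to a single proportion; a worked sketch (names are assumed for illustration, not part of the change): an offset measured against the avatar's eye separation is rescaled to the human (camera) eye separation before being added to the camera position.
#include <glm/glm.hpp>
// Sketch: rescale a world-space gaze offset from avatar eye spacing to human eye spacing.
glm::vec3 rescaleGazeOffset(const glm::vec3& gazeOffset,
                            float humanEyeSeparation, float avatarEyeSeparation) {
    // e.g. a 0.10 m offset on an avatar with 0.13 m eye spacing becomes
    // 0.10 * (0.065 / 0.13) = 0.05 m for a human with 0.065 m eye spacing
    return gazeOffset * (humanEyeSeparation / avatarEyeSeparation);
}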
@ -1103,7 +1141,7 @@ void MyAvatar::attach(const QString& modelURL, const QString& jointName, const g
void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, float glowLevel) {
if (!(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
if (!_skeletonModel.isRenderable()) {
return; // wait until all models are loaded
}
@ -1114,6 +1152,7 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, fl
getHead()->render(renderArgs, 1.0f, renderFrustum);
}
// This is drawing the lookat vectors from our avatar to wherever we're looking.
if (qApp->isHMDMode()) {
glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();

View file

@ -185,7 +185,8 @@ void ConnexionClient::destroy() {
ConnexionData& connexiondata = ConnexionData::getInstance();
int deviceid = connexiondata.getDeviceID();
connexiondata.setDeviceID(0);
Application::getUserInputMapper()->removeDevice(deviceid);
auto userInputMapper = DependencyManager::get<UserInputMapper>();
userInputMapper->removeDevice(deviceid);
}
#define LOGITECH_VENDOR_ID 0x46d
@ -289,14 +290,15 @@ unsigned short HidToVirtualKey(unsigned long pid, unsigned short hidKeyCode) {
bool ConnexionClient::RawInputEventFilter(void* msg, long* result) {
ConnexionData& connexiondata = ConnexionData::getInstance();
auto userInputMapper = DependencyManager::get<UserInputMapper>();
if (Is3dmouseAttached() && connexiondata.getDeviceID() == 0) {
connexiondata.registerToUserInputMapper(*Application::getUserInputMapper());
connexiondata.assignDefaultInputMapping(*Application::getUserInputMapper());
connexiondata.registerToUserInputMapper(*userInputMapper);
connexiondata.assignDefaultInputMapping(*userInputMapper);
UserActivityLogger::getInstance().connectedDevice("controller", "3Dconnexion");
} else if (!Is3dmouseAttached() && connexiondata.getDeviceID() != 0) {
int deviceid = connexiondata.getDeviceID();
connexiondata.setDeviceID(0);
Application::getUserInputMapper()->removeDevice(deviceid);
userInputMapper->removeDevice(deviceid);
}
if (!Is3dmouseAttached()) {

View file

@ -211,7 +211,6 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
//Handle fading and deactivation/activation of UI
gpu::Batch batch;
renderArgs->_context->syncCache();
auto geometryCache = DependencyManager::get<GeometryCache>();
geometryCache->useSimpleDrawPipeline(batch);
@ -279,7 +278,6 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
vec2 canvasSize = qApp->getCanvasSize();
_textureAspectRatio = aspect(canvasSize);
renderArgs->_context->syncCache();
auto geometryCache = DependencyManager::get<GeometryCache>();
gpu::Batch batch;

View file

@ -92,7 +92,6 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
renderOverlays(renderArgs); // renders Scripts Overlay and AudioScope
renderStatsAndLogs(renderArgs); // currently renders nothing
renderArgs->_context->syncCache();
renderArgs->_context->render(batch);
renderArgs->_batch = nullptr; // so future users of renderArgs don't try to use our batch

View file

@ -3,4 +3,6 @@ set(TARGET_NAME animation)
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(Network Script)
link_hifi_libraries(shared gpu model fbx)
setup_memory_debugger()
link_hifi_libraries(shared gpu model fbx)

View file

@ -13,6 +13,7 @@
#include <QThreadPool>
#include "AnimationCache.h"
#include "AnimationLogging.h"
static int animationPointerMetaTypeId = qRegisterMetaType<AnimationPointer>();
@ -38,35 +39,47 @@ QSharedPointer<Resource> AnimationCache::createResource(const QUrl& url, const Q
return QSharedPointer<Resource>(new Animation(url), &Resource::allReferencesCleared);
}
Animation::Animation(const QUrl& url) : Resource(url) {}
class AnimationReader : public QRunnable {
public:
AnimationReader(const QWeakPointer<Resource>& animation, QNetworkReply* reply);
virtual void run();
private:
QWeakPointer<Resource> _animation;
QNetworkReply* _reply;
};
AnimationReader::AnimationReader(const QWeakPointer<Resource>& animation, QNetworkReply* reply) :
_animation(animation),
AnimationReader::AnimationReader(const QUrl& url, QNetworkReply* reply) :
_url(url),
_reply(reply) {
}
void AnimationReader::run() {
QSharedPointer<Resource> animation = _animation.toStrongRef();
if (!animation.isNull()) {
QMetaObject::invokeMethod(animation.data(), "setGeometry",
Q_ARG(const FBXGeometry&, readFBX(_reply->readAll(), QVariantHash(), _reply->property("url").toString())));
try {
if (!_reply) {
throw QString("Reply is NULL ?!");
}
QString urlname = _url.path().toLower();
bool urlValid = true;
urlValid &= !urlname.isEmpty();
urlValid &= !_url.path().isEmpty();
if (urlValid) {
// Parse the FBX directly from the QNetworkReply
FBXGeometry* fbxgeo = nullptr;
if (_url.path().toLower().endsWith(".fbx")) {
fbxgeo = readFBX(_reply, QVariantHash(), _url.path());
} else {
QString errorStr("unsupported format");
emit onError(299, errorStr);
}
emit onSuccess(fbxgeo);
} else {
throw QString("url is invalid");
}
} catch (const QString& error) {
emit onError(299, error);
}
_reply->deleteLater();
}
Animation::Animation(const QUrl& url) : Resource(url) {}
bool Animation::isLoaded() const {
return _loaded && _geometry;
}
QStringList Animation::getJointNames() const {
if (QThread::currentThread() != thread()) {
QStringList result;
@ -75,7 +88,7 @@ QStringList Animation::getJointNames() const {
return result;
}
QStringList names;
foreach (const FBXJoint& joint, _geometry.joints) {
foreach (const FBXJoint& joint, _geometry->joints) {
names.append(joint.name);
}
return names;
@ -88,23 +101,33 @@ QVector<FBXAnimationFrame> Animation::getFrames() const {
Q_RETURN_ARG(QVector<FBXAnimationFrame>, result));
return result;
}
return _geometry.animationFrames;
return _geometry->animationFrames;
}
const QVector<FBXAnimationFrame>& Animation::getFramesReference() const {
return _geometry.animationFrames;
}
void Animation::setGeometry(const FBXGeometry& geometry) {
_geometry = geometry;
finishedLoading(true);
return _geometry->animationFrames;
}
void Animation::downloadFinished(QNetworkReply* reply) {
// send the reader off to the thread pool
QThreadPool::globalInstance()->start(new AnimationReader(_self, reply));
// parse the animation/fbx file on a background thread.
AnimationReader* animationReader = new AnimationReader(reply->url(), reply);
connect(animationReader, SIGNAL(onSuccess(FBXGeometry*)), SLOT(animationParseSuccess(FBXGeometry*)));
connect(animationReader, SIGNAL(onError(int, QString)), SLOT(animationParseError(int, QString)));
QThreadPool::globalInstance()->start(animationReader);
}
void Animation::animationParseSuccess(FBXGeometry* geometry) {
qCDebug(animation) << "Animation parse success" << _url.toDisplayString();
_geometry.reset(geometry);
finishedLoading(true);
}
void Animation::animationParseError(int error, QString str) {
qCCritical(animation) << "Animation failure parsing " << _url.toDisplayString() << "code =" << error << str;
emit failed(QNetworkReply::UnknownContentError);
}
AnimationDetails::AnimationDetails() :
role(), url(), fps(0.0f), priority(0.0f), loop(false), hold(false),

View file

@ -12,6 +12,7 @@
#ifndef hifi_AnimationCache_h
#define hifi_AnimationCache_h
#include <QRunnable>
#include <QScriptEngine>
#include <QScriptValue>
@ -52,7 +53,10 @@ public:
Animation(const QUrl& url);
const FBXGeometry& getGeometry() const { return _geometry; }
const FBXGeometry& getGeometry() const { return *_geometry; }
virtual bool isLoaded() const override;
Q_INVOKABLE QStringList getJointNames() const;
@ -61,16 +65,33 @@ public:
const QVector<FBXAnimationFrame>& getFramesReference() const;
protected:
Q_INVOKABLE void setGeometry(const FBXGeometry& geometry);
virtual void downloadFinished(QNetworkReply* reply);
protected slots:
void animationParseSuccess(FBXGeometry* geometry);
void animationParseError(int error, QString str);
private:
FBXGeometry _geometry;
std::unique_ptr<FBXGeometry> _geometry;
};
/// Reads geometry in a worker thread.
class AnimationReader : public QObject, public QRunnable {
Q_OBJECT
public:
AnimationReader(const QUrl& url, QNetworkReply* reply);
virtual void run();
signals:
void onSuccess(FBXGeometry* geometry);
void onError(int error, QString str);
private:
QUrl _url;
QNetworkReply* _reply;
};
class AnimationDetails {
public:

View file

@ -15,6 +15,7 @@
void AnimationHandle::setURL(const QUrl& url) {
if (_url != url) {
_animation = DependencyManager::get<AnimationCache>()->getAnimation(_url = url);
_animation->ensureLoading();
QObject::connect(_animation.data(), &Resource::onRefresh, this, &AnimationHandle::clearJoints);
_jointMappings.clear();
}

View file

@ -1,5 +1,7 @@
set(TARGET_NAME audio-client)
setup_memory_debugger()
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(Network Multimedia)
@ -25,4 +27,4 @@ if (APPLE)
find_library(CoreAudio CoreAudio)
find_library(CoreFoundation CoreFoundation)
target_link_libraries(${TARGET_NAME} ${CoreAudio} ${CoreFoundation})
endif ()
endif ()

View file

@ -1,5 +1,7 @@
set(TARGET_NAME audio)
setup_memory_debugger()
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(Network)

View file

@ -77,9 +77,9 @@ void AudioInjector::injectAudio() {
int byteOffset = (int) floorf(AudioConstants::SAMPLE_RATE * _options.secondOffset * (_options.stereo ? 2.0f : 1.0f));
byteOffset *= sizeof(int16_t);
_currentSendPosition = byteOffset;
_currentSendOffset = byteOffset;
} else {
_currentSendPosition = 0;
_currentSendOffset = 0;
}
if (_options.localOnly) {
@ -119,7 +119,7 @@ void AudioInjector::injectLocally() {
_localBuffer->setVolume(_options.volume);
// give our current send offset to the local buffer
_localBuffer->setCurrentOffset(_currentSendPosition);
_localBuffer->setCurrentOffset(_currentSendOffset);
success = _localAudioInterface->outputLocalInjector(_options.stereo, this);
@ -144,9 +144,9 @@ void AudioInjector::injectLocally() {
const uchar MAX_INJECTOR_VOLUME = 0xFF;
void AudioInjector::injectToMixer() {
if (_currentSendPosition < 0 ||
_currentSendPosition >= _audioData.size()) {
_currentSendPosition = 0;
if (_currentSendOffset < 0 ||
_currentSendOffset >= _audioData.size()) {
_currentSendOffset = 0;
}
auto nodeList = DependencyManager::get<NodeList>();
@ -203,15 +203,15 @@ void AudioInjector::injectToMixer() {
// loop to send off our audio in NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL byte chunks
quint16 outgoingInjectedAudioSequenceNumber = 0;
while (_currentSendPosition < _audioData.size() && !_shouldStop) {
while (_currentSendOffset < _audioData.size() && !_shouldStop) {
int bytesToCopy = std::min(((_options.stereo) ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL,
_audioData.size() - _currentSendPosition);
_audioData.size() - _currentSendOffset);
// Measure the loudness of this frame
_loudness = 0.0f;
for (int i = 0; i < bytesToCopy; i += sizeof(int16_t)) {
_loudness += abs(*reinterpret_cast<int16_t*>(_audioData.data() + _currentSendPosition + i)) /
_loudness += abs(*reinterpret_cast<int16_t*>(_audioData.data() + _currentSendOffset + i)) /
(AudioConstants::MAX_SAMPLE_VALUE / 2.0f);
}
_loudness /= (float)(bytesToCopy / sizeof(int16_t));
@ -220,7 +220,7 @@ void AudioInjector::injectToMixer() {
// pack the sequence number
audioPacket->writePrimitive(outgoingInjectedAudioSequenceNumber);
audioPacket->seek(positionOptionOffset);
audioPacket->writePrimitive(_options.position);
audioPacket->writePrimitive(_options.orientation);
@ -232,7 +232,7 @@ void AudioInjector::injectToMixer() {
audioPacket->seek(audioDataOffset);
// copy the next NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL bytes to the packet
audioPacket->write(_audioData.data() + _currentSendPosition, bytesToCopy);
audioPacket->write(_audioData.data() + _currentSendOffset, bytesToCopy);
// set the correct size used for this packet
audioPacket->setPayloadSize(audioPacket->pos());
@ -246,11 +246,11 @@ void AudioInjector::injectToMixer() {
outgoingInjectedAudioSequenceNumber++;
}
_currentSendPosition += bytesToCopy;
_currentSendOffset += bytesToCopy;
// send two packets before the first sleep so the mixer can start playback right away
if (_currentSendPosition != bytesToCopy && _currentSendPosition < _audioData.size()) {
if (_currentSendOffset != bytesToCopy && _currentSendOffset < _audioData.size()) {
// process events in case we have been told to stop and be deleted
QCoreApplication::processEvents();
@ -268,8 +268,8 @@ void AudioInjector::injectToMixer() {
}
}
if (shouldLoop && _currentSendPosition >= _audioData.size()) {
_currentSendPosition = 0;
if (shouldLoop && _currentSendOffset >= _audioData.size()) {
_currentSendOffset = 0;
}
}
}
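The rename from _currentSendPosition to _currentSendOffset above is mechanical; the value is still the byte offset derived from the requested start time in seconds. A minimal sketch of that conversion, assuming interleaved 16-bit PCM and treating the sample rate as a parameter (it stands in for AudioConstants::SAMPLE_RATE, which is not restated here):

#include <cmath>
#include <cstdint>

// Hedged sketch: convert a start offset in seconds into a byte offset into
// interleaved 16-bit PCM data, matching the arithmetic in injectAudio() above.
static int secondsToByteOffset(float secondOffset, bool stereo, int sampleRate) {
    int samples = (int)floorf(sampleRate * secondOffset * (stereo ? 2.0f : 1.0f));
    return samples * (int)sizeof(int16_t); // two bytes per sample
}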

View file

@ -31,7 +31,6 @@ class AbstractAudioInterface;
class AudioInjector : public QObject {
Q_OBJECT
Q_PROPERTY(AudioInjectorOptions options WRITE setOptions READ getOptions)
public:
AudioInjector(QObject* parent);
AudioInjector(Sound* sound, const AudioInjectorOptions& injectorOptions);
@ -39,7 +38,8 @@ public:
bool isFinished() const { return _isFinished; }
int getCurrentSendPosition() const { return _currentSendPosition; }
int getCurrentSendOffset() const { return _currentSendOffset; }
void setCurrentSendOffset(int currentSendOffset) { _currentSendOffset = currentSendOffset; }
AudioInjectorLocalBuffer* getLocalBuffer() const { return _localBuffer; }
bool isLocalOnly() const { return _options.localOnly; }
@ -58,9 +58,8 @@ public slots:
void stopAndDeleteLater();
const AudioInjectorOptions& getOptions() const { return _options; }
void setOptions(const AudioInjectorOptions& options) { _options = options; }
void setOptions(const AudioInjectorOptions& options) { _options = options; }
void setCurrentSendPosition(int currentSendPosition) { _currentSendPosition = currentSendPosition; }
float getLoudness() const { return _loudness; }
bool isPlaying() const { return _isPlaying; }
void restartPortionAfterFinished();
@ -82,7 +81,7 @@ private:
bool _isStarted = false;
bool _isFinished = false;
bool _shouldDeleteAfterFinish = false;
int _currentSendPosition = 0;
int _currentSendOffset = 0;
AbstractAudioInterface* _localAudioInterface = NULL;
AudioInjectorLocalBuffer* _localBuffer = NULL;
};

View file

@ -112,7 +112,7 @@ int InboundAudioStream::parseData(NLPacket& packet) {
// parse the info after the seq number and before the audio data (the stream properties)
int prePropertyPosition = packet.pos();
int propertyBytes = parseStreamProperties(packet.getType(), packet.read(packet.bytesLeftToRead()), networkSamples);
int propertyBytes = parseStreamProperties(packet.getType(), packet.readWithoutCopy(packet.bytesLeftToRead()), networkSamples);
packet.seek(prePropertyPosition + propertyBytes);
// handle this packet based on its arrival status.
@ -131,7 +131,7 @@ int InboundAudioStream::parseData(NLPacket& packet) {
if (packet.getType() == PacketType::SilentAudioFrame) {
writeDroppableSilentSamples(networkSamples);
} else {
parseAudioData(packet.getType(), packet.read(packet.bytesLeftToRead()), networkSamples);
parseAudioData(packet.getType(), packet.readWithoutCopy(packet.bytesLeftToRead()), networkSamples);
}
break;
}

View file

@ -1,3 +1,6 @@
set(TARGET_NAME auto-updater)
setup_memory_debugger()
setup_hifi_library(Network)
link_hifi_libraries(shared networking)

View file

@ -1,5 +1,7 @@
set(TARGET_NAME avatars)
setup_memory_debugger()
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(Network Script)

View file

@ -53,11 +53,11 @@ void AvatarHashMap::processAvatarDataPacket(QSharedPointer<NLPacket> packet, Sha
// enumerate over all of the avatars in this packet
// only add them if mixerWeakPointer points to something (meaning that mixer is still around)
while (packet->bytesLeftToRead()) {
QUuid sessionUUID = QUuid::fromRfc4122(packet->read(NUM_BYTES_RFC4122_UUID));
QUuid sessionUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
int positionBeforeRead = packet->pos();
QByteArray byteArray = packet->read(packet->bytesLeftToRead());
QByteArray byteArray = packet->readWithoutCopy(packet->bytesLeftToRead());
if (sessionUUID != _lastOwnerSessionUUID) {
AvatarSharedPointer avatar = _avatarHash.value(sessionUUID);
@ -114,7 +114,7 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<NLPacket> packet,
}
void AvatarHashMap::processAvatarBillboardPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
QUuid sessionUUID = QUuid::fromRfc4122(packet->read(NUM_BYTES_RFC4122_UUID));
QUuid sessionUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
AvatarSharedPointer avatar = _avatarHash.value(sessionUUID);
if (!avatar) {
@ -129,7 +129,7 @@ void AvatarHashMap::processAvatarBillboardPacket(QSharedPointer<NLPacket> packet
void AvatarHashMap::processKillAvatar(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
// read the node id
QUuid sessionUUID = QUuid::fromRfc4122(packet->read(NUM_BYTES_RFC4122_UUID));
QUuid sessionUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
removeAvatar(sessionUUID);
}
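The read() -> readWithoutCopy() substitutions here and in InboundAudioStream above presumably avoid a deep copy per packet in these hot paths; the name suggests the returned QByteArray aliases the packet payload and is only valid while the packet is alive. A rough sketch of that idea using Qt's QByteArray::fromRawData (this is an illustration of the presumed semantics, not the actual NLPacket implementation):

#include <QByteArray>

// Hedged sketch: wrap the payload in place instead of copying it. The caller
// must not keep the result beyond the lifetime of the underlying buffer.
QByteArray readWithoutCopySketch(const char* payload, int offset, int maxSize) {
    return QByteArray::fromRawData(payload + offset, maxSize);
}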

View file

@ -371,7 +371,7 @@ void Player::setAudioInjectorPosition() {
int MSEC_PER_SEC = 1000;
int FRAME_SIZE = sizeof(AudioConstants::AudioSample) * _recording->numberAudioChannel();
int currentAudioFrame = elapsed() * FRAME_SIZE * (AudioConstants::SAMPLE_RATE / MSEC_PER_SEC);
_injector->setCurrentSendPosition(currentAudioFrame);
_injector->setCurrentSendOffset(currentAudioFrame);
}
void Player::setPlayFromCurrentLocation(bool playFromCurrentLocation) {

View file

@ -1,5 +1,7 @@
set(TARGET_NAME display-plugins)
setup_memory_debugger()
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(OpenGL)
@ -31,4 +33,4 @@ if (WIN32)
find_package(OpenVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${OPENVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${OPENVR_LIBRARIES})
endif()
endif()

View file

@ -9,28 +9,55 @@
#include <plugins/PluginContainer.h>
#include <QWindow>
#include <QGuiApplication>
const QString Basic2DWindowOpenGLDisplayPlugin::NAME("2D Display");
const QString MENU_PARENT = "View";
const QString MENU_NAME = "Display Options";
const QString MENU_PATH = MENU_PARENT + ">" + MENU_NAME;
const QString FULLSCREEN = "Fullscreen";
static const QString FULLSCREEN = "Fullscreen";
const QString& Basic2DWindowOpenGLDisplayPlugin::getName() const {
return NAME;
}
void Basic2DWindowOpenGLDisplayPlugin::activate() {
// container->addMenu(MENU_PATH);
// container->addMenuItem(MENU_PATH, FULLSCREEN,
// [this] (bool clicked) { this->setFullscreen(clicked); },
// true, false);
MainWindowOpenGLDisplayPlugin::activate();
CONTAINER->addMenu(MENU_PATH);
CONTAINER->addMenuItem(MENU_PATH, FULLSCREEN,
[this](bool clicked) {
if (clicked) {
CONTAINER->setFullscreen(getFullscreenTarget());
} else {
CONTAINER->unsetFullscreen();
}
}, true, false);
WindowOpenGLDisplayPlugin::activate();
}
void Basic2DWindowOpenGLDisplayPlugin::deactivate() {
// container->removeMenuItem(MENU_NAME, FULLSCREEN);
// container->removeMenu(MENU_PATH);
MainWindowOpenGLDisplayPlugin::deactivate();
WindowOpenGLDisplayPlugin::deactivate();
}
int Basic2DWindowOpenGLDisplayPlugin::getDesiredInterval(bool isThrottled) const {
static const int THROTTLED_PAINT_TIMER_DELAY = MSECS_PER_SECOND / 15;
static const int PAINT_TIMER_DELAY_MS = 1;
return isThrottled ? THROTTLED_PAINT_TIMER_DELAY : PAINT_TIMER_DELAY_MS;
}
bool Basic2DWindowOpenGLDisplayPlugin::isThrottled() const {
static const QString ThrottleFPSIfNotFocus = "Throttle FPS If Not Focus"; // FIXME - this value duplicated in Menu.h
bool shouldThrottle = (!CONTAINER->isForeground() && CONTAINER->isOptionChecked(ThrottleFPSIfNotFocus));
if (_isThrottled != shouldThrottle) {
int desiredInterval = getDesiredInterval(shouldThrottle);
_timer.start(desiredInterval);
_isThrottled = shouldThrottle;
}
return shouldThrottle;
}
// FIXME target the screen the window is currently on
QScreen* Basic2DWindowOpenGLDisplayPlugin::getFullscreenTarget() {
return qApp->primaryScreen();
}

View file

@ -7,9 +7,10 @@
//
#pragma once
#include "MainWindowOpenGLDisplayPlugin.h"
#include "WindowOpenGLDisplayPlugin.h"
class Basic2DWindowOpenGLDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
class QScreen;
class Basic2DWindowOpenGLDisplayPlugin : public WindowOpenGLDisplayPlugin {
Q_OBJECT
public:
@ -18,6 +19,14 @@ public:
virtual const QString & getName() const override;
virtual bool isThrottled() const override;
protected:
int getDesiredInterval(bool isThrottled) const;
mutable bool _isThrottled = false;
private:
static const QString NAME;
QScreen* getFullscreenTarget();
int _fullscreenTarget{ -1 };
};

View file

@ -15,8 +15,10 @@
#include "Basic2DWindowOpenGLDisplayPlugin.h"
#include "openvr/OpenVrDisplayPlugin.h"
#include "oculus/Oculus_0_5_DisplayPlugin.h"
#include "oculus/Oculus_0_6_DisplayPlugin.h"
#include "oculus/OculusDisplayPlugin.h"
#include "oculus/OculusLegacyDisplayPlugin.h"
const QString DisplayPlugin::MENU_PATH{ "Display" };
// TODO migrate to a DLL model where plugins are discovered and loaded at runtime by the PluginManager class
DisplayPluginList getDisplayPlugins() {
@ -27,14 +29,20 @@ DisplayPluginList getDisplayPlugins() {
#endif
// Stereo modes
// FIXME fix stereo display plugins
//new SideBySideStereoDisplayPlugin(),
//new InterleavedStereoDisplayPlugin(),
// SBS left/right
new SideBySideStereoDisplayPlugin(),
// Interleaved left/right
new InterleavedStereoDisplayPlugin(),
// HMDs
new Oculus_0_5_DisplayPlugin(),
new Oculus_0_6_DisplayPlugin(),
// Windows Oculus SDK
new OculusDisplayPlugin(),
// Mac/Linux Oculus SDK (0.5)
new OculusLegacyDisplayPlugin(),
#ifdef Q_OS_WIN
// SteamVR SDK
new OpenVrDisplayPlugin(),
#endif
nullptr

View file

@ -97,12 +97,12 @@ public:
return baseProjection;
}
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const {
return glm::inverse(getEyePose(eye)) * baseModelview;
virtual glm::mat4 getView(Eye eye, const glm::mat4& baseView) const {
return glm::inverse(getEyePose(eye)) * baseView;
}
// HMD specific methods
// TODO move these into another class
// TODO move these into another class?
virtual glm::mat4 getEyePose(Eye eye) const {
static const glm::mat4 pose; return pose;
}
@ -115,12 +115,7 @@ public:
virtual void resetSensors() {}
virtual float devicePixelRatio() { return 1.0; }
//// The window for the surface, used for event interception. May be null.
//virtual QWindow* getWindow() const = 0;
//virtual void installEventFilter(QObject* filter) {}
//virtual void removeEventFilter(QObject* filter) {}
static const QString MENU_PATH;
signals:
void recommendedFramebufferSizeChanged(const QSize & size);
void requestRender();
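For reference, the getModelview -> getView rename above keeps the same composition: the per-eye view matrix is the inverse of the eye pose applied to the base view,

V_{\text{eye}} = E_{\text{eye}}^{-1} \, V_{\text{base}}

where E_eye is the matrix returned by getEyePose().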

View file

@ -1,13 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "WindowOpenGLDisplayPlugin.h"
class MainWindowOpenGLDisplayPlugin : public WindowOpenGLDisplayPlugin {
};

View file

@ -105,7 +105,7 @@ bool OpenGLDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
void OpenGLDisplayPlugin::display(
GLuint finalTexture, const glm::uvec2& sceneSize) {
using namespace oglplus;
uvec2 size = getRecommendedRenderSize();
uvec2 size = getSurfaceSize();
Context::Viewport(size.x, size.y);
glBindTexture(GL_TEXTURE_2D, finalTexture);
drawUnitQuad();

View file

@ -33,12 +33,13 @@ public:
protected:
virtual void customizeContext();
virtual void drawUnitQuad();
virtual glm::uvec2 getSurfaceSize() const = 0;
virtual void makeCurrent() = 0;
virtual void doneCurrent() = 0;
virtual void swapBuffers() = 0;
QTimer _timer;
ProgramPtr _program;
mutable QTimer _timer;
ProgramPtr _program;
ShapeWrapperPtr _plane;
};

View file

@ -16,6 +16,10 @@ WindowOpenGLDisplayPlugin::WindowOpenGLDisplayPlugin() {
}
glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedRenderSize() const {
return getSurfaceSize();
}
glm::uvec2 WindowOpenGLDisplayPlugin::getSurfaceSize() const {
uvec2 result;
if (_window) {
result = toGlm(_window->geometry().size() * _window->devicePixelRatio());
@ -23,6 +27,7 @@ glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedRenderSize() const {
return result;
}
glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedUiSize() const {
uvec2 result;
if (_window) {

View file

@ -21,6 +21,7 @@ public:
virtual void deactivate() override;
protected:
virtual glm::uvec2 getSurfaceSize() const override final;
virtual void makeCurrent() override;
virtual void doneCurrent() override;
virtual void swapBuffers() override;

View file

@ -1,76 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OculusBaseDisplayPlugin.h"
#include <ViewFrustum.h>
#include "OculusHelpers.h"
using namespace Oculus;
void OculusBaseDisplayPlugin::activate() {
glm::uvec2 eyeSizes[2];
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
_desiredFramebufferSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
_frameIndex = 0;
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
qFatal("Could not attach to sensor device");
}
MainWindowOpenGLDisplayPlugin::activate();
}
uvec2 OculusBaseDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
void OculusBaseDisplayPlugin::preRender() {
ovrHmd_GetEyePoses(_hmd, _frameIndex, _eyeOffsets, _eyePoses, nullptr);
}
glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
glm::mat4 OculusBaseDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
return baseModelview * toGlm(_eyePoses[eye]);
}
void OculusBaseDisplayPlugin::resetSensors() {
ovrHmd_RecenterPose(_hmd);
}
glm::mat4 OculusBaseDisplayPlugin::getEyePose(Eye eye) const {
return toGlm(_eyePoses[eye]);
}
// Should NOT be used for rendering as this will mess up timewarp. Use the getModelview() method above for
// any use of head poses for rendering, ensuring you use the correct eye
glm::mat4 OculusBaseDisplayPlugin::getHeadPose() const {
ovrTrackingState state = ovrHmd_GetTrackingState(_hmd, 0.0f);
return toGlm(state.HeadPose.ThePose);
}

View file

@ -1,26 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "../MainWindowOpenGLDisplayPlugin.h"
class OculusBaseDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
public:
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
virtual void activate() override;
virtual void preRender() override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyePose(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
};

View file

@ -5,7 +5,7 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Oculus_0_6_DisplayPlugin.h"
#include "OculusDisplayPlugin.h"
#include <memory>
@ -15,9 +15,7 @@
#include <GlWindow.h>
#include <QEvent>
#include <QResizeEvent>
#include <OVR_CAPI_GL.h>
#include <QThread>
#include <OglplusHelpers.h>
#include <oglplus/opt/list_init.hpp>
@ -27,18 +25,34 @@
#include <PerfStat.h>
#include <plugins/PluginContainer.h>
#include <ViewFrustum.h>
#include "OculusHelpers.h"
using namespace Oculus;
#if (OVR_MAJOR_VERSION == 6)
SwapFboPtr _sceneFbo;
MirrorFboPtr _mirrorFbo;
ovrLayerEyeFov _sceneLayer;
#if (OVR_MAJOR_VERSION == 6)
#define ovr_Create ovrHmd_Create
#define ovr_CreateSwapTextureSetGL ovrHmd_CreateSwapTextureSetGL
#define ovr_CreateMirrorTextureGL ovrHmd_CreateMirrorTextureGL
#define ovr_Destroy ovrHmd_Destroy
#define ovr_DestroySwapTextureSet ovrHmd_DestroySwapTextureSet
#define ovr_DestroyMirrorTexture ovrHmd_DestroyMirrorTexture
#define ovr_GetFloat ovrHmd_GetFloat
#define ovr_GetFovTextureSize ovrHmd_GetFovTextureSize
#define ovr_GetFrameTiming ovrHmd_GetFrameTiming
#define ovr_GetTrackingState ovrHmd_GetTrackingState
#define ovr_GetRenderDesc ovrHmd_GetRenderDesc
#define ovr_RecenterPose ovrHmd_RecenterPose
#define ovr_SubmitFrame ovrHmd_SubmitFrame
#define ovr_ConfigureTracking ovrHmd_ConfigureTracking
#define ovr_GetHmdDesc(X) *X
#endif
#if (OVR_MAJOR_VERSION >= 6)
// A base class for FBO wrappers that need to use the Oculus C
// API to manage textures via ovrHmd_CreateSwapTextureSetGL,
// ovrHmd_CreateMirrorTextureGL, etc
// API to manage textures via ovr_CreateSwapTextureSetGL,
// ovr_CreateMirrorTextureGL, etc
template <typename C>
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
ovrHmd hmd;
@ -73,7 +87,7 @@ struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*
~SwapFramebufferWrapper() {
if (color) {
ovrHmd_DestroySwapTextureSet(hmd, color);
ovr_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
}
@ -86,11 +100,11 @@ struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*
protected:
virtual void initColor() override {
if (color) {
ovrHmd_DestroySwapTextureSet(hmd, color);
ovr_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
if (!OVR_SUCCESS(ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color))) {
if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color))) {
qFatal("Unable to create swap textures");
}
@ -127,7 +141,7 @@ struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
virtual ~MirrorFramebufferWrapper() {
if (color) {
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
ovr_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
}
@ -135,10 +149,10 @@ struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
private:
void initColor() override {
if (color) {
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
ovr_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
ovrResult result = ovr_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
Q_ASSERT(OVR_SUCCESS(result));
}
@ -149,17 +163,47 @@ private:
}
};
#endif
const QString OculusDisplayPlugin::NAME("Oculus Rift");
const QString Oculus_0_6_DisplayPlugin::NAME("Oculus Rift");
uvec2 OculusDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
const QString & Oculus_0_6_DisplayPlugin::getName() const {
void OculusDisplayPlugin::preRender() {
#if (OVR_MAJOR_VERSION >= 6)
ovrFrameTiming ftiming = ovr_GetFrameTiming(_hmd, _frameIndex);
_trackingState = ovr_GetTrackingState(_hmd, ftiming.DisplayMidpointSeconds);
ovr_CalcEyePoses(_trackingState.HeadPose.ThePose, _eyeOffsets, _eyePoses);
#endif
}
glm::mat4 OculusDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
void OculusDisplayPlugin::resetSensors() {
#if (OVR_MAJOR_VERSION >= 6)
ovr_RecenterPose(_hmd);
#endif
}
glm::mat4 OculusDisplayPlugin::getEyePose(Eye eye) const {
return toGlm(_eyePoses[eye]);
}
glm::mat4 OculusDisplayPlugin::getHeadPose() const {
return toGlm(_trackingState.HeadPose.ThePose);
}
const QString & OculusDisplayPlugin::getName() const {
return NAME;
}
bool Oculus_0_6_DisplayPlugin::isSupported() const {
#if (OVR_MAJOR_VERSION == 6)
bool OculusDisplayPlugin::isSupported() const {
#if (OVR_MAJOR_VERSION >= 6)
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
return false;
}
@ -174,27 +218,81 @@ bool Oculus_0_6_DisplayPlugin::isSupported() const {
#endif
}
void OculusDisplayPlugin::init() {
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
qFatal("Could not init OVR");
}
}
#if (OVR_MAJOR_VERSION == 6)
ovrLayerEyeFov& getSceneLayer() {
void OculusDisplayPlugin::deinit() {
ovr_Shutdown();
}
#if (OVR_MAJOR_VERSION >= 6)
ovrLayerEyeFov& OculusDisplayPlugin::getSceneLayer() {
return _sceneLayer;
}
#endif
//static gpu::TexturePointer _texture;
void Oculus_0_6_DisplayPlugin::activate() {
#if (OVR_MAJOR_VERSION == 6)
void OculusDisplayPlugin::activate() {
#if (OVR_MAJOR_VERSION >= 6)
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
qFatal("Failed to Initialize SDK");
}
if (!OVR_SUCCESS(ovrHmd_Create(0, &_hmd))) {
// CONTAINER->getPrimarySurface()->makeCurrent();
#if (OVR_MAJOR_VERSION == 6)
if (!OVR_SUCCESS(ovr_Create(0, &_hmd))) {
#elif (OVR_MAJOR_VERSION == 7)
if (!OVR_SUCCESS(ovr_Create(&_hmd, &_luid))) {
#endif
Q_ASSERT(false);
qFatal("Failed to acquire HMD");
}
OculusBaseDisplayPlugin::activate();
_hmdDesc = ovr_GetHmdDesc(_hmd);
_ipd = ovr_GetFloat(_hmd, OVR_KEY_IPD, _ipd);
glm::uvec2 eyeSizes[2];
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
ovrFovPort combined = _eyeFovs[Left];
combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
_desiredFramebufferSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
_frameIndex = 0;
if (!OVR_SUCCESS(ovr_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
qFatal("Could not attach to sensor device");
}
WindowOpenGLDisplayPlugin::activate();
// Parent class relies on our _hmd initialization, so it must come after that.
ovrLayerEyeFov& sceneLayer = getSceneLayer();
@ -203,7 +301,7 @@ void Oculus_0_6_DisplayPlugin::activate() {
sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
ovr_for_each_eye([&](ovrEyeType eye) {
ovrFovPort & fov = sceneLayer.Fov[eye] = _eyeRenderDescs[eye].Fov;
ovrSizei & size = sceneLayer.Viewport[eye].Size = ovrHmd_GetFovTextureSize(_hmd, eye, fov, 1.0f);
ovrSizei & size = sceneLayer.Viewport[eye].Size = ovr_GetFovTextureSize(_hmd, eye, fov, 1.0f);
sceneLayer.Viewport[eye].Pos = { eye == ovrEye_Left ? 0 : size.w, 0 };
});
// We're rendering both eyes to the same texture, so only one of the
@ -214,17 +312,16 @@ void Oculus_0_6_DisplayPlugin::activate() {
PerformanceTimer::setActive(true);
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
if (!OVR_SUCCESS(ovr_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
qFatal("Could not attach to sensor device");
}
#endif
}
void Oculus_0_6_DisplayPlugin::customizeContext() {
#if (OVR_MAJOR_VERSION == 6)
OculusBaseDisplayPlugin::customizeContext();
void OculusDisplayPlugin::customizeContext() {
WindowOpenGLDisplayPlugin::customizeContext();
#if (OVR_MAJOR_VERSION >= 6)
//_texture = DependencyManager::get<TextureCache>()->
// getImageTexture(PathUtils::resourcesPath() + "/images/cube_texture.png");
uvec2 mirrorSize = toGlm(_window->geometry().size());
@ -236,24 +333,29 @@ void Oculus_0_6_DisplayPlugin::customizeContext() {
#endif
}
void Oculus_0_6_DisplayPlugin::deactivate() {
#if (OVR_MAJOR_VERSION == 6)
void OculusDisplayPlugin::deactivate() {
#if (OVR_MAJOR_VERSION >= 6)
makeCurrent();
_sceneFbo.reset();
_mirrorFbo.reset();
doneCurrent();
PerformanceTimer::setActive(false);
OculusBaseDisplayPlugin::deactivate();
WindowOpenGLDisplayPlugin::deactivate();
ovrHmd_Destroy(_hmd);
ovr_Destroy(_hmd);
_hmd = nullptr;
ovr_Shutdown();
#endif
}
void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
#if (OVR_MAJOR_VERSION == 6)
void OculusDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
static bool inDisplay = false;
if (inDisplay) {
return;
}
inDisplay = true;
#if (OVR_MAJOR_VERSION >= 6)
using namespace oglplus;
// Need to make sure only the display plugin is responsible for
// controlling vsync
@ -263,6 +365,7 @@ void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sc
auto size = _sceneFbo->size;
Context::Viewport(size.x, size.y);
glBindTexture(GL_TEXTURE_2D, finalTexture);
GLenum err = glGetError();
drawUnitQuad();
});
@ -280,17 +383,25 @@ void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sc
the UI visible in the output window (unlikely). This should be done before
_sceneFbo->Increment or we'll be using the wrong texture
*/
//_sceneFbo->Bound(GL_READ_FRAMEBUFFER, [&] {
// glBlitFramebuffer(
// 0, 0, _sceneFbo->size.x, _sceneFbo->size.y,
// 0, 0, windowSize.x, _mirrorFbo.y,
// GL_COLOR_BUFFER_BIT, GL_NEAREST);
//});
_sceneFbo->Bound(Framebuffer::Target::Read, [&] {
glBlitFramebuffer(
0, 0, _sceneFbo->size.x, _sceneFbo->size.y,
0, 0, windowSize.x, windowSize.y,
GL_COLOR_BUFFER_BIT, GL_NEAREST);
});
{
PerformanceTimer("OculusSubmit");
ovrViewScaleDesc viewScaleDesc;
viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
viewScaleDesc.HmdToEyeViewOffset[0] = _eyeOffsets[0];
viewScaleDesc.HmdToEyeViewOffset[1] = _eyeOffsets[1];
ovrLayerHeader* layers = &sceneLayer.Header;
ovrResult result = ovrHmd_SubmitFrame(_hmd, _frameIndex, nullptr, &layers, 1);
ovrResult result = ovr_SubmitFrame(_hmd, 0, &viewScaleDesc, &layers, 1);
if (!OVR_SUCCESS(result)) {
qDebug() << result;
}
}
_sceneFbo->Increment();
@ -299,21 +410,22 @@ void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sc
will contain the post-distorted and fully composited scene regardless of how many layers
we send.
*/
auto mirrorSize = _mirrorFbo->size;
_mirrorFbo->Bound(Framebuffer::Target::Read, [&] {
Context::BlitFramebuffer(
0, mirrorSize.y, mirrorSize.x, 0,
0, 0, windowSize.x, windowSize.y,
BufferSelectBit::ColorBuffer, BlitFilter::Nearest);
});
//auto mirrorSize = _mirrorFbo->size;
//_mirrorFbo->Bound(Framebuffer::Target::Read, [&] {
// Context::BlitFramebuffer(
// 0, mirrorSize.y, mirrorSize.x, 0,
// 0, 0, windowSize.x, windowSize.y,
// BufferSelectBit::ColorBuffer, BlitFilter::Nearest);
//});
++_frameIndex;
#endif
inDisplay = false;
}
// Pass input events on to the application
bool Oculus_0_6_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
#if (OVR_MAJOR_VERSION == 6)
bool OculusDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
#if (OVR_MAJOR_VERSION >= 6)
if (event->type() == QEvent::Resize) {
QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
qDebug() << resizeEvent->size().width() << " x " << resizeEvent->size().height();
@ -323,7 +435,7 @@ bool Oculus_0_6_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
doneCurrent();
}
#endif
return OculusBaseDisplayPlugin::eventFilter(receiver, event);
return WindowOpenGLDisplayPlugin::eventFilter(receiver, event);
}
/*
@ -331,8 +443,8 @@ bool Oculus_0_6_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
However, it should only be done if we can reliably disable v-sync on the mirror surface,
otherwise the swapbuffer delay will interfere with the framerate of the headset
*/
void Oculus_0_6_DisplayPlugin::finishFrame() {
swapBuffers();
void OculusDisplayPlugin::finishFrame() {
//swapBuffers();
doneCurrent();
};

View file

@ -0,0 +1,78 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "../WindowOpenGLDisplayPlugin.h"
#include <QTimer>
#include <OVR_CAPI.h>
class OffscreenGlCanvas;
struct SwapFramebufferWrapper;
struct MirrorFramebufferWrapper;
using SwapFboPtr = QSharedPointer<SwapFramebufferWrapper>;
using MirrorFboPtr = QSharedPointer<MirrorFramebufferWrapper>;
class OculusDisplayPlugin : public WindowOpenGLDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString & getName() const override;
virtual void init() override;
virtual void deinit() override;
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyePose(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
protected:
virtual void preRender() override;
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
virtual void customizeContext() override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
static const QString NAME;
ovrHmd _hmd;
float _ipd{ OVR_DEFAULT_IPD };
unsigned int _frameIndex;
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[3];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
ovrTrackingState _trackingState;
#if (OVR_MAJOR_VERSION >= 6)
ovrLayerEyeFov& getSceneLayer();
ovrHmdDesc _hmdDesc;
SwapFboPtr _sceneFbo;
MirrorFboPtr _mirrorFbo;
ovrLayerEyeFov _sceneLayer;
#endif
#if (OVR_MAJOR_VERSION == 7)
ovrGraphicsLuid _luid;
#endif
};

View file

@ -7,7 +7,7 @@
//
#pragma once
#include <OVR_CAPI.h>
#include <OVR_CAPI_GL.h>
#include <GLMHelpers.h>
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtc/matrix_transform.hpp>
@ -79,14 +79,3 @@ inline ovrQuatf ovrFromGlm(const glm::quat & q) {
return{ q.x, q.y, q.z, q.w };
}
namespace Oculus {
extern ovrHmd _hmd;
extern unsigned int _frameIndex;
extern ovrEyeRenderDesc _eyeRenderDescs[2];
extern ovrPosef _eyePoses[2];
extern ovrVector3f _eyeOffsets[2];
extern ovrFovPort _eyeFovs[2];
extern mat4 _eyeProjections[2];
extern mat4 _compositeEyeProjections[2];
extern uvec2 _desiredFramebufferSize;
}

View file

@ -5,7 +5,7 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Oculus_0_5_DisplayPlugin.h"
#include "OculusLegacyDisplayPlugin.h"
#include <memory>
@ -19,34 +19,66 @@
#include <QGuiApplication>
#include <QScreen>
#include <OVR_CAPI_GL.h>
#include <PerfStat.h>
#include <OglplusHelpers.h>
#include <ViewFrustum.h>
#include "plugins/PluginContainer.h"
#include "OculusHelpers.h"
using namespace Oculus;
ovrTexture _eyeTextures[2];
int _hmdScreen{ -1 };
bool _hswDismissed{ false };
DisplayPlugin* makeOculusDisplayPlugin() {
return new Oculus_0_5_DisplayPlugin();
}
using namespace oglplus;
const QString Oculus_0_5_DisplayPlugin::NAME("Oculus Rift (0.5)");
const QString OculusLegacyDisplayPlugin::NAME("Oculus Rift (0.5)");
const QString & Oculus_0_5_DisplayPlugin::getName() const {
const QString & OculusLegacyDisplayPlugin::getName() const {
return NAME;
}
OculusLegacyDisplayPlugin::OculusLegacyDisplayPlugin() : _ipd(OVR_DEFAULT_IPD) {
}
bool Oculus_0_5_DisplayPlugin::isSupported() const {
uvec2 OculusLegacyDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
void OculusLegacyDisplayPlugin::preRender() {
#if (OVR_MAJOR_VERSION == 5)
ovrHmd_GetEyePoses(_hmd, _frameIndex, _eyeOffsets, _eyePoses, &_trackingState);
ovrHmd_BeginFrame(_hmd, _frameIndex);
#endif
WindowOpenGLDisplayPlugin::preRender();
}
glm::mat4 OculusLegacyDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
void OculusLegacyDisplayPlugin::resetSensors() {
#if (OVR_MAJOR_VERSION == 5)
ovrHmd_RecenterPose(_hmd);
#endif
}
glm::mat4 OculusLegacyDisplayPlugin::getEyePose(Eye eye) const {
#if (OVR_MAJOR_VERSION == 5)
return toGlm(_eyePoses[eye]);
#else
return WindowOpenGLDisplayPlugin::getEyePose(eye);
#endif
}
// Should NOT be used for rendering as this will mess up timewarp. Use the getModelview() method above for
// any use of head poses for rendering, ensuring you use the correct eye
glm::mat4 OculusLegacyDisplayPlugin::getHeadPose() const {
#if (OVR_MAJOR_VERSION == 5)
return toGlm(_trackingState.HeadPose.ThePose);
#else
return WindowOpenGLDisplayPlugin::getHeadPose();
#endif
}
bool OculusLegacyDisplayPlugin::isSupported() const {
#if (OVR_MAJOR_VERSION == 5)
if (!ovr_Initialize(nullptr)) {
return false;
@ -77,7 +109,7 @@ bool Oculus_0_5_DisplayPlugin::isSupported() const {
#endif
}
void Oculus_0_5_DisplayPlugin::activate() {
void OculusLegacyDisplayPlugin::activate() {
#if (OVR_MAJOR_VERSION == 5)
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
@ -89,7 +121,41 @@ void Oculus_0_5_DisplayPlugin::activate() {
qFatal("Failed to acquire HMD");
}
OculusBaseDisplayPlugin::activate();
glm::uvec2 eyeSizes[2];
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
ovrEyeRenderDesc erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
ovrFovPort combined = _eyeFovs[Left];
combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
_desiredFramebufferSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
_frameIndex = 0;
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
qFatal("Could not attach to sensor device");
}
WindowOpenGLDisplayPlugin::activate();
int screen = getHmdScreen();
if (screen != -1) {
CONTAINER->setFullscreen(qApp->screens()[screen]);
@ -118,17 +184,16 @@ void Oculus_0_5_DisplayPlugin::activate() {
}
});
ovrEyeRenderDesc _eyeRenderDescs[ovrEye_Count];
ovrBool result = ovrHmd_ConfigureRendering(_hmd, &config.Config, distortionCaps, _eyeFovs, _eyeRenderDescs);
Q_ASSERT(result);
#endif
}
void Oculus_0_5_DisplayPlugin::deactivate() {
void OculusLegacyDisplayPlugin::deactivate() {
#if (OVR_MAJOR_VERSION == 5)
_window->removeEventFilter(this);
OculusBaseDisplayPlugin::deactivate();
WindowOpenGLDisplayPlugin::deactivate();
QScreen* riftScreen = nullptr;
if (_hmdScreen >= 0) {
@ -142,18 +207,11 @@ void Oculus_0_5_DisplayPlugin::deactivate() {
#endif
}
void Oculus_0_5_DisplayPlugin::preRender() {
#if (OVR_MAJOR_VERSION == 5)
OculusBaseDisplayPlugin::preRender();
ovrHmd_BeginFrame(_hmd, _frameIndex);
#endif
}
void Oculus_0_5_DisplayPlugin::preDisplay() {
void OculusLegacyDisplayPlugin::preDisplay() {
_window->makeCurrent();
}
void Oculus_0_5_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
void OculusLegacyDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
++_frameIndex;
#if (OVR_MAJOR_VERSION == 5)
ovr_for_each_eye([&](ovrEyeType eye) {
@ -164,7 +222,7 @@ void Oculus_0_5_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sc
}
// Pass input events on to the application
bool Oculus_0_5_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
bool OculusLegacyDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
#if (OVR_MAJOR_VERSION == 5)
if (!_hswDismissed && (event->type() == QEvent::KeyPress)) {
static ovrHSWDisplayState hswState;
@ -176,17 +234,19 @@ bool Oculus_0_5_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
}
}
#endif
return OculusBaseDisplayPlugin::eventFilter(receiver, event);
return WindowOpenGLDisplayPlugin::eventFilter(receiver, event);
}
// FIXME mirroring to the main window is difficult on OSX because it requires that we
// trigger a swap, which causes the client to wait for the v-sync of the main screen running
// at 60 Hz. This would introduce judder. Perhaps we can push mirroring to a separate
// thread
void Oculus_0_5_DisplayPlugin::finishFrame() {
// FIXME If we move to the 'batch rendering on a different thread' we can possibly do this.
// however, we need to make sure it doesn't block the event handling.
void OculusLegacyDisplayPlugin::finishFrame() {
_window->doneCurrent();
};
int Oculus_0_5_DisplayPlugin::getHmdScreen() const {
int OculusLegacyDisplayPlugin::getHmdScreen() const {
return _hmdScreen;
}

View file

@ -0,0 +1,63 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "../WindowOpenGLDisplayPlugin.h"
#include <QTimer>
#include <OVR_CAPI.h>
class OculusLegacyDisplayPlugin : public WindowOpenGLDisplayPlugin {
public:
OculusLegacyDisplayPlugin();
virtual bool isSupported() const override;
virtual const QString & getName() const override;
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
virtual int getHmdScreen() const override;
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyePose(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
protected:
virtual void preRender() override;
virtual void preDisplay() override;
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
static const QString NAME;
float _ipd{ OVR_DEFAULT_IPD };
ovrHmd _hmd;
unsigned int _frameIndex;
ovrTrackingState _trackingState;
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[3];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
ovrTexture _eyeTextures[2];
mutable int _hmdScreen{ -1 };
bool _hswDismissed{ false };
};

View file

@ -1,37 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "OculusBaseDisplayPlugin.h"
#include <QTimer>
class Oculus_0_5_DisplayPlugin : public OculusBaseDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString & getName() const override;
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
virtual int getHmdScreen() const override;
protected:
virtual void preRender() override;
virtual void preDisplay() override;
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
static const QString NAME;
};

View file

@ -1,41 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "OculusBaseDisplayPlugin.h"
#include <QTimer>
class OffscreenGlCanvas;
struct SwapFramebufferWrapper;
struct MirrorFramebufferWrapper;
using SwapFboPtr = QSharedPointer<SwapFramebufferWrapper>;
using MirrorFboPtr = QSharedPointer<MirrorFramebufferWrapper>;
class Oculus_0_6_DisplayPlugin : public OculusBaseDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString & getName() const override;
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
protected:
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
virtual void customizeContext() override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
static const QString NAME;
};

View file

@ -128,7 +128,7 @@ void OpenVrDisplayPlugin::activate() {
delete[] buffer;
}
Q_ASSERT(unSize <= 1);
MainWindowOpenGLDisplayPlugin::activate();
WindowOpenGLDisplayPlugin::activate();
}
void OpenVrDisplayPlugin::deactivate() {
@ -141,7 +141,7 @@ void OpenVrDisplayPlugin::deactivate() {
_hmd = nullptr;
}
_compositor = nullptr;
MainWindowOpenGLDisplayPlugin::deactivate();
WindowOpenGLDisplayPlugin::deactivate();
}
uvec2 OpenVrDisplayPlugin::getRecommendedRenderSize() const {
@ -149,19 +149,19 @@ uvec2 OpenVrDisplayPlugin::getRecommendedRenderSize() const {
}
mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) const {
// FIXME hack to ensure that we don't crash trying to get the combined matrix
if (eye == Mono) {
eye = Left;
}
return _eyesData[eye]._projectionMatrix;
}
glm::mat4 OpenVrDisplayPlugin::getModelview(Eye eye, const mat4& baseModelview) const {
return baseModelview * getEyePose(eye);
}
void OpenVrDisplayPlugin::resetSensors() {
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(_trackedDevicePoseMat4[0]));
}
glm::mat4 OpenVrDisplayPlugin::getEyePose(Eye eye) const {
return getHeadPose() * _eyesData[eye]._eyeOffset;
return _eyesData[eye]._eyeOffset * getHeadPose();
}
glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
@ -169,7 +169,7 @@ glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
}
void OpenVrDisplayPlugin::customizeContext() {
MainWindowOpenGLDisplayPlugin::customizeContext();
WindowOpenGLDisplayPlugin::customizeContext();
}
void OpenVrDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {

View file

@ -11,9 +11,9 @@
#if defined(Q_OS_WIN)
#include "../MainWindowOpenGLDisplayPlugin.h"
#include "../WindowOpenGLDisplayPlugin.h"
class OpenVrDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
class OpenVrDisplayPlugin : public WindowOpenGLDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString & getName() const override;
@ -27,7 +27,6 @@ public:
// Stereo specific methods
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
virtual void resetSensors() override;
virtual glm::mat4 getEyePose(Eye eye) const override;

View file

@ -17,6 +17,42 @@
#include <gpu/GLBackend.h>
static const char * INTERLEAVED_TEXTURED_VS = R"VS(#version 410 core
#pragma line __LINE__
in vec3 Position;
in vec2 TexCoord;
out vec2 vTexCoord;
void main() {
gl_Position = vec4(Position, 1);
vTexCoord = TexCoord;
}
)VS";
static const char * INTERLEAVED_TEXTURED_FS = R"FS(#version 410 core
#pragma line __LINE__
uniform sampler2D sampler;
uniform ivec2 textureSize;
in vec2 vTexCoord;
out vec4 FragColor;
void main() {
ivec2 texCoord = ivec2(floor(vTexCoord * textureSize));
texCoord.x /= 2;
int row = int(floor(gl_FragCoord.y));
if (row % 2 > 0) {
texCoord.x += (textureSize.x / 2);
}
FragColor = texelFetch(sampler, texCoord, 0); //texture(sampler, texCoord);
}
)FS";
const QString InterleavedStereoDisplayPlugin::NAME("Interleaved Stereo Display");
const QString & InterleavedStereoDisplayPlugin::getName() const {
@ -29,5 +65,20 @@ InterleavedStereoDisplayPlugin::InterleavedStereoDisplayPlugin() {
void InterleavedStereoDisplayPlugin::customizeContext() {
StereoDisplayPlugin::customizeContext();
// Set up the stencil buffers? Or use a custom shader?
compileProgram(_program, INTERLEAVED_TEXTURED_VS, INTERLEAVED_TEXTURED_FS);
}
glm::uvec2 InterleavedStereoDisplayPlugin::getRecommendedRenderSize() const {
uvec2 result = WindowOpenGLDisplayPlugin::getRecommendedRenderSize();
result.x *= 2;
result.y /= 2;
return result;
}
void InterleavedStereoDisplayPlugin::display(
GLuint finalTexture, const glm::uvec2& sceneSize) {
using namespace oglplus;
_program->Bind();
Uniform<ivec2>(*_program, "textureSize").SetValue(sceneSize);
WindowOpenGLDisplayPlugin::display(finalTexture, sceneSize);
}
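Taken together, getRecommendedRenderSize() and the fragment shader above implement row interleaving from a side-by-side source: the scene is rendered at double width and half height (e.g. a 1920x1080 window gets a 3840x540 render target), and each output row samples either the left or the right half. A plain C++ restatement of the shader's lookup, for clarity:

#include <glm/glm.hpp>

// Hedged restatement of INTERLEAVED_TEXTURED_FS: even output rows read from
// the left half of the side-by-side texture, odd rows from the right half.
glm::ivec2 interleavedTexCoord(const glm::vec2& uv, const glm::ivec2& textureSize, int outputRow) {
    glm::ivec2 texCoord = glm::ivec2(glm::floor(uv * glm::vec2(textureSize)));
    texCoord.x /= 2;                        // compress into one eye's half
    if (outputRow % 2 > 0) {
        texCoord.x += textureSize.x / 2;    // odd rows: right-eye half
    }
    return texCoord;
}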

View file

@ -18,6 +18,9 @@ public:
// initialize OpenGL context settings needed by the plugin
virtual void customizeContext() override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
private:
static const QString NAME;
};

View file

@ -10,14 +10,16 @@
#include <QApplication>
#include <QDesktopWidget>
#include <QScreen>
#include <GlWindow.h>
#include <ViewFrustum.h>
#include <MatrixStack.h>
#include <gpu/GLBackend.h>
#include <plugins/PluginContainer.h>
const QString SideBySideStereoDisplayPlugin::NAME("SBS Stereo Display");
const QString SideBySideStereoDisplayPlugin::NAME("3D TV - Side by Side Stereo");
const QString & SideBySideStereoDisplayPlugin::getName() const {
return NAME;
@ -26,3 +28,10 @@ const QString & SideBySideStereoDisplayPlugin::getName() const {
SideBySideStereoDisplayPlugin::SideBySideStereoDisplayPlugin() {
}
glm::uvec2 SideBySideStereoDisplayPlugin::getRecommendedRenderSize() const {
uvec2 result = WindowOpenGLDisplayPlugin::getRecommendedRenderSize();
result.x *= 2;
return result;
}

View file

@ -9,11 +9,14 @@
#include "StereoDisplayPlugin.h"
class QScreen;
class SideBySideStereoDisplayPlugin : public StereoDisplayPlugin {
Q_OBJECT
public:
SideBySideStereoDisplayPlugin();
virtual const QString & getName() const override;
virtual const QString& getName() const override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
private:
static const QString NAME;
};

View file

@ -10,11 +10,14 @@
#include <QApplication>
#include <QDesktopWidget>
#include <QAction>
#include <gpu/GLBackend.h>
#include <ViewFrustum.h>
#include <MatrixStack.h>
#include <plugins/PluginContainer.h>
#include <QGuiApplication>
#include <QScreen>
StereoDisplayPlugin::StereoDisplayPlugin() {
}
@ -32,6 +35,11 @@ glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProje
// Refer to http://www.nvidia.com/content/gtc-2010/pdfs/2010_gtc2010.pdf on creating
// stereo projection matrices. Do NOT use "toe-in", use translation.
if (eye == Mono) {
// FIXME provide a combined matrix, needed for proper culling
return baseProjection;
}
float nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
float screenZ = 0.25f; // screen projection plane
// FIXME verify this is the right calculation
@ -42,16 +50,44 @@ glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProje
return glm::translate(baseProjection, vec3(frustumshift, 0, 0));
}
glm::mat4 StereoDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
glm::mat4 StereoDisplayPlugin::getEyePose(Eye eye) const {
float modelviewShift = HALF_DEFAULT_IPD;
if (eye == Left) {
modelviewShift = -modelviewShift;
}
return baseModelview * glm::translate(mat4(), vec3(modelviewShift, 0, 0));
return glm::translate(mat4(), vec3(modelviewShift, 0, 0));
}
std::vector<QAction*> _screenActions;
void StereoDisplayPlugin::activate() {
WindowOpenGLDisplayPlugin::activate();
auto screens = qApp->screens();
_screenActions.resize(screens.size());
for (int i = 0; i < screens.size(); ++i) {
auto screen = screens.at(i);
QString name = QString("Screen %1: %2").arg(i + 1).arg(screen->name());
bool checked = false;
if (screen == qApp->primaryScreen()) {
checked = true;
}
auto action = CONTAINER->addMenuItem(MENU_PATH, name,
[this](bool clicked) { updateScreen(); }, true, checked, "Screens");
_screenActions[i] = action;
}
CONTAINER->setFullscreen(qApp->primaryScreen());
// FIXME Add menu items
WindowOpenGLDisplayPlugin::activate();
}
void StereoDisplayPlugin::updateScreen() {
for (int i = 0; i < _screenActions.size(); ++i) {
if (_screenActions[i]->isChecked()) {
CONTAINER->setFullscreen(qApp->screens().at(i));
break;
}
}
}
void StereoDisplayPlugin::deactivate() {
_screenActions.clear();
CONTAINER->unsetFullscreen();
WindowOpenGLDisplayPlugin::deactivate();
}

View file

@ -7,9 +7,9 @@
//
#pragma once
#include "../MainWindowOpenGLDisplayPlugin.h"
#include "../WindowOpenGLDisplayPlugin.h"
class StereoDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
class StereoDisplayPlugin : public WindowOpenGLDisplayPlugin {
Q_OBJECT
public:
StereoDisplayPlugin();
@ -17,8 +17,12 @@ public:
virtual bool isSupported() const override final;
virtual void activate() override;
virtual void deactivate() override;
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
virtual glm::mat4 getEyePose(Eye eye) const override;
protected:
void updateScreen();
float _ipd{ 0.064f };
};

View file

@ -1,4 +1,6 @@
set(TARGET_NAME embedded-webserver)
setup_memory_debugger()
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(Network)
setup_hifi_library(Network)

View file

@ -9,6 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QtCore/QCoreApplication>
#include <QtCore/QDebug>
#include <QtCore/QFile>
#include <QtCore/QFileInfo>
@ -19,16 +20,20 @@
#include "EmbeddedWebserverLogging.h"
#include "HTTPManager.h"
const int SOCKET_ERROR_EXIT_CODE = 2;
const int SOCKET_CHECK_INTERVAL_IN_MS = 30000;
HTTPManager::HTTPManager(quint16 port, const QString& documentRoot, HTTPRequestHandler* requestHandler, QObject* parent) :
QTcpServer(parent),
_documentRoot(documentRoot),
_requestHandler(requestHandler)
_requestHandler(requestHandler),
_port(port)
{
// start listening on the passed port
if (!listen(QHostAddress("0.0.0.0"), port)) {
qCDebug(embeddedwebserver) << "Failed to open HTTP server socket:" << errorString();
return;
}
bindSocket();
_isListeningTimer = new QTimer(this);
connect(_isListeningTimer, &QTimer::timeout, this, &HTTPManager::isTcpServerListening);
_isListeningTimer->start(SOCKET_CHECK_INTERVAL_IN_MS);
}
void HTTPManager::incomingConnection(qintptr socketDescriptor) {
@ -157,3 +162,29 @@ bool HTTPManager::handleHTTPRequest(HTTPConnection* connection, const QUrl& url,
bool HTTPManager::requestHandledByRequestHandler(HTTPConnection* connection, const QUrl& url) {
return _requestHandler && _requestHandler->handleHTTPRequest(connection, url);
}
void HTTPManager::isTcpServerListening() {
if (!isListening()) {
qCWarning(embeddedwebserver) << "Socket on port " << QString::number(_port) << " is no longer listening";
bindSocket();
}
}
bool HTTPManager::bindSocket() {
qCDebug(embeddedwebserver) << "Attempting to bind TCP socket on port " << QString::number(_port);
if (listen(QHostAddress::AnyIPv4, _port)) {
qCDebug(embeddedwebserver) << "TCP socket is listening on" << serverAddress() << "and port" << serverPort();
return true;
} else {
qCritical() << "Failed to open HTTP server socket:" << errorString() << " can't continue";
QMetaObject::invokeMethod(this, "queuedExit", Qt::QueuedConnection);
return false;
}
}
void HTTPManager::queuedExit() {
QCoreApplication::exit(SOCKET_ERROR_EXIT_CODE);
}
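The net effect of these changes is a self-healing listener: the constructor binds once, a 30-second timer re-checks isListening(), bindSocket() retries, and queuedExit() terminates the process with SOCKET_ERROR_EXIT_CODE (2) via the event loop if the port cannot be re-acquired. A minimal usage sketch against the constructor declared in the header below (the port and document root are hypothetical):

// Hypothetical example: serve /var/www/hifi on port 8080 with no custom handler.
// The manager keeps trying to hold the port and exits the process with code 2
// if the socket cannot be re-bound.
auto* server = new HTTPManager(8080, "/var/www/hifi");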

View file

@ -17,6 +17,7 @@
#define hifi_HTTPManager_h
#include <QtNetwork/QTcpServer>
#include <QtCore/QTimer>
class HTTPConnection;
class HTTPSConnection;
@ -35,14 +36,23 @@ public:
HTTPManager(quint16 port, const QString& documentRoot, HTTPRequestHandler* requestHandler = NULL, QObject* parent = 0);
bool handleHTTPRequest(HTTPConnection* connection, const QUrl& url, bool skipSubHandler = false);
private slots:
void isTcpServerListening();
void queuedExit();
private:
bool bindSocket();
protected:
/// Accepts all pending connections
virtual void incomingConnection(qintptr socketDescriptor);
virtual bool requestHandledByRequestHandler(HTTPConnection* connection, const QUrl& url);
protected:
QString _documentRoot;
HTTPRequestHandler* _requestHandler;
QTimer* _isListeningTimer;
const quint16 _port;
};
#endif // hifi_HTTPManager_h

View file

@ -26,4 +26,6 @@ find_package(PolyVox REQUIRED)
target_include_directories(${TARGET_NAME} SYSTEM PUBLIC ${POLYVOX_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${POLYVOX_LIBRARIES})
setup_memory_debugger()
link_hifi_libraries(shared gpu script-engine render render-utils)

View file

@ -38,7 +38,7 @@ void RenderableZoneEntityItem::changeProperties(Lambda setNewProperties) {
_model = getModel();
_needsInitialSimulation = true;
_model->setURL(getCompoundShapeURL(), QUrl(), true, true);
_model->setURL(getCompoundShapeURL());
}
if (oldPosition != getPosition() ||
oldRotation != getRotation() ||
@ -85,7 +85,7 @@ void RenderableZoneEntityItem::initialSimulation() {
void RenderableZoneEntityItem::updateGeometry() {
if (_model && !_model->isActive() && hasCompoundShapeURL()) {
// Since we have a delayload, we need to update the geometry if it has been downloaded
_model->setURL(getCompoundShapeURL(), QUrl(), true);
_model->setURL(getCompoundShapeURL());
}
if (_model && _model->isActive() && _needsInitialSimulation) {
initialSimulation();

View file

@ -1,5 +1,7 @@
set(TARGET_NAME entities)
setup_memory_debugger()
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(Network Script)

View file

@ -25,7 +25,8 @@ AddEntityOperator::AddEntityOperator(EntityTree* tree,
{
// caller must have verified existence of newEntity
assert(_newEntity);
_newEntityBox = _newEntity->getMaximumAACube().clamp(0.0f, (float)TREE_SCALE);
_newEntityBox = _newEntity->getMaximumAACube().clamp((float)(-HALF_TREE_SCALE), (float)HALF_TREE_SCALE);
}
bool AddEntityOperator::preRecursion(OctreeElement* element) {

View file

@ -253,7 +253,7 @@ void EntityItemPropertiesFromScriptValueHonorReadOnly(const QScriptValue &object
// define these inline here so the macros work
inline void EntityItemProperties::setPosition(const glm::vec3& value)
{ _position = glm::clamp(value, 0.0f, (float)TREE_SCALE); _positionChanged = true; }
{ _position = glm::clamp(value, (float)-HALF_TREE_SCALE, (float)HALF_TREE_SCALE); _positionChanged = true; }
inline QDebug operator<<(QDebug debug, const EntityItemProperties& properties) {
debug << "EntityItemProperties[" << "\n";

View file

@ -113,7 +113,7 @@ void EntitySimulation::sortEntitiesThatMoved() {
// External changes to entity position/shape are expected to be sorted outside of the EntitySimulation.
PerformanceTimer perfTimer("sortingEntities");
MovingEntitiesOperator moveOperator(_entityTree);
AACube domainBounds(glm::vec3(0.0f,0.0f,0.0f), (float)TREE_SCALE);
AACube domainBounds(glm::vec3((float)-HALF_TREE_SCALE), (float)TREE_SCALE);
SetOfEntities::iterator itemItr = _entitiesToSort.begin();
while (itemItr != _entitiesToSort.end()) {
EntityItemPointer entity = *itemItr;
@ -195,7 +195,7 @@ void EntitySimulation::changeEntity(EntityItemPointer entity) {
bool wasRemoved = false;
uint32_t dirtyFlags = entity->getDirtyFlags();
if (dirtyFlags & EntityItem::DIRTY_POSITION) {
AACube domainBounds(glm::vec3(0.0f,0.0f,0.0f), (float)TREE_SCALE);
AACube domainBounds(glm::vec3((float)-HALF_TREE_SCALE), (float)TREE_SCALE);
AACube newCube = entity->getMaximumAACube();
if (!domainBounds.touches(newCube)) {
qCDebug(entities) << "Entity " << entity->getEntityItemID() << " moved out of domain bounds.";

Some files were not shown because too many files have changed in this diff.