Commit 3aa1c36f53: Merge branch 'master' of https://github.com/highfidelity/hifi into betterAway
(from a mirror of https://github.com/overte-org/overte.git)
209 changed files with 7220 additions and 4333 deletions
.gitignore (vendored): 2 changes

@@ -45,7 +45,7 @@ gvr-interface/libs/*
 # ignore files for various dev environments
 TAGS
-*.swp
+*.sw[po]
 
 # ignore node files for the console
 node_modules
 
@@ -48,8 +48,7 @@ static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
 Agent::Agent(ReceivedMessage& message) :
     ThreadedAssignment(message),
     _entityEditSender(),
-    _receivedAudioStream(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO,
-        RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES) {
+    _receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES) {
     DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);
 
     ResourceManager::init();

@@ -62,6 +61,7 @@ Agent::Agent(ReceivedMessage& message) :
     DependencyManager::set<recording::Deck>();
     DependencyManager::set<recording::Recorder>();
     DependencyManager::set<RecordingScriptingInterface>();
+    DependencyManager::set<ScriptCache>();
 
     auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
 

@@ -351,6 +351,21 @@ void Agent::setIsAvatar(bool isAvatar) {
         _avatarIdentityTimer->stop();
         delete _avatarIdentityTimer;
         _avatarIdentityTimer = nullptr;
+
+        // The avatar mixer never times out a connection (e.g., based on identity or data packets)
+        // but rather keeps avatars in its list as long as "connected". As a result, clients timeout
+        // when we stop sending identity, but then get woken up again by the mixer itself, which sends
+        // identity packets to everyone. Here we explicitly tell the mixer to kill the entry for us.
+        auto nodeList = DependencyManager::get<NodeList>();
+        auto packetList = NLPacketList::create(PacketType::KillAvatar, QByteArray(), true, true);
+        packetList->write(getSessionUUID().toRfc4122());
+        nodeList->eachMatchingNode(
+            [&](const SharedNodePointer& node)->bool {
+                return node->getType() == NodeType::AvatarMixer && node->getActiveSocket();
+            },
+            [&](const SharedNodePointer& node) {
+                nodeList->sendPacketList(std::move(packetList), *node);
+            });
         }
     }
 }
cmake/externals/oglplus/CMakeLists.txt (vendored): 21 deletions (file deleted)

@@ -1,21 +0,0 @@
-set(EXTERNAL_NAME oglplus)
-string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
-
-include(ExternalProject)
-ExternalProject_Add(
-    ${EXTERNAL_NAME}
-    URL http://hifi-public.s3.amazonaws.com/dependencies/oglplus-0.63.0.zip
-    URL_MD5 de984ab245b185b45c87415c0e052135
-    CONFIGURE_COMMAND ""
-    BUILD_COMMAND ""
-    INSTALL_COMMAND ""
-    LOG_DOWNLOAD 1
-)
-
-# Hide this external target (for ide users)
-set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
-
-ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
-
-set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include ${SOURCE_DIR}/implement CACHE TYPE INTERNAL)
-
cmake/externals/wasapi/CMakeLists.txt (vendored): 4 changes

@@ -6,8 +6,8 @@ if (WIN32)
     include(ExternalProject)
     ExternalProject_Add(
         ${EXTERNAL_NAME}
-        URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi2.zip
-        URL_MD5 272b27bd6c211c45c0c23d4701b63b5e
+        URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi3.zip
+        URL_MD5 1a2433f80a788a54c70f505ff4f43ac1
         CONFIGURE_COMMAND ""
         BUILD_COMMAND ""
         INSTALL_COMMAND ""

@@ -44,13 +44,25 @@ macro(PACKAGE_LIBRARIES_FOR_DEPLOYMENT)
 
         set(QTAUDIO_PATH $<TARGET_FILE_DIR:${TARGET_NAME}>/audio)
 
-        # if present, replace qtaudio_windows.dll with qtaudio_wasapi.dll
-        add_custom_command(
-            TARGET ${TARGET_NAME}
-            POST_BUILD
-            COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windows.dll ( ${CMAKE_COMMAND} -E remove ${QTAUDIO_PATH}/qtaudio_windows.dll && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapi.dll ${QTAUDIO_PATH} && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapi.pdb ${QTAUDIO_PATH} )
-            COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windowsd.dll ( ${CMAKE_COMMAND} -E remove ${QTAUDIO_PATH}/qtaudio_windowsd.dll && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapid.dll ${QTAUDIO_PATH} && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapid.pdb ${QTAUDIO_PATH} )
-        )
+        if (DEPLOY_PACKAGE)
+            # copy qtaudio_wasapi.dll alongside qtaudio_windows.dll, and let the installer resolve
+            add_custom_command(
+                TARGET ${TARGET_NAME}
+                POST_BUILD
+                COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windows.dll ( ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapi.dll ${QTAUDIO_PATH} && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapi.pdb ${QTAUDIO_PATH} )
+                COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windowsd.dll ( ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapid.dll ${QTAUDIO_PATH} && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapid.pdb ${QTAUDIO_PATH} )
+            )
+        elseif (${CMAKE_SYSTEM_VERSION} VERSION_LESS 6.2)
+            # continue using qtaudio_windows.dll on Windows 7
+        else ()
+            # replace qtaudio_windows.dll with qtaudio_wasapi.dll on Windows 8/8.1/10
+            add_custom_command(
+                TARGET ${TARGET_NAME}
+                POST_BUILD
+                COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windows.dll ( ${CMAKE_COMMAND} -E remove ${QTAUDIO_PATH}/qtaudio_windows.dll && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapi.dll ${QTAUDIO_PATH} && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapi.pdb ${QTAUDIO_PATH} )
+                COMMAND if exist ${QTAUDIO_PATH}/qtaudio_windowsd.dll ( ${CMAKE_COMMAND} -E remove ${QTAUDIO_PATH}/qtaudio_windowsd.dll && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapid.dll ${QTAUDIO_PATH} && ${CMAKE_COMMAND} -E copy ${WASAPI_DLL_PATH}/qtaudio_wasapid.pdb ${QTAUDIO_PATH} )
+            )
+        endif ()
+
         endif ()
     endmacro()
@@ -1,21 +0,0 @@
-#
-# Copyright 2015 High Fidelity, Inc.
-# Created by Bradley Austin Davis on 2015/10/10
-#
-# Distributed under the Apache License, Version 2.0.
-# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-#
-macro(TARGET_OGLPLUS)
-    # our OGL plus setup requires glew
-    target_glew()
-
-    # our OGL plus setup requires boostconfig
-    add_dependency_external_projects(boostconfig)
-    find_package(BoostConfig REQUIRED)
-    target_include_directories(${TARGET_NAME} PUBLIC ${BOOSTCONFIG_INCLUDE_DIRS})
-
-
-    add_dependency_external_projects(oglplus)
-    find_package(OGLPLUS REQUIRED)
-    target_include_directories(${TARGET_NAME} PUBLIC ${OGLPLUS_INCLUDE_DIRS})
-endmacro()
@@ -6,6 +6,7 @@
 # See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 #
 macro(TARGET_OPENGL)
+    add_definitions(-DGLEW_STATIC)
     if (APPLE)
         # link in required OS X frameworks and include the right GL headers
         find_library(OpenGL OpenGL)
@@ -1,24 +0,0 @@
-#
-# Try to find OGLPLUS include path.
-# Once done this will define
-#
-# OGLPLUS_INCLUDE_DIRS
-#
-# Created by Bradley Austin Davis on 2015/05/22
-# Copyright 2015 High Fidelity, Inc.
-#
-# Distributed under the Apache License, Version 2.0.
-# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-#
-
-# setup hints for OGLPLUS search
-include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
-hifi_library_search_hints("oglplus")
-
-# locate header
-find_path(OGLPLUS_INCLUDE_DIRS "oglplus/fwd.hpp" HINTS ${OGLPLUS_SEARCH_DIRS})
-
-include(FindPackageHandleStandardArgs)
-find_package_handle_standard_args(OGLPLUS DEFAULT_MSG OGLPLUS_INCLUDE_DIRS)
-
-mark_as_advanced(OGLPLUS_INCLUDE_DIRS OGLPLUS_SEARCH_DIRS)
@@ -23,6 +23,11 @@
 ;Default installation folder
 InstallDir "@CPACK_NSIS_INSTALL_ROOT@\@CPACK_PACKAGE_INSTALL_DIRECTORY@"
 
+;--------------------------------
+;Include WinVer to get Windows version
+
+!include "WinVer.nsh"
+
 ;--------------------------------
 ;General
 ; leverage the UAC NSIS plugin to promote uninstaller to elevated privileges

@@ -600,8 +605,16 @@ Section "-Core installation"
   Delete "$INSTDIR\version"
   Delete "$INSTDIR\xinput1_3.dll"
 
-  ;Delete old Qt files
-  Delete "$INSTDIR\audio\qtaudio_windows.dll"
+  ; The installer includes two different Qt audio plugins.
+  ; On Windows 8 and above, only qtaudio_wasapi.dll should be installed.
+  ; On Windows 7 and below, only qtaudio_windows.dll should be installed.
+  ${If} ${AtLeastWin8}
+    Delete "$INSTDIR\audio\qtaudio_windows.dll"
+    Delete "$INSTDIR\audio\qtaudio_windows.pdb"
+  ${Else}
+    Delete "$INSTDIR\audio\qtaudio_wasapi.dll"
+    Delete "$INSTDIR\audio\qtaudio_wasapi.pdb"
+  ${EndIf}
 
   ; Delete old desktop shortcuts before they were renamed during Sandbox rename
   Delete "$DESKTOP\@PRE_SANDBOX_INTERFACE_SHORTCUT_NAME@.lnk"
interface/resources/html/help.html (new file): 71 additions

@@ -0,0 +1,71 @@
+<!-- Copyright 2016 High Fidelity, Inc. -->
+<html>
+    <head>
+        <meta charset="utf-8"/>
+        <input type="hidden" id="version" value="1"/>
+        <title>Welcome to Interface</title>
+
+        <style>
+            body {
+                background: black;
+                width: 100%;
+                overflow-x: hidden;
+                margin: 0;
+                padding: 0;
+            }
+
+            #kbm_button {
+                position: absolute;
+                left: 70;
+                top: 118;
+                width: 297;
+                height: 80;
+            }
+
+            #hand_controllers_button {
+                position: absolute;
+                left: 367;
+                top: 118;
+                width: 267;
+                height: 80;
+            }
+
+            #game_controller_button {
+                position: absolute;
+                left: 634;
+                top: 118;
+                width: 297;
+                height: 80;
+            }
+
+            #image_area {
+                width: 1024;
+                height: 720;
+                margin: auto;
+                position: absolute;
+                top: 0; left: 0; bottom: 0; right: 0;
+            }
+        </style>
+        <script>
+            function showKbm() {
+                document.getElementById("main_image").setAttribute("src", "img/controls-help-keyboard.png");
+            }
+            function showHandControllers() {
+                document.getElementById("main_image").setAttribute("src", "img/controls-help-vive.png");
+            }
+            function showGameController() {
+                document.getElementById("main_image").setAttribute("src", "img/controls-help-gamepad.png");
+            }
+        </script>
+    </head>
+
+    <body>
+        <div id="image_area">
+            <img id="main_image" src="img/controls-help-keyboard.png" width="1024px" height="720px"></img>
+            <a href="#" id="kbm_button" onmousedown="showKbm()"></a>
+            <a href="#" id="hand_controllers_button" onmousedown="showHandControllers()"></a>
+            <a href="#" id="game_controller_button" onmousedown="showGameController()"></a>
+        </div>
+    </body>
+
+</html>
interface/resources/html/img/controls-help-gamepad.png: new binary file (94 KiB, not shown)
interface/resources/html/img/controls-help-keyboard.png: new binary file (67 KiB, not shown)
interface/resources/html/img/controls-help-vive.png: new binary file (100 KiB, not shown)
|
@ -11,9 +11,12 @@
|
||||||
var POLL_FREQUENCY = 500; // ms
|
var POLL_FREQUENCY = 500; // ms
|
||||||
var MAX_WARNINGS = 3;
|
var MAX_WARNINGS = 3;
|
||||||
var numWarnings = 0;
|
var numWarnings = 0;
|
||||||
|
var isKeyboardRaised = false;
|
||||||
|
var isNumericKeyboard = false;
|
||||||
|
var KEYBOARD_HEIGHT = 200;
|
||||||
|
|
||||||
function shouldRaiseKeyboard() {
|
function shouldRaiseKeyboard() {
|
||||||
if (document.activeElement.nodeName == "INPUT" || document.activeElement.nodeName == "TEXTAREA") {
|
if (document.activeElement.nodeName === "INPUT" || document.activeElement.nodeName === "TEXTAREA") {
|
||||||
return true;
|
return true;
|
||||||
} else {
|
} else {
|
||||||
// check for contenteditable attribute
|
// check for contenteditable attribute
|
||||||
|
@ -27,15 +30,39 @@
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
function shouldSetNumeric() {
|
||||||
|
return document.activeElement.type === "number";
|
||||||
|
};
|
||||||
|
|
||||||
setInterval(function () {
|
setInterval(function () {
|
||||||
var event = shouldRaiseKeyboard() ? "_RAISE_KEYBOARD" : "_LOWER_KEYBOARD";
|
var keyboardRaised = shouldRaiseKeyboard();
|
||||||
if (typeof EventBridge != "undefined") {
|
var numericKeyboard = shouldSetNumeric();
|
||||||
EventBridge.emitWebEvent(event);
|
|
||||||
} else {
|
if (keyboardRaised !== isKeyboardRaised || numericKeyboard !== isNumericKeyboard) {
|
||||||
if (numWarnings < MAX_WARNINGS) {
|
|
||||||
console.log("WARNING: no global EventBridge object found");
|
if (typeof EventBridge !== "undefined") {
|
||||||
numWarnings++;
|
EventBridge.emitWebEvent(
|
||||||
|
keyboardRaised ? ("_RAISE_KEYBOARD" + (numericKeyboard ? "_NUMERIC" : "")) : "_LOWER_KEYBOARD"
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
if (numWarnings < MAX_WARNINGS) {
|
||||||
|
console.log("WARNING: no global EventBridge object found");
|
||||||
|
numWarnings++;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!isKeyboardRaised) {
|
||||||
|
var delta = document.activeElement.getBoundingClientRect().bottom + 10
|
||||||
|
- (document.body.clientHeight - KEYBOARD_HEIGHT);
|
||||||
|
if (delta > 0) {
|
||||||
|
setTimeout(function () {
|
||||||
|
document.body.scrollTop += delta;
|
||||||
|
}, 500); // Allow time for keyboard to be raised in QML.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
isKeyboardRaised = keyboardRaised;
|
||||||
|
isNumericKeyboard = numericKeyboard;
|
||||||
}
|
}
|
||||||
}, POLL_FREQUENCY);
|
}, POLL_FREQUENCY);
|
||||||
})();
|
})();
|
||||||
|
|
|
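The new polling logic above only talks to the host application through the injected EventBridge object. A quick way to observe what it emits outside of Interface is to stub that object in an ordinary browser console; the stub below is a sketch for illustration and is not part of the repository (only the event names come from the diff above).

// Hypothetical stand-in for the EventBridge that Interface injects; it only logs.
window.EventBridge = {
    emitWebEvent: function (event) {
        console.log("web event:", event);
    }
};
// With the polling script loaded, focusing an <input type="number"> should log
// "_RAISE_KEYBOARD_NUMERIC" within one POLL_FREQUENCY (500 ms), focusing a plain text
// input should log "_RAISE_KEYBOARD", and blurring the field should log "_LOWER_KEYBOARD".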
@@ -15,6 +15,7 @@ import "styles"
 import "windows"
 import "hifi"
 import "hifi/toolbars"
+import "styles-uit" as HifiStyles
 import "controls-uit" as HifiControls
 
 Window {

@@ -58,21 +59,31 @@ Window {
         }
         addressLine.text = targetString;
         toggleOrGo(true);
+        clearAddressLineTimer.start();
     }
     property var allStories: [];
     property int cardWidth: 200;
     property int cardHeight: 152;
     property string metaverseBase: addressBarDialog.metaverseServerUrl + "/api/v1/";
+    property bool isCursorVisible: false  // Override default cursor visibility.
 
     AddressBarDialog {
         id: addressBarDialog
+
+        property bool keyboardRaised: false
+        property bool punctuationMode: false
+
         implicitWidth: backgroundImage.width
-        implicitHeight: backgroundImage.height
+        implicitHeight: backgroundImage.height + (keyboardRaised ? 200 : 0)
 
         // The buttons have their button state changed on hover, so we have to manually fix them up here
         onBackEnabledChanged: backArrow.buttonState = addressBarDialog.backEnabled ? 1 : 0;
         onForwardEnabledChanged: forwardArrow.buttonState = addressBarDialog.forwardEnabled ? 1 : 0;
         onReceivedHifiSchemeURL: resetAfterTeleport();
+
+        // Update location after using back and forward buttons.
+        onMetaverseServerUrlChanged: updateLocationTextTimer.start();
 
         ListModel { id: suggestions }
 
         ListView {

@@ -168,7 +179,24 @@ Window {
             }
         }
 
-        // FIXME replace with TextField
+        HifiStyles.RalewayLight {
+            id: notice;
+            font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.50;
+            anchors {
+                top: parent.top
+                topMargin: parent.inputAreaStep + 12
+                left: addressLine.left
+                right: addressLine.right
+            }
+        }
+        HifiStyles.FiraSansRegular {
+            id: location;
+            font.pixelSize: addressLine.font.pixelSize;
+            color: "gray";
+            clip: true;
+            anchors.fill: addressLine;
+            visible: addressLine.text.length === 0
+        }
         TextInput {
             id: addressLine
             focus: true

@@ -179,20 +207,57 @@ Window {
                 right: parent.right
                 leftMargin: forwardArrow.width
                 rightMargin: forwardArrow.width / 2
-                topMargin: parent.inputAreaStep + hifi.layout.spacing
-                bottomMargin: parent.inputAreaStep + hifi.layout.spacing
+                topMargin: parent.inputAreaStep + (2 * hifi.layout.spacing)
+                bottomMargin: parent.inputAreaStep
             }
             font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.75
-            helperText: "Go to: place, @user, /path, network address"
-            helperPixelSize: font.pixelSize * 0.75
-            helperItalic: true
-            onTextChanged: filterChoicesByText()
+            cursorVisible: false
+            onTextChanged: {
+                filterChoicesByText();
+                updateLocationText(text.length > 0);
+                if (!isCursorVisible && text.length > 0) {
+                    isCursorVisible = true;
+                    cursorVisible = true;
+                }
+            }
+            onActiveFocusChanged: {
+                cursorVisible = isCursorVisible;
+            }
+            MouseArea {
+                // If user clicks in address bar show cursor to indicate ability to enter address.
+                anchors.fill: parent
+                onClicked: {
+                    isCursorVisible = true;
+                    parent.cursorVisible = true;
+                }
+            }
+        }
+    }
+
+    Timer {
+        // Delay updating location text a bit to avoid flicker of content and so that connection status is valid.
+        id: updateLocationTextTimer
+        running: false
+        interval: 500  // ms
+        repeat: false
+        onTriggered: updateLocationText(false);
+    }
+
+    Timer {
+        // Delay clearing address line so as to avoid flicker of "not connected" being displayed after entering an address.
+        id: clearAddressLineTimer
+        running: false
+        interval: 100  // ms
+        repeat: false
+        onTriggered: {
+            addressLine.text = "";
+            isCursorVisible = false;
         }
     }
 
     Window {
-        width: 938;
-        height: 625;
+        width: 938
+        height: 625
         scale: 0.8  // Reset scale of Window to 1.0 (counteract address bar's scale value of 1.25)
         HifiControls.WebView {
             anchors.fill: parent;

@@ -209,6 +274,35 @@ Window {
                 horizontalCenter: scroll.horizontalCenter;
             }
         }
+
+        // virtual keyboard, letters
+        HifiControls.Keyboard {
+            id: keyboard1
+            y: parent.keyboardRaised ? parent.height : 0
+            height: parent.keyboardRaised ? 200 : 0
+            visible: parent.keyboardRaised && !parent.punctuationMode
+            enabled: parent.keyboardRaised && !parent.punctuationMode
+            anchors.right: parent.right
+            anchors.rightMargin: 0
+            anchors.left: parent.left
+            anchors.leftMargin: 0
+            anchors.bottom: parent.bottom
+            anchors.bottomMargin: 0
+        }
+
+        HifiControls.KeyboardPunctuation {
+            id: keyboard2
+            y: parent.keyboardRaised ? parent.height : 0
+            height: parent.keyboardRaised ? 200 : 0
+            visible: parent.keyboardRaised && parent.punctuationMode
+            enabled: parent.keyboardRaised && parent.punctuationMode
+            anchors.right: parent.right
+            anchors.rightMargin: 0
+            anchors.left: parent.left
+            anchors.leftMargin: 0
+            anchors.bottom: parent.bottom
+            anchors.bottomMargin: 0
+        }
     }
 
     function getRequest(url, cb) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects.

@@ -344,12 +438,23 @@ Window {
         });
     }
 
-    onVisibleChanged: {
-        if (visible) {
-            addressLine.forceActiveFocus()
-            fillDestinations();
+    function updateLocationText(enteringAddress) {
+        if (enteringAddress) {
+            notice.text = "Go to a place, @user, path or network address";
+            notice.color = "gray";
         } else {
-            addressLine.text = ""
+            notice.text = AddressManager.isConnected ? "Your location:" : "Not Connected";
+            notice.color = AddressManager.isConnected ? "gray" : "crimson";
+            // Display hostname, which includes ip address, localhost, and other non-placenames.
+            location.text = (AddressManager.hostname || '') + (AddressManager.pathname ? AddressManager.pathname.match(/\/[^\/]+/)[0] : '');
+        }
+    }
+
+    onVisibleChanged: {
+        updateLocationText(false);
+        if (visible) {
+            addressLine.forceActiveFocus();
+            fillDestinations();
         }
     }
 

@@ -365,11 +470,13 @@ Window {
         case Qt.Key_Escape:
         case Qt.Key_Back:
             root.shown = false
+            clearAddressLineTimer.start();
            event.accepted = true
             break
         case Qt.Key_Enter:
         case Qt.Key_Return:
             toggleOrGo()
+            clearAddressLineTimer.start();
             event.accepted = true
             break
     }
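The location line set by updateLocationText() keeps only the first segment of AddressManager.pathname. A small stand-alone JavaScript illustration of that regex, with invented values, behaves like this:

// hostname and pathname here are made-up example values, not data from the commit.
var hostname = "example.place";
var pathname = "/sandbox/12.5,3.2,7.8";
// /\/[^\/]+/ matches "/" followed by everything up to the next "/", so only "/sandbox" is kept.
var locationText = (hostname || '') + (pathname ? pathname.match(/\/[^\/]+/)[0] : '');
console.log(locationText); // "example.place/sandbox"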
@@ -1,5 +1,6 @@
 import QtQuick 2.5
 import QtQuick.Controls 1.2
+import QtWebChannel 1.0
 import QtWebEngine 1.2
 
 import "controls-uit"

@@ -19,6 +20,9 @@ ScrollingWindow {
     property variant permissionsBar: {'securityOrigin':'none','feature':'none'}
     property alias url: webview.url
     property alias webView: webview
+
+    property alias eventBridge: eventBridgeWrapper.eventBridge
+
     x: 100
     y: 100
 

@@ -130,10 +134,11 @@ ScrollingWindow {
                 case Qt.Key_Return:
                     event.accepted = true
                     if (text.indexOf("http") != 0) {
-                        text = "http://" + text
+                        text = "http://" + text;
                     }
                     root.hidePermissionsBar();
-                    webview.url = text
+                    root.keyboardRaised = false;
+                    webview.url = text;
                     break;
             }
         }

@@ -197,32 +202,60 @@ ScrollingWindow {
             }
         }
 
-        WebEngineView {
+        WebView {
             id: webview
             url: "https://highfidelity.com"
+
+            property alias eventBridgeWrapper: eventBridgeWrapper
+
+            QtObject {
+                id: eventBridgeWrapper
+                WebChannel.id: "eventBridgeWrapper"
+                property var eventBridge;
+            }
+
+            webChannel.registeredObjects: [eventBridgeWrapper]
+
+            // Create a global EventBridge object for raiseAndLowerKeyboard.
+            WebEngineScript {
+                id: createGlobalEventBridge
+                sourceCode: eventBridgeJavaScriptToInject
+                injectionPoint: WebEngineScript.DocumentCreation
+                worldId: WebEngineScript.MainWorld
+            }
+
+            // Detect when may want to raise and lower keyboard.
+            WebEngineScript {
+                id: raiseAndLowerKeyboard
+                injectionPoint: WebEngineScript.Deferred
+                sourceUrl: resourceDirectoryUrl + "/html/raiseAndLowerKeyboard.js"
+                worldId: WebEngineScript.MainWorld
+            }
+
+            userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard ]
+
             anchors.top: buttons.bottom
             anchors.topMargin: 8
             anchors.bottom: parent.bottom
             anchors.left: parent.left
             anchors.right: parent.right
 
             onFeaturePermissionRequested: {
                 permissionsBar.securityOrigin = securityOrigin;
                 permissionsBar.feature = feature;
                 root.showPermissionsBar();
             }
 
             onLoadingChanged: {
                 if (loadRequest.status === WebEngineView.LoadSucceededStatus) {
                     addressBar.text = loadRequest.url
                 }
             }
 
             onIconChanged: {
                 console.log("New icon: " + icon)
             }
-            onNewViewRequested: {
-                var component = Qt.createComponent("Browser.qml");
-                var newWindow = component.createObject(desktop);
-                request.openIn(newWindow.webView)
-            }
             onWindowCloseRequested: {
                 root.destroy();
             }

@@ -230,8 +263,6 @@ ScrollingWindow {
             Component.onCompleted: {
                 desktop.initWebviewProfileHandlers(webview.profile)
             }
 
-            profile: desktop.browserProfile
-
         }
     } // item
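The createGlobalEventBridge script above injects eventBridgeJavaScriptToInject at document creation, but that source is not part of this commit. A plausible sketch of what such a script can do, using Qt's standard qwebchannel.js API to expose the registered eventBridgeWrapper object as a page-global EventBridge, is shown below; the wiring itself is an assumption, only the object names mirror the QML above.

// Hypothetical injected script; assumes qwebchannel.js has been loaded so QWebChannel exists.
var EventBridge;
new QWebChannel(qt.webChannelTransport, function (channel) {
    // channel.objects contains every object listed in webChannel.registeredObjects.
    EventBridge = channel.objects.eventBridgeWrapper.eventBridge;
});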
@@ -66,6 +66,24 @@ Windows.ScrollingWindow {
             anchors.fill: parent
             focus: true
             webChannel.registeredObjects: [eventBridgeWrapper]
+
+            // Create a global EventBridge object for raiseAndLowerKeyboard.
+            WebEngineScript {
+                id: createGlobalEventBridge
+                sourceCode: eventBridgeJavaScriptToInject
+                injectionPoint: WebEngineScript.DocumentCreation
+                worldId: WebEngineScript.MainWorld
+            }
+
+            // Detect when may want to raise and lower keyboard.
+            WebEngineScript {
+                id: raiseAndLowerKeyboard
+                injectionPoint: WebEngineScript.Deferred
+                sourceUrl: resourceDirectoryUrl + "/html/raiseAndLowerKeyboard.js"
+                worldId: WebEngineScript.MainWorld
+            }
+
+            userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard ]
         }
     }
 }
@@ -19,6 +19,7 @@ import "windows"
 import "controls-uit"
 import "styles-uit"
 
+
 ScrollingWindow {
     id: toolWindow
     resizable: true

@@ -47,92 +48,101 @@ ScrollingWindow {
         property alias y: toolWindow.y
     }
 
-    TabView {
-        id: tabView;
-        width: pane.contentWidth
-        height: pane.scrollHeight // Pane height so that don't use Window's scrollbars otherwise tabs may be scrolled out of view.
-        property int tabCount: 0
-
-        Repeater {
-            model: 4
-            Tab {
-                // Force loading of the content even if the tab is not visible
-                // (required for letting the C++ code access the webview)
-                active: true
-                enabled: false
-                property string originalUrl: "";
-
-                WebView {
-                    id: webView;
-                    anchors.fill: parent
-                    enabled: false
-                    property alias eventBridgeWrapper: eventBridgeWrapper
-                    QtObject {
-                        id: eventBridgeWrapper
-                        WebChannel.id: "eventBridgeWrapper"
-                        property var eventBridge;
-                    }
-                    webChannel.registeredObjects: [eventBridgeWrapper]
-                    onEnabledChanged: toolWindow.updateVisiblity();
-                }
-            }
-        }
-
-        style: TabViewStyle {
-
-            frame: Rectangle { // Background shown before content loads.
-                anchors.fill: parent
-                color: hifi.colors.baseGray
-            }
-
-            frameOverlap: 0
-
-            tab: Rectangle {
-                implicitWidth: text.width
-                implicitHeight: 3 * text.height
-                color: styleData.selected ? hifi.colors.black : hifi.colors.tabBackgroundDark
-
-                RalewayRegular {
-                    id: text
-                    text: styleData.title
-                    font.capitalization: Font.AllUppercase
-                    size: hifi.fontSizes.tabName
-                    width: tabView.tabCount > 1 ? styleData.availableWidth / tabView.tabCount : implicitWidth + 2 * hifi.dimensions.contentSpacing.x
-                    elide: Text.ElideRight
-                    color: styleData.selected ? hifi.colors.primaryHighlight : hifi.colors.lightGrayText
-                    horizontalAlignment: Text.AlignHCenter
-                    anchors.centerIn: parent
-                }
-
-                Rectangle { // Separator.
-                    width: 1
-                    height: parent.height
-                    color: hifi.colors.black
-                    anchors.left: parent.left
-                    anchors.top: parent.top
-                    visible: styleData.index > 0
-
-                    Rectangle {
-                        width: 1
-                        height: 1
-                        color: hifi.colors.baseGray
-                        anchors.left: parent.left
-                        anchors.bottom: parent.bottom
-                    }
-                }
-
-                Rectangle { // Active underline.
-                    width: parent.width - (styleData.index > 0 ? 1 : 0)
-                    height: 1
-                    anchors.right: parent.right
-                    anchors.bottom: parent.bottom
-                    color: styleData.selected ? hifi.colors.primaryHighlight : hifi.colors.baseGray
-                }
-            }
-
-            tabOverlap: 0
-        }
-    }
+    Item {
+        id: toolWindowTabViewItem
+        height: pane.scrollHeight
+        width: pane.contentWidth
+        anchors.left: parent.left
+        anchors.top: parent.top
+
+        TabView {
+            id: tabView
+            width: pane.contentWidth
+            // Pane height so that don't use Window's scrollbars otherwise tabs may be scrolled out of view.
+            height: pane.scrollHeight
+            property int tabCount: 0
+
+            Repeater {
+                model: 4
+                Tab {
+                    // Force loading of the content even if the tab is not visible
+                    // (required for letting the C++ code access the webview)
+                    active: true
+                    enabled: false
+                    property string originalUrl: ""
+
+                    WebView {
+                        id: webView
+                        anchors.fill: parent
+                        enabled: false
+                        property alias eventBridgeWrapper: eventBridgeWrapper
+
+                        QtObject {
+                            id: eventBridgeWrapper
+                            WebChannel.id: "eventBridgeWrapper"
+                            property var eventBridge
+                        }
+
+                        webChannel.registeredObjects: [eventBridgeWrapper]
+                        onEnabledChanged: toolWindow.updateVisiblity()
+                    }
+                }
+            }
+
+            style: TabViewStyle {
+
+                frame: Rectangle { // Background shown before content loads.
+                    anchors.fill: parent
+                    color: hifi.colors.baseGray
+                }
+
+                frameOverlap: 0
+
+                tab: Rectangle {
+                    implicitWidth: text.width
+                    implicitHeight: 3 * text.height
+                    color: styleData.selected ? hifi.colors.black : hifi.colors.tabBackgroundDark
+
+                    RalewayRegular {
+                        id: text
+                        text: styleData.title
+                        font.capitalization: Font.AllUppercase
+                        size: hifi.fontSizes.tabName
+                        width: tabView.tabCount > 1 ? styleData.availableWidth / tabView.tabCount : implicitWidth + 2 * hifi.dimensions.contentSpacing.x
+                        elide: Text.ElideRight
+                        color: styleData.selected ? hifi.colors.primaryHighlight : hifi.colors.lightGrayText
+                        horizontalAlignment: Text.AlignHCenter
+                        anchors.centerIn: parent
+                    }
+
+                    Rectangle { // Separator.
+                        width: 1
+                        height: parent.height
+                        color: hifi.colors.black
+                        anchors.left: parent.left
+                        anchors.top: parent.top
+                        visible: styleData.index > 0
+
+                        Rectangle {
+                            width: 1
+                            height: 1
+                            color: hifi.colors.baseGray
+                            anchors.left: parent.left
+                            anchors.bottom: parent.bottom
+                        }
+                    }
+
+                    Rectangle { // Active underline.
+                        width: parent.width - (styleData.index > 0 ? 1 : 0)
+                        height: 1
+                        anchors.right: parent.right
+                        anchors.bottom: parent.bottom
+                        color: styleData.selected ? hifi.colors.primaryHighlight : hifi.colors.baseGray
+                    }
+                }
+
+                tabOverlap: 0
+            }
+        }
+    }

@@ -224,7 +234,6 @@ ScrollingWindow {
             return;
         }
 
-
         if (properties.width) {
             tabView.width = Math.min(Math.max(tabView.width, properties.width), toolWindow.maxSize.x);
         }
@@ -9,7 +9,7 @@
 //
 
 import QtQuick 2.5
-import QtWebEngine 1.1
+import QtWebEngine 1.2
 
 WebEngineView {
     id: root
@@ -109,8 +109,13 @@ Column {
     }
 
     MouseArea {
+        // Events are propogated so that any active control is defocused.
         anchors.fill: parent
-        onClicked: toggleCollapsed()
+        propagateComposedEvents: true
+        onPressed: {
+            toggleCollapsed();
+            mouse.accepted = false;
+        }
     }
 }
 
@@ -304,13 +304,13 @@ Item {
         Key {
             id: key31
             width: 43
-            glyph: ","
+            glyph: "_"
         }
 
         Key {
             id: key33
             width: 43
-            glyph: "."
+            glyph: "?"
         }
 
         Key {

@@ -208,49 +208,49 @@ Item {
         Key {
             id: key22
             width: 43
-            glyph: "_"
+            glyph: ","
         }
 
         Key {
             id: key23
             width: 43
-            glyph: ";"
+            glyph: "."
         }
 
         Key {
             id: key24
             width: 43
-            glyph: ":"
+            glyph: ";"
         }
 
         Key {
             id: key25
             width: 43
-            glyph: "'"
+            glyph: ":"
         }
 
         Key {
             id: key26
             width: 43
-            glyph: "\""
+            glyph: "'"
         }
 
         Key {
             id: key31
             width: 43
-            glyph: "<"
+            glyph: "\""
         }
 
         Key {
             id: key33
             width: 43
-            glyph: ">"
+            glyph: "<"
         }
 
         Key {
             id: key36
             width: 43
-            glyph: "?"
+            glyph: ">"
         }
 
     }
@@ -1,6 +1,7 @@
 import QtQuick 2.5
 import QtWebEngine 1.1
 import QtWebChannel 1.0
+import "../controls-uit" as HiFiControls
 
 Item {
     property alias url: root.url

@@ -105,7 +106,7 @@ Item {
     }
 
     // virtual keyboard, letters
-    Keyboard {
+    HiFiControls.Keyboard {
         id: keyboard1
         y: keyboardRaised ? parent.height : 0
         height: keyboardRaised ? 200 : 0

@@ -119,7 +120,7 @@ Item {
         anchors.bottomMargin: 0
     }
 
-    KeyboardPunctuation {
+    HiFiControls.KeyboardPunctuation {
         id: keyboard2
         y: keyboardRaised ? parent.height : 0
         height: keyboardRaised ? 200 : 0
|
@ -22,6 +22,7 @@ ModalWindow {
|
||||||
implicitWidth: 640;
|
implicitWidth: 640;
|
||||||
implicitHeight: 320;
|
implicitHeight: 320;
|
||||||
visible: true;
|
visible: true;
|
||||||
|
keyboardEnabled: false // Disable ModalWindow's keyboard.
|
||||||
|
|
||||||
signal selected(var result);
|
signal selected(var result);
|
||||||
signal canceled();
|
signal canceled();
|
||||||
|
@ -50,6 +51,10 @@ ModalWindow {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
property bool keyboardRaised: false
|
||||||
|
property bool punctuationMode: false
|
||||||
|
onKeyboardRaisedChanged: d.resize();
|
||||||
|
|
||||||
property var warning: "";
|
property var warning: "";
|
||||||
property var result;
|
property var result;
|
||||||
|
|
||||||
|
@ -110,7 +115,9 @@ ModalWindow {
|
||||||
var targetWidth = Math.max(titleWidth, pane.width);
|
var targetWidth = Math.max(titleWidth, pane.width);
|
||||||
var targetHeight = (textField.visible ? textField.controlHeight + hifi.dimensions.contentSpacing.y : 0) +
|
var targetHeight = (textField.visible ? textField.controlHeight + hifi.dimensions.contentSpacing.y : 0) +
|
||||||
(extraInputs.visible ? extraInputs.height + hifi.dimensions.contentSpacing.y : 0) +
|
(extraInputs.visible ? extraInputs.height + hifi.dimensions.contentSpacing.y : 0) +
|
||||||
(buttons.height + 3 * hifi.dimensions.contentSpacing.y);
|
(buttons.height + 3 * hifi.dimensions.contentSpacing.y) +
|
||||||
|
(root.keyboardRaised ? (200 + hifi.dimensions.contentSpacing.y) : 0);
|
||||||
|
|
||||||
root.width = (targetWidth < d.minWidth) ? d.minWidth : ((targetWidth > d.maxWdith) ? d.maxWidth : targetWidth);
|
root.width = (targetWidth < d.minWidth) ? d.minWidth : ((targetWidth > d.maxWdith) ? d.maxWidth : targetWidth);
|
||||||
root.height = (targetHeight < d.minHeight) ? d.minHeight : ((targetHeight > d.maxHeight) ?
|
root.height = (targetHeight < d.minHeight) ? d.minHeight : ((targetHeight > d.maxHeight) ?
|
||||||
d.maxHeight : targetHeight);
|
d.maxHeight : targetHeight);
|
||||||
|
@ -130,7 +137,6 @@ ModalWindow {
|
||||||
left: parent.left;
|
left: parent.left;
|
||||||
right: parent.right;
|
right: parent.right;
|
||||||
margins: 0;
|
margins: 0;
|
||||||
bottomMargin: hifi.dimensions.contentSpacing.y;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME make a text field type that can be bound to a history for autocompletion
|
// FIXME make a text field type that can be bound to a history for autocompletion
|
||||||
|
@ -142,7 +148,43 @@ ModalWindow {
|
||||||
anchors {
|
anchors {
|
||||||
left: parent.left;
|
left: parent.left;
|
||||||
right: parent.right;
|
right: parent.right;
|
||||||
bottom: parent.bottom;
|
bottom: keyboard.top;
|
||||||
|
bottomMargin: hifi.dimensions.contentSpacing.y;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Item {
|
||||||
|
id: keyboard
|
||||||
|
|
||||||
|
height: keyboardRaised ? 200 : 0
|
||||||
|
|
||||||
|
anchors {
|
||||||
|
left: parent.left
|
||||||
|
right: parent.right
|
||||||
|
bottom: parent.bottom
|
||||||
|
bottomMargin: keyboardRaised ? hifi.dimensions.contentSpacing.y : 0
|
||||||
|
}
|
||||||
|
|
||||||
|
Keyboard {
|
||||||
|
id: keyboard1
|
||||||
|
visible: keyboardRaised && !punctuationMode
|
||||||
|
enabled: keyboardRaised && !punctuationMode
|
||||||
|
anchors {
|
||||||
|
left: parent.left
|
||||||
|
right: parent.right
|
||||||
|
bottom: parent.bottom
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
KeyboardPunctuation {
|
||||||
|
id: keyboard2
|
||||||
|
visible: keyboardRaised && punctuationMode
|
||||||
|
enabled: keyboardRaised && punctuationMode
|
||||||
|
anchors {
|
||||||
|
left: parent.left
|
||||||
|
right: parent.right
|
||||||
|
bottom: parent.bottom
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -27,7 +27,7 @@ ModalWindow {
     id: root
     resizable: true
     implicitWidth: 480
-    implicitHeight: 360
+    implicitHeight: 360 + (fileDialogItem.keyboardRaised ? 200 + hifi.dimensions.contentSpacing.y : 0)
 
     minSize: Qt.vector2d(360, 240)
     draggable: true

@@ -100,16 +100,23 @@ ModalWindow {
     }
 
     Item {
+        id: fileDialogItem
         clip: true
         width: pane.width
         height: pane.height
         anchors.margins: 0
 
+        property bool keyboardRaised: false
+        property bool punctuationMode: false
+
         MouseArea {
             // Clear selection when click on internal unused area.
             anchors.fill: parent
             drag.target: root
-            onClicked: d.clearSelection()
+            onClicked: {
+                d.clearSelection();
+                frame.forceActiveFocus();  // Defocus text field so that the keyboard gets hidden.
+            }
         }
 
         Row {

@@ -619,7 +626,7 @@ ModalWindow {
                 left: parent.left
                 right: selectionType.visible ? selectionType.left: parent.right
                 rightMargin: selectionType.visible ? hifi.dimensions.contentSpacing.x : 0
-                bottom: buttonRow.top
+                bottom: keyboard1.top
                 bottomMargin: hifi.dimensions.contentSpacing.y
             }
             readOnly: !root.saveDialog

@@ -640,6 +647,28 @@
             KeyNavigation.right: openButton
         }
 
+        Keyboard {
+            id: keyboard1
+            height: parent.keyboardRaised ? 200 : 0
+            visible: parent.keyboardRaised && !parent.punctuationMode
+            enabled: visible
+            anchors.right: parent.right
+            anchors.left: parent.left
+            anchors.bottom: buttonRow.top
+            anchors.bottomMargin: visible ? hifi.dimensions.contentSpacing.y : 0
+        }
+
+        KeyboardPunctuation {
+            id: keyboard2
+            height: parent.keyboardRaised ? 200 : 0
+            visible: parent.keyboardRaised && parent.punctuationMode
+            enabled: visible
+            anchors.right: parent.right
+            anchors.left: parent.left
+            anchors.bottom: buttonRow.top
+            anchors.bottomMargin: visible ? hifi.dimensions.contentSpacing.y : 0
+        }
+
         Row {
             id: buttonRow
             anchors {
@@ -97,9 +97,9 @@ ScrollingWindow {
 
     footer: Row {
         anchors {
-            right: parent.right;
+            top: parent.top
+            right: parent.right
             rightMargin: hifi.dimensions.contentMargin.x
-            verticalCenter: parent.verticalCenter
         }
         spacing: hifi.dimensions.contentSpacing.x
 
@@ -53,11 +53,17 @@ ModalWindow {
     }
 
     Item {
+        id: modalWindowItem
         clip: true
         width: pane.width
         height: pane.height
         anchors.margins: 0
 
+        property bool keyboardRaised: false
+        property bool punctuationMode: false
+
+        onKeyboardRaisedChanged: d.resize();
+
         QtObject {
             id: d
             readonly property int minWidth: 480

@@ -69,14 +75,14 @@ ModalWindow {
                 var targetWidth = Math.max(titleWidth, pane.width)
                 var targetHeight = (items ? comboBox.controlHeight : textResult.controlHeight) + 5 * hifi.dimensions.contentSpacing.y + buttons.height
                 root.width = (targetWidth < d.minWidth) ? d.minWidth : ((targetWidth > d.maxWdith) ? d.maxWidth : targetWidth)
-                root.height = (targetHeight < d.minHeight) ? d.minHeight: ((targetHeight > d.maxHeight) ? d.maxHeight : targetHeight)
+                root.height = ((targetHeight < d.minHeight) ? d.minHeight: ((targetHeight > d.maxHeight) ? d.maxHeight : targetHeight)) + (modalWindowItem.keyboardRaised ? (200 + 2 * hifi.dimensions.contentSpacing.y) : 0)
             }
         }
 
         Item {
             anchors {
                 top: parent.top
-                bottom: buttons.top;
+                bottom: keyboard1.top;
                 left: parent.left;
                 right: parent.right;
                 margins: 0

@@ -110,6 +116,35 @@ ModalWindow {
             }
         }
 
+        // virtual keyboard, letters
+        Keyboard {
+            id: keyboard1
+            y: parent.keyboardRaised ? parent.height : 0
+            height: parent.keyboardRaised ? 200 : 0
+            visible: parent.keyboardRaised && !parent.punctuationMode
+            enabled: parent.keyboardRaised && !parent.punctuationMode
+            anchors.right: parent.right
+            anchors.rightMargin: 0
+            anchors.left: parent.left
+            anchors.leftMargin: 0
+            anchors.bottom: buttons.top
+            anchors.bottomMargin: parent.keyboardRaised ? 2 * hifi.dimensions.contentSpacing.y : 0
+        }
+
+        KeyboardPunctuation {
+            id: keyboard2
+            y: parent.keyboardRaised ? parent.height : 0
+            height: parent.keyboardRaised ? 200 : 0
+            visible: parent.keyboardRaised && parent.punctuationMode
+            enabled: parent.keyboardRaised && parent.punctuationMode
+            anchors.right: parent.right
+            anchors.rightMargin: 0
+            anchors.left: parent.left
+            anchors.leftMargin: 0
+            anchors.bottom: buttons.top
+            anchors.bottomMargin: parent.keyboardRaised ? 2 * hifi.dimensions.contentSpacing.y : 0
+        }
+
         Flow {
             id: buttons
             focus: true
@@ -45,7 +45,7 @@ ScrollingWindow {
 
     Rectangle {
         width: parent.width
-        height: root.height
+        height: root.height - (keyboardRaised ? 200 : 0)
         radius: 4
         color: hifi.colors.baseGray
 

@@ -128,6 +128,10 @@ ScrollingWindow {
                 }
                 onCountChanged: MyAvatar.setAttachmentsVariant(attachments);
             }
+
+            function scrollBy(delta) {
+                flickableItem.contentY += delta;
+            }
         }
     }
 

@@ -204,5 +208,22 @@ ScrollingWindow {
             }
         }
     }
+
+    onKeyboardRaisedChanged: {
+        if (keyboardRaised) {
+            // Scroll to item with focus if necessary.
+            var footerHeight = newAttachmentButton.height + buttonRow.height + 3 * hifi.dimensions.contentSpacing.y;
+            var delta = activator.mouseY
+                - (activator.height + activator.y - 200 - footerHeight - hifi.dimensions.controlLineHeight);
+
+            if (delta > 0) {
+                scrollView.scrollBy(delta);
+            } else {
+                // HACK: Work around for case where are 100% scrolled; stops window from erroneously scrolling to 100% when show keyboard.
+                scrollView.scrollBy(-1);
+                scrollView.scrollBy(1);
+            }
+        }
+    }
 }
 
@@ -30,7 +30,7 @@ ScrollingWindow {
 
     Rectangle {
         width: parent.width
-        height: root.height
+        height: root.height - (keyboardRaised ? 200 : 0)
         radius: 4
         color: hifi.colors.baseGray
 
@ -1,3 +1,4 @@
|
||||||
|
|
||||||
//
|
//
|
||||||
// Window.qml
|
// Window.qml
|
||||||
//
|
//
|
||||||
|
@ -15,6 +16,7 @@ import QtGraphicalEffects 1.0
|
||||||
|
|
||||||
import "."
|
import "."
|
||||||
import "../styles-uit"
|
import "../styles-uit"
|
||||||
|
import "../controls-uit" as HiFiControls
|
||||||
|
|
||||||
// FIXME how do I set the initial position of a window without
|
// FIXME how do I set the initial position of a window without
|
||||||
// overriding places where the a individual client of the window
|
// overriding places where the a individual client of the window
|
||||||
|
@ -23,12 +25,18 @@ import "../styles-uit"
|
||||||
// FIXME how to I enable dragging without allowing the window to lay outside
|
// FIXME how to I enable dragging without allowing the window to lay outside
|
||||||
// of the desktop? How do I ensure when the desktop resizes all the windows
|
// of the desktop? How do I ensure when the desktop resizes all the windows
|
||||||
// are still at least partially visible?
|
// are still at least partially visible?
|
||||||
|
|
||||||
Window {
|
Window {
|
||||||
id: window
|
id: window
|
||||||
HifiConstants { id: hifi }
|
HifiConstants { id: hifi }
|
||||||
children: [ swallower, frame, pane, activator ]
|
children: [ swallower, frame, defocuser, pane, activator ]
|
||||||
|
|
||||||
property var footer: Item { } // Optional static footer at the bottom of the dialog.
|
property var footer: Item { } // Optional static footer at the bottom of the dialog.
|
||||||
|
readonly property var footerContentHeight: footer.height > 0 ? (footer.height + 2 * hifi.dimensions.contentSpacing.y + 3) : 0
|
||||||
|
|
||||||
|
property bool keyboardEnabled: true // Set false if derived control implements its own keyboard.
|
||||||
|
property bool keyboardRaised: false
|
||||||
|
property bool punctuationMode: false
|
||||||
|
|
||||||
// Scrollable window content.
|
// Scrollable window content.
|
||||||
// FIXME this should not define any visual content in this type. The base window
|
// FIXME this should not define any visual content in this type. The base window
|
||||||
|
@@ -73,7 +81,7 @@ Window {
        verticalScrollBarPolicy: Qt.ScrollBarAsNeeded
        anchors.fill: parent
        anchors.rightMargin: parent.isScrolling ? 1 : 0
-       anchors.bottomMargin: footer.height > 0 ? footerPane.height : 0
+       anchors.bottomMargin: footerPane.height
 
        style: ScrollViewStyle {
@@ -116,21 +124,36 @@ Window {
            }
        }
 
+       function scrollBy(delta) {
+           scrollView.flickableItem.contentY += delta;
+       }
+
        Rectangle {
            // Optional non-scrolling footer.
            id: footerPane
+
+           property alias keyboardEnabled: window.keyboardEnabled
+           property alias keyboardRaised: window.keyboardRaised
+           property alias punctuationMode: window.punctuationMode
+
            anchors {
                left: parent.left
                bottom: parent.bottom
            }
            width: parent.contentWidth
-           height: footer.height + 2 * hifi.dimensions.contentSpacing.y + 3
+           height: footerContentHeight + (keyboardEnabled && keyboardRaised ? 200 : 0)
            color: hifi.colors.baseGray
-           visible: footer.height > 0
+           visible: footer.height > 0 || keyboardEnabled && keyboardRaised
 
            Item {
                // Horizontal rule.
-               anchors.fill: parent
+               anchors {
+                   top: parent.top
+                   left: parent.left
+                   right: parent.right
+               }
+
+               visible: footer.height > 0
 
                Rectangle {
                    width: parent.width
@@ -148,10 +171,53 @@ Window {
                }
 
            Item {
-               anchors.fill: parent
-               anchors.topMargin: 3 // Horizontal rule.
+               anchors {
+                   left: parent.left
+                   right: parent.right
+                   top: parent.top
+                   topMargin: hifi.dimensions.contentSpacing.y + 3
+               }
                children: [ footer ]
            }
+
+           HiFiControls.Keyboard {
+               id: keyboard1
+               height: parent.keyboardEnabled && parent.keyboardRaised ? 200 : 0
+               visible: parent.keyboardEnabled && parent.keyboardRaised && !parent.punctuationMode
+               enabled: parent.keyboardEnabled && parent.keyboardRaised && !parent.punctuationMode
+               anchors {
+                   left: parent.left
+                   right: parent.right
+                   bottom: parent.bottom
+               }
+           }
+
+           HiFiControls.KeyboardPunctuation {
+               id: keyboard2
+               height: parent.keyboardEnabled && parent.keyboardRaised ? 200 : 0
+               visible: parent.keyboardEnabled && parent.keyboardRaised && parent.punctuationMode
+               enabled: parent.keyboardEnabled && parent.keyboardRaised && parent.punctuationMode
+               anchors {
+                   left: parent.left
+                   right: parent.right
+                   bottom: parent.bottom
+               }
+           }
+       }
+   }
+
+   onKeyboardRaisedChanged: {
+       if (keyboardEnabled && keyboardRaised) {
+           var delta = activator.mouseY
+                   - (activator.height + activator.y - 200 - footerContentHeight - hifi.dimensions.controlLineHeight);
+
+           if (delta > 0) {
+               pane.scrollBy(delta);
+           } else {
+               // HACK: Work around for case where are 100% scrolled; stops window from erroneously scrolling to 100% when show keyboard.
+               pane.scrollBy(-1);
+               pane.scrollBy(1);
+           }
        }
    }
 }
@@ -44,7 +44,7 @@ Fadable {
    implicitHeight: content ? content.height : 0
    implicitWidth: content ? content.width : 0
    x: desktop.invalid_position; y: desktop.invalid_position;
-   children: [ swallower, frame, content, activator ]
+   children: [ swallower, frame, defocuser, content, activator ]
 
    //
    // Custom properties
@@ -122,6 +122,21 @@ Fadable {
        }
    }
 
+   // This mouse area defocuses the current control so that the HMD keyboard gets hidden.
+   property var defocuser: MouseArea {
+       width: frame.decoration ? frame.decoration.width : window.width
+       height: frame.decoration ? frame.decoration.height : window.height
+       x: frame.decoration ? frame.decoration.anchors.leftMargin : 0
+       y: frame.decoration ? frame.decoration.anchors.topMargin : 0
+       propagateComposedEvents: true
+       acceptedButtons: Qt.AllButtons
+       enabled: window.visible
+       onPressed: {
+           frame.forceActiveFocus();
+           mouse.accepted = false;
+       }
+   }
+
    // This mouse area serves to swallow mouse events while the mouse is over the window
    // to prevent things like mouse wheel events from reaching the application and changing
    // the camera if the user is scrolling through a list and gets to the end.
@@ -1,78 +0,0 @@
-//
-//  Created by Bradley Austin Davis on 2016/07/11
-//  Copyright 2013-2016 High Fidelity, Inc.
-//
-//  Distributed under the Apache License, Version 2.0.
-//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-//
-
-#version 410 core
-
-uniform sampler2D sampler;
-uniform mat3 reprojection = mat3(1);
-uniform mat4 inverseProjections[2];
-uniform mat4 projections[2];
-
-in vec2 vTexCoord;
-in vec3 vPosition;
-
-out vec4 FragColor;
-
-void main() {
-    vec2 uv = vTexCoord;
-
-    mat4 eyeInverseProjection;
-    mat4 eyeProjection;
-
-    float xoffset = 1.0;
-    vec2 uvmin = vec2(0.0);
-    vec2 uvmax = vec2(1.0);
-    // determine the correct projection and inverse projection to use.
-    if (vTexCoord.x < 0.5) {
-        uvmax.x = 0.5;
-        eyeInverseProjection = inverseProjections[0];
-        eyeProjection = projections[0];
-    } else {
-        xoffset = -1.0;
-        uvmin.x = 0.5;
-        uvmax.x = 1.0;
-        eyeInverseProjection = inverseProjections[1];
-        eyeProjection = projections[1];
-    }
-
-    // Account for stereo in calculating the per-eye NDC coordinates
-    vec4 ndcSpace = vec4(vPosition, 1.0);
-    ndcSpace.x *= 2.0;
-    ndcSpace.x += xoffset;
-
-    // Convert from NDC to eyespace
-    vec4 eyeSpace = eyeInverseProjection * ndcSpace;
-    eyeSpace /= eyeSpace.w;
-
-    // Convert to a noramlized ray
-    vec3 ray = eyeSpace.xyz;
-    ray = normalize(ray);
-
-    // Adjust the ray by the rotation
-    ray = reprojection * ray;
-
-    // Project back on to the texture plane
-    ray *= eyeSpace.z / ray.z;
-
-    // Update the eyespace vector
-    eyeSpace.xyz = ray;
-
-    // Reproject back into NDC
-    ndcSpace = eyeProjection * eyeSpace;
-    ndcSpace /= ndcSpace.w;
-    ndcSpace.x -= xoffset;
-    ndcSpace.x /= 2.0;
-
-    // Calculate the new UV coordinates
-    uv = (ndcSpace.xy / 2.0) + 0.5;
-    if (any(greaterThan(uv, uvmax)) || any(lessThan(uv, uvmin))) {
-        FragColor = vec4(0.0, 0.0, 0.0, 1.0);
-    } else {
-        FragColor = texture(sampler, uv);
-    }
-}
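The deleted fragment shader above implements late rotational reprojection: each output pixel is mapped back through the eye's inverse projection, the resulting view ray is rotated by a correction matrix, and the rotated ray is reprojected to find which source texel to sample. A rough CPU-side equivalent of that math, sketched with glm; the function and parameter names are illustrative only, not engine API:

    #include <glm/glm.hpp>

    // Given a point in per-eye NDC, return the corrected [0,1] texture coordinate
    // after applying a small head-rotation correction ("reprojection").
    glm::vec2 reprojectUV(const glm::vec3& ndcPos, const glm::mat3& reprojection,
                          const glm::mat4& inverseProjection, const glm::mat4& projection) {
        // NDC -> eye space
        glm::vec4 eyeSpace = inverseProjection * glm::vec4(ndcPos, 1.0f);
        eyeSpace /= eyeSpace.w;

        // Rotate the normalized view ray by the late correction
        glm::vec3 ray = reprojection * glm::normalize(glm::vec3(eyeSpace));

        // Push the rotated ray back out to the original depth plane
        ray *= eyeSpace.z / ray.z;
        eyeSpace = glm::vec4(ray, 1.0f);

        // Eye space -> NDC -> [0,1] texture coordinates
        glm::vec4 ndc = projection * eyeSpace;
        ndc /= ndc.w;
        return glm::vec2(ndc) * 0.5f + 0.5f;
    }

The shader additionally shifts and scales the x coordinate so the two eyes share one side-by-side texture; that bookkeeping is omitted here.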
@@ -1,20 +0,0 @@
-//
-//  Created by Bradley Austin Davis on 2016/07/11
-//  Copyright 2013-2016 High Fidelity, Inc.
-//
-//  Distributed under the Apache License, Version 2.0.
-//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-//
-
-#version 410 core
-in vec3 Position;
-in vec2 TexCoord;
-
-out vec3 vPosition;
-out vec2 vTexCoord;
-
-void main() {
-  gl_Position = vec4(Position, 1);
-  vTexCoord = TexCoord;
-  vPosition = Position;
-}
@@ -87,6 +87,7 @@
 #include <PhysicsEngine.h>
 #include <PhysicsHelpers.h>
 #include <plugins/PluginManager.h>
+#include <plugins/PluginUtils.h>
 #include <plugins/CodecPlugin.h>
 #include <RecordingScriptingInterface.h>
 #include <RenderableWebEntityItem.h>
@@ -198,8 +199,9 @@ static const float MIRROR_FIELD_OF_VIEW = 30.0f;
 
 static const quint64 TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS = 1 * USECS_PER_SECOND;
 
-static const QString INFO_HELP_PATH = "html/interface-welcome.html";
+static const QString INFO_WELCOME_PATH = "html/interface-welcome.html";
 static const QString INFO_EDIT_ENTITIES_PATH = "html/edit-commands.html";
+static const QString INFO_HELP_PATH = "html/help.html";
 
 static const unsigned int THROTTLED_SIM_FRAMERATE = 15;
 static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SIM_FRAMERATE;
@@ -486,7 +488,7 @@ bool setupEssentials(int& argc, char** argv) {
 // FIXME move to header, or better yet, design some kind of UI manager
 // to take care of highlighting keyboard focused items, rather than
 // continuing to overburden Application.cpp
-Cube3DOverlay* _keyboardFocusHighlight{ nullptr };
+std::shared_ptr<Cube3DOverlay> _keyboardFocusHighlight{ nullptr };
 int _keyboardFocusHighlightID{ -1 };
 
 
@@ -682,10 +684,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
     // send a location update immediately
     discoverabilityManager->updateLocation();
 
+    auto myAvatar = getMyAvatar();
+
     connect(nodeList.data(), &NodeList::nodeAdded, this, &Application::nodeAdded);
     connect(nodeList.data(), &NodeList::nodeKilled, this, &Application::nodeKilled);
     connect(nodeList.data(), &NodeList::nodeActivated, this, &Application::nodeActivated);
-    connect(nodeList.data(), &NodeList::uuidChanged, getMyAvatar(), &MyAvatar::setSessionUUID);
+    connect(nodeList.data(), &NodeList::uuidChanged, myAvatar.get(), &MyAvatar::setSessionUUID);
     connect(nodeList.data(), &NodeList::uuidChanged, this, &Application::setSessionUUID);
     connect(nodeList.data(), &NodeList::packetVersionMismatch, this, &Application::notifyPacketVersionMismatch);
 
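The myAvatar.get() receivers above follow from getMyAvatar() now returning a std::shared_ptr<MyAvatar>: QObject::connect still takes a raw QObject pointer, and the owning shared_ptr has to outlive the connection. A minimal stand-alone sketch of the same pattern; Worker is a hypothetical class, not engine code:

    #include <QObject>
    #include <QTimer>
    #include <memory>

    class Worker : public QObject {
    public:
        void tick() { /* react to the timer */ }
    };

    void wire(QTimer& timer, const std::shared_ptr<Worker>& worker) {
        // connect() wants a raw QObject* receiver, so pass shared_ptr::get();
        // the owning shared_ptr must outlive the connection, and Qt disconnects
        // automatically if the receiver object is destroyed first.
        QObject::connect(&timer, &QTimer::timeout, worker.get(), &Worker::tick);
    }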
@@ -715,7 +719,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
     connect(this, &Application::activeDisplayPluginChanged, this, &Application::updateThreadPoolCount);
 
     // Save avatar location immediately after a teleport.
-    connect(getMyAvatar(), &MyAvatar::positionGoneTo,
+    connect(myAvatar.get(), &MyAvatar::positionGoneTo,
         DependencyManager::get<AddressManager>().data(), &AddressManager::storeCurrentAddress);
 
     auto scriptEngines = DependencyManager::get<ScriptEngines>().data();
@@ -752,7 +756,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
     connect(&_entityEditSender, &EntityEditPacketSender::packetSent, this, &Application::packetSent);
 
     // send the identity packet for our avatar each second to our avatar mixer
-    connect(&identityPacketTimer, &QTimer::timeout, getMyAvatar(), &MyAvatar::sendIdentityPacket);
+    connect(&identityPacketTimer, &QTimer::timeout, myAvatar.get(), &MyAvatar::sendIdentityPacket);
     identityPacketTimer.start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS);
 
     const char** constArgv = const_cast<const char**>(argv);
@@ -815,7 +819,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
         { "gl_version", glContextData["version"] },
         { "gl_vender", glContextData["vendor"] },
         { "gl_sl_version", glContextData["slVersion"] },
-        { "gl_renderer", glContextData["renderer"] }
+        { "gl_renderer", glContextData["renderer"] },
+        { "ideal_thread_count", QThread::idealThreadCount() }
     };
     auto macVersion = QSysInfo::macVersion();
     if (macVersion != QSysInfo::MV_None) {
@@ -825,13 +830,23 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
     if (windowsVersion != QSysInfo::WV_None) {
         properties["os_win_version"] = QSysInfo::windowsVersion();
     }
 
+    ProcessorInfo procInfo;
+    if (getProcessorInfo(procInfo)) {
+        properties["processor_core_count"] = procInfo.numProcessorCores;
+        properties["logical_processor_count"] = procInfo.numLogicalProcessors;
+        properties["processor_l1_cache_count"] = procInfo.numProcessorCachesL1;
+        properties["processor_l2_cache_count"] = procInfo.numProcessorCachesL2;
+        properties["processor_l3_cache_count"] = procInfo.numProcessorCachesL3;
+    }
+
     UserActivityLogger::getInstance().logAction("launch", properties);
 
     _connectionMonitor.init();
 
     // Tell our entity edit sender about our known jurisdictions
     _entityEditSender.setServerJurisdictions(&_entityServerJurisdictions);
-    _entityEditSender.setMyAvatar(getMyAvatar());
+    _entityEditSender.setMyAvatar(myAvatar.get());
 
     // For now we're going to set the PPS for outbound packets to be super high, this is
     // probably not the right long term solution. But for now, we're going to do this to
@@ -852,7 +867,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
         bandwidthRecorder.data(), &BandwidthRecorder::updateInboundData);
 
     // FIXME -- I'm a little concerned about this.
-    connect(getMyAvatar()->getSkeletonModel().get(), &SkeletonModel::skeletonLoaded,
+    connect(myAvatar->getSkeletonModel().get(), &SkeletonModel::skeletonLoaded,
         this, &Application::checkSkeleton, Qt::QueuedConnection);
 
     // Setup the userInputMapper with the actions
@@ -1067,7 +1082,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
     applicationUpdater->checkForUpdate();
 
     // Now that menu is initialized we can sync myAvatar with it's state.
-    getMyAvatar()->updateMotionBehaviorFromMenu();
+    myAvatar->updateMotionBehaviorFromMenu();
 
     // FIXME spacemouse code still needs cleanup
 #if 0
@@ -1102,10 +1117,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
     static int SEND_STATS_INTERVAL_MS = 10000;
     static int NEARBY_AVATAR_RADIUS_METERS = 10;
 
-    static glm::vec3 lastAvatarPosition = getMyAvatar()->getPosition();
+    static glm::vec3 lastAvatarPosition = myAvatar->getPosition();
     static glm::mat4 lastHMDHeadPose = getHMDSensorPose();
-    static controller::Pose lastLeftHandPose = getMyAvatar()->getLeftHandPose();
-    static controller::Pose lastRightHandPose = getMyAvatar()->getRightHandPose();
+    static controller::Pose lastLeftHandPose = myAvatar->getLeftHandPose();
+    static controller::Pose lastRightHandPose = myAvatar->getRightHandPose();
 
     // Periodically send fps as a user activity event
     QTimer* sendStatsTimer = new QTimer(this);
@@ -1123,6 +1138,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
         auto displayPlugin = qApp->getActiveDisplayPlugin();
 
         properties["fps"] = _frameCounter.rate();
+        properties["target_frame_rate"] = getTargetFrameRate();
         properties["present_rate"] = displayPlugin->presentRate();
         properties["new_frame_present_rate"] = displayPlugin->newFramePresentRate();
         properties["dropped_frame_rate"] = displayPlugin->droppedFrameRate();
@@ -1153,7 +1169,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
 
         properties["throttled"] = _displayPlugin ? _displayPlugin->isThrottled() : false;
 
-        glm::vec3 avatarPosition = getMyAvatar()->getPosition();
+        auto myAvatar = getMyAvatar();
+        glm::vec3 avatarPosition = myAvatar->getPosition();
         properties["avatar_has_moved"] = lastAvatarPosition != avatarPosition;
         lastAvatarPosition = avatarPosition;
 
@@ -1164,12 +1181,15 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
         properties["deleted_entity_cnt"] = entityActivityTracking.deletedEntityCount;
         properties["edited_entity_cnt"] = entityActivityTracking.editedEntityCount;
 
+        properties["active_display_plugin"] = getActiveDisplayPlugin()->getName();
+        properties["using_hmd"] = isHMDMode();
+
         auto hmdHeadPose = getHMDSensorPose();
         properties["hmd_head_pose_changed"] = isHMDMode() && (hmdHeadPose != lastHMDHeadPose);
         lastHMDHeadPose = hmdHeadPose;
 
-        auto leftHandPose = getMyAvatar()->getLeftHandPose();
-        auto rightHandPose = getMyAvatar()->getRightHandPose();
+        auto leftHandPose = myAvatar->getLeftHandPose();
+        auto rightHandPose = myAvatar->getRightHandPose();
         // controller::Pose considers two poses to be different if either are invalid. In our case, we actually
         // want to consider the pose to be unchanged if it was invalid and still is invalid, so we check that first.
         properties["hand_pose_changed"] =
@@ -1216,7 +1236,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
 
     OctreeEditPacketSender* packetSender = entityScriptingInterface->getPacketSender();
     EntityEditPacketSender* entityPacketSender = static_cast<EntityEditPacketSender*>(packetSender);
-    entityPacketSender->setMyAvatar(getMyAvatar());
+    entityPacketSender->setMyAvatar(myAvatar.get());
 
     connect(this, &Application::applicationStateChanged, this, &Application::activeChanged);
     qCDebug(interfaceapp, "Startup time: %4.2f seconds.", (double)startupTimer.elapsed() / 1000.0);
@@ -1236,8 +1256,87 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
         return entityServerNode && !isPhysicsEnabled();
     });
 
+    // Get sandbox content set version, if available
+    auto acDirPath = PathUtils::getRootDataDirectory() + BuildInfo::MODIFIED_ORGANIZATION + "/assignment-client/";
+    auto contentVersionPath = acDirPath + "content-version.txt";
+    qDebug() << "Checking " << contentVersionPath << " for content version";
+    auto contentVersion = 0;
+    QFile contentVersionFile(contentVersionPath);
+    if (contentVersionFile.open(QIODevice::ReadOnly | QIODevice::Text)) {
+        QString line = contentVersionFile.readAll();
+        // toInt() returns 0 if the conversion fails, so we don't need to specifically check for failure
+        contentVersion = line.toInt();
+    }
+    qDebug() << "Server content version: " << contentVersion;
+
+    bool hasTutorialContent = contentVersion >= 1;
+
+    Setting::Handle<bool> firstRun { Settings::firstRun, true };
+    bool hasHMDAndHandControllers = PluginUtils::isHMDAvailable("OpenVR (Vive)") && PluginUtils::isHandControllerAvailable();
+    Setting::Handle<bool> tutorialComplete { "tutorialComplete", false };
+
+    bool shouldGoToTutorial = hasHMDAndHandControllers && hasTutorialContent && !tutorialComplete.get();
+
+    qDebug() << "Has HMD + Hand Controllers: " << hasHMDAndHandControllers << ", current plugin: " << _displayPlugin->getName();
+    qDebug() << "Has tutorial content: " << hasTutorialContent;
+    qDebug() << "Tutorial complete: " << tutorialComplete.get();
+    qDebug() << "Should go to tutorial: " << shouldGoToTutorial;
+
+    // when --url in command line, teleport to location
+    const QString HIFI_URL_COMMAND_LINE_KEY = "--url";
+    int urlIndex = arguments().indexOf(HIFI_URL_COMMAND_LINE_KEY);
+    QString addressLookupString;
+    if (urlIndex != -1) {
+        addressLookupString = arguments().value(urlIndex + 1);
+    }
+
+    const QString TUTORIAL_PATH = "/tutorial_begin";
+
+    if (shouldGoToTutorial) {
+        DependencyManager::get<AddressManager>()->ifLocalSandboxRunningElse([=]() {
+            qDebug() << "Home sandbox appears to be running, going to Home.";
+            DependencyManager::get<AddressManager>()->goToLocalSandbox(TUTORIAL_PATH);
+        }, [=]() {
+            qDebug() << "Home sandbox does not appear to be running, going to Entry.";
+            if (firstRun.get()) {
+                showHelp();
+            }
+            if (addressLookupString.isEmpty()) {
+                DependencyManager::get<AddressManager>()->goToEntry();
+            } else {
+                DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
+            }
+        });
+    } else {
+
+        bool isFirstRun = firstRun.get();
+
+        if (isFirstRun) {
+            showHelp();
+        }
+
+        // If this is a first run we short-circuit the address passed in
+        if (isFirstRun) {
+            if (hasHMDAndHandControllers) {
+                DependencyManager::get<AddressManager>()->ifLocalSandboxRunningElse([=]() {
+                    qDebug() << "Home sandbox appears to be running, going to Home.";
+                    DependencyManager::get<AddressManager>()->goToLocalSandbox();
+                }, [=]() {
+                    qDebug() << "Home sandbox does not appear to be running, going to Entry.";
+                    DependencyManager::get<AddressManager>()->goToEntry();
+                });
+            } else {
+                DependencyManager::get<AddressManager>()->goToEntry();
+            }
+        } else {
+            qDebug() << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
+            DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
+        }
+    }
+
     // After all of the constructor is completed, then set firstRun to false.
-    Setting::Handle<bool> firstRun{ Settings::firstRun, true };
     firstRun.set(false);
 }
 
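The content-version probe above leans on QString::toInt() returning 0 when the conversion fails, so a missing or malformed file simply reads as version 0. The same idea isolated into a small helper; the function is an assumed sketch, not part of the codebase:

    #include <QFile>
    #include <QString>

    // Read an integer content version from a text file; 0 means "unknown or absent".
    int readContentVersion(const QString& path) {
        int version = 0;
        QFile file(path);
        if (file.open(QIODevice::ReadOnly | QIODevice::Text)) {
            // toInt() yields 0 for non-numeric text, so no separate error check is needed
            version = QString::fromUtf8(file.readAll()).toInt();
        }
        return version;
    }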
@@ -1600,7 +1699,7 @@ void Application::initializeUi() {
     FileScriptingInterface* fileDownload = new FileScriptingInterface(engine);
     rootContext->setContextProperty("File", fileDownload);
     connect(fileDownload, &FileScriptingInterface::unzipSuccess, this, &Application::showAssetServerWidget);
-    rootContext->setContextProperty("MyAvatar", getMyAvatar());
+    rootContext->setContextProperty("MyAvatar", getMyAvatar().get());
     rootContext->setContextProperty("Messages", DependencyManager::get<MessagesClient>().data());
     rootContext->setContextProperty("Recording", DependencyManager::get<RecordingScriptingInterface>().data());
     rootContext->setContextProperty("Preferences", DependencyManager::get<Preferences>().data());
@@ -1981,11 +2080,11 @@ void Application::setFieldOfView(float fov) {
 }
 
 void Application::aboutApp() {
-    InfoView::show(INFO_HELP_PATH);
+    InfoView::show(INFO_WELCOME_PATH);
 }
 
 void Application::showHelp() {
-    InfoView::show(INFO_EDIT_ENTITIES_PATH);
+    InfoView::show(INFO_HELP_PATH);
 }
 
 void Application::resizeEvent(QResizeEvent* event) {
@@ -2170,7 +2269,7 @@ bool Application::event(QEvent* event) {
     // handle custom URL
     if (event->type() == QEvent::FileOpen) {
 
         QFileOpenEvent* fileEvent = static_cast<QFileOpenEvent*>(event);
 
         QUrl url = fileEvent->url();
 
@@ -3260,15 +3359,6 @@ void Application::init() {
 
     _timerStart.start();
     _lastTimeUpdated.start();
 
-    // when --url in command line, teleport to location
-    const QString HIFI_URL_COMMAND_LINE_KEY = "--url";
-    int urlIndex = arguments().indexOf(HIFI_URL_COMMAND_LINE_KEY);
-    QString addressLookupString;
-    if (urlIndex != -1) {
-        addressLookupString = arguments().value(urlIndex + 1);
-    }
-
     // when +connect_lobby in command line, join steam lobby
     const QString STEAM_LOBBY_COMMAND_LINE_KEY = "+connect_lobby";
     int lobbyIndex = arguments().indexOf(STEAM_LOBBY_COMMAND_LINE_KEY);
@@ -3277,21 +3367,6 @@ void Application::init() {
         SteamClient::joinLobby(lobbyId);
     }
 
-    Setting::Handle<bool> firstRun { Settings::firstRun, true };
-    if (addressLookupString.isEmpty() && firstRun.get()) {
-        qCDebug(interfaceapp) << "First run and no URL passed... attempting to go to Home or Entry...";
-        DependencyManager::get<AddressManager>()->ifLocalSandboxRunningElse([](){
-            qCDebug(interfaceapp) << "Home sandbox appears to be running, going to Home.";
-            DependencyManager::get<AddressManager>()->goToLocalSandbox();
-        },
-        [](){
-            qCDebug(interfaceapp) << "Home sandbox does not appear to be running, going to Entry.";
-            DependencyManager::get<AddressManager>()->goToEntry();
-        });
-    } else {
-        qCDebug(interfaceapp) << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
-        DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
-    }
-
     qCDebug(interfaceapp) << "Loaded settings";
 
@@ -3350,7 +3425,7 @@ void Application::init() {
             entity->setCollisionSound(sound);
         }
     }, Qt::QueuedConnection);
-    connect(getMyAvatar(), &MyAvatar::newCollisionSoundURL, this, [this](QUrl newURL) {
+    connect(getMyAvatar().get(), &MyAvatar::newCollisionSoundURL, this, [this](QUrl newURL) {
         if (auto avatar = getMyAvatar()) {
             auto sound = DependencyManager::get<SoundCache>()->getSound(newURL);
             avatar->setCollisionSound(sound);
@@ -3406,7 +3481,7 @@ void Application::updateMyAvatarLookAtPosition() {
         }
     } else {
         AvatarSharedPointer lookingAt = myAvatar->getLookAtTargetAvatar().lock();
-        if (lookingAt && myAvatar != lookingAt.get()) {
+        if (lookingAt && myAvatar.get() != lookingAt.get()) {
             // If I am looking at someone else, look directly at one of their eyes
             isLookingAtSomeone = true;
             auto lookingAtHead = static_pointer_cast<Avatar>(lookingAt)->getHead();
@@ -3607,7 +3682,7 @@ void Application::setKeyboardFocusEntity(EntityItemID entityItemID) {
         _keyboardFocusedItem.set(entityItemID);
         _lastAcceptedKeyPress = usecTimestampNow();
         if (_keyboardFocusHighlightID < 0 || !getOverlays().isAddedOverlay(_keyboardFocusHighlightID)) {
-            _keyboardFocusHighlight = new Cube3DOverlay();
+            _keyboardFocusHighlight = std::make_shared<Cube3DOverlay>();
             _keyboardFocusHighlight->setAlpha(1.0f);
             _keyboardFocusHighlight->setBorderSize(1.0f);
             _keyboardFocusHighlight->setColor({ 0xFF, 0xEF, 0x00 });
@@ -4255,7 +4330,7 @@ PickRay Application::computePickRay(float x, float y) const {
     return result;
 }
 
-MyAvatar* Application::getMyAvatar() const {
+std::shared_ptr<MyAvatar> Application::getMyAvatar() const {
     return DependencyManager::get<AvatarManager>()->getMyAvatar();
 }
 
@@ -4361,8 +4436,13 @@ namespace render {
             auto scene = DependencyManager::get<SceneScriptingInterface>()->getStage();
             auto sceneKeyLight = scene->getKeyLight();
             auto defaultSkyboxAmbientTexture = qApp->getDefaultSkyboxAmbientTexture();
-            sceneKeyLight->setAmbientSphere(defaultSkyboxAmbientTexture->getIrradiance());
-            sceneKeyLight->setAmbientMap(defaultSkyboxAmbientTexture);
+            if (defaultSkyboxAmbientTexture) {
+                sceneKeyLight->setAmbientSphere(defaultSkyboxAmbientTexture->getIrradiance());
+                sceneKeyLight->setAmbientMap(defaultSkyboxAmbientTexture);
+            } else {
+                static QString repeatedMessage = LogHandler::getInstance().addRepeatedMessageRegex(
+                    "Failed to get a valid Default Skybox Ambient Texture ? probably because it couldn't be find during initialization step");
+            }
             // fall through: render defaults skybox
         } else {
             break;
@@ -4843,7 +4923,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
     scriptEngine->registerGlobalObject("Rates", new RatesScriptingInterface(this));
 
     // hook our avatar and avatar hash map object into this script engine
-    scriptEngine->registerGlobalObject("MyAvatar", getMyAvatar());
+    scriptEngine->registerGlobalObject("MyAvatar", getMyAvatar().get());
     qScriptRegisterMetaType(scriptEngine, audioListenModeToScriptValue, audioListenModeFromScriptValue);
 
     scriptEngine->registerGlobalObject("AvatarList", DependencyManager::get<AvatarManager>().data());
@@ -401,7 +401,7 @@ private:
 
     int sendNackPackets();
 
-    MyAvatar* getMyAvatar() const;
+    std::shared_ptr<MyAvatar> getMyAvatar() const;
 
     void checkSkeleton() const;
 
@@ -13,6 +13,7 @@
 #include <EntityTree.h>
 #include <EntityTreeRenderer.h>
 #include <avatar/AvatarManager.h>
+#include <AvatarData.h>
 
 #include "InterfaceParentFinder.h"
 
@@ -45,6 +46,11 @@ SpatiallyNestableWeakPointer InterfaceParentFinder::find(QUuid parentID, bool& s
         return parent;
     }
 
+    if (parentID == AVATAR_SELF_ID) {
+        success = true;
+        return avatarManager->getMyAvatar();
+    }
+
     success = false;
     return parent;
 }
@@ -166,7 +166,7 @@ Menu::Menu() {
     // Avatar menu ----------------------------------
     MenuWrapper* avatarMenu = addMenu("Avatar");
     auto avatarManager = DependencyManager::get<AvatarManager>();
-    QObject* avatar = avatarManager->getMyAvatar();
+    auto avatar = avatarManager->getMyAvatar();
 
     // Avatar > Attachments...
     auto action = addActionToQMenuAndActionHash(avatarMenu, MenuOption::Attachments);
@@ -182,19 +182,19 @@ Menu::Menu() {
     addActionToQMenuAndActionHash(avatarSizeMenu,
         MenuOption::IncreaseAvatarSize,
         0, // QML Qt::Key_Plus,
-        avatar, SLOT(increaseSize()));
+        avatar.get(), SLOT(increaseSize()));
 
     // Avatar > Size > Decrease
     addActionToQMenuAndActionHash(avatarSizeMenu,
         MenuOption::DecreaseAvatarSize,
         0, // QML Qt::Key_Minus,
-        avatar, SLOT(decreaseSize()));
+        avatar.get(), SLOT(decreaseSize()));
 
     // Avatar > Size > Reset
     addActionToQMenuAndActionHash(avatarSizeMenu,
         MenuOption::ResetAvatarSize,
         0, // QML Qt::Key_Equal,
-        avatar, SLOT(resetSize()));
+        avatar.get(), SLOT(resetSize()));
 
     // Avatar > Reset Sensors
     addActionToQMenuAndActionHash(avatarMenu,
@@ -517,38 +517,38 @@ Menu::Menu() {
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderOtherLookAtVectors, 0, false);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::FixGaze, 0, false);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AnimDebugDrawDefaultPose, 0, false,
-        avatar, SLOT(setEnableDebugDrawDefaultPose(bool)));
+        avatar.get(), SLOT(setEnableDebugDrawDefaultPose(bool)));
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AnimDebugDrawAnimPose, 0, false,
-        avatar, SLOT(setEnableDebugDrawAnimPose(bool)));
+        avatar.get(), SLOT(setEnableDebugDrawAnimPose(bool)));
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AnimDebugDrawPosition, 0, false,
-        avatar, SLOT(setEnableDebugDrawPosition(bool)));
+        avatar.get(), SLOT(setEnableDebugDrawPosition(bool)));
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::MeshVisible, 0, true,
-        avatar, SLOT(setEnableMeshVisible(bool)));
+        avatar.get(), SLOT(setEnableMeshVisible(bool)));
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::DisableEyelidAdjustment, 0, false);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::TurnWithHead, 0, false);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::UseAnimPreAndPostRotations, 0, true,
-        avatar, SLOT(setUseAnimPreAndPostRotations(bool)));
+        avatar.get(), SLOT(setUseAnimPreAndPostRotations(bool)));
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::EnableInverseKinematics, 0, true,
-        avatar, SLOT(setEnableInverseKinematics(bool)));
+        avatar.get(), SLOT(setEnableInverseKinematics(bool)));
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderSensorToWorldMatrix, 0, false,
-        avatar, SLOT(setEnableDebugDrawSensorToWorldMatrix(bool)));
+        avatar.get(), SLOT(setEnableDebugDrawSensorToWorldMatrix(bool)));
 
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ActionMotorControl,
-        Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar, SLOT(updateMotionBehaviorFromMenu()),
+        Qt::CTRL | Qt::SHIFT | Qt::Key_K, true, avatar.get(), SLOT(updateMotionBehaviorFromMenu()),
         UNSPECIFIED_POSITION, "Developer");
 
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ScriptedMotorControl, 0, true,
-        avatar, SLOT(updateMotionBehaviorFromMenu()),
+        avatar.get(), SLOT(updateMotionBehaviorFromMenu()),
         UNSPECIFIED_POSITION, "Developer");
 
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::EnableCharacterController, 0, true,
-        avatar, SLOT(updateMotionBehaviorFromMenu()),
+        avatar.get(), SLOT(updateMotionBehaviorFromMenu()),
         UNSPECIFIED_POSITION, "Developer");
 
     // Developer > Hands >>>
     MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
     addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false,
-        avatar, SLOT(setEnableDebugDrawHandControllers(bool)));
+        avatar.get(), SLOT(setEnableDebugDrawHandControllers(bool)));
     addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::LowVelocityFilter, 0, true,
         qApp, SLOT(setLowVelocityFilter(bool)));
 
@@ -250,8 +250,6 @@ int AudioScope::addSilenceToScope(QByteArray* byteArray, int frameOffset, int si
 }
 
-
-const int STEREO_FACTOR = 2;
 
 void AudioScope::addStereoSilenceToScope(int silentSamplesPerChannel) {
     if (!_isEnabled || _isPaused) {
         return;
@@ -265,10 +263,10 @@ void AudioScope::addStereoSamplesToScope(const QByteArray& samples) {
         return;
     }
     const int16_t* samplesData = reinterpret_cast<const int16_t*>(samples.data());
-    int samplesPerChannel = samples.size() / sizeof(int16_t) / STEREO_FACTOR;
+    int samplesPerChannel = samples.size() / sizeof(int16_t) / AudioConstants::STEREO;
 
-    addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, samplesData, samplesPerChannel, 0, STEREO_FACTOR);
-    _scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, samplesData, samplesPerChannel, 1, STEREO_FACTOR);
+    addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, samplesData, samplesPerChannel, 0, AudioConstants::STEREO);
+    _scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, samplesData, samplesPerChannel, 1, AudioConstants::STEREO);
 
     _scopeLastFrame = samples.right(AudioConstants::NETWORK_FRAME_BYTES_STEREO);
 }
@@ -282,9 +280,9 @@ void AudioScope::addLastFrameRepeatedWithFadeToScope(int samplesPerChannel) {
         int samplesToWriteThisIteration = std::min(samplesRemaining, (int) AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
         float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat);
         addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, lastFrameData,
-            samplesToWriteThisIteration, 0, STEREO_FACTOR, fade);
+            samplesToWriteThisIteration, 0, AudioConstants::STEREO, fade);
         _scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset,
-            lastFrameData, samplesToWriteThisIteration, 1, STEREO_FACTOR, fade);
+            lastFrameData, samplesToWriteThisIteration, 1, AudioConstants::STEREO, fade);
 
         samplesRemaining -= samplesToWriteThisIteration;
         indexOfRepeat++;
@@ -25,19 +25,30 @@ AvatarActionHold::AvatarActionHold(const QUuid& id, EntityItemPointer ownerEntit
 {
     _type = ACTION_TYPE_HOLD;
     _measuredLinearVelocities.resize(AvatarActionHold::velocitySmoothFrames);
 
+    auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    if (myAvatar) {
+        myAvatar->addHoldAction(this);
+    }
+
 #if WANT_DEBUG
-    qDebug() << "AvatarActionHold::AvatarActionHold";
+    qDebug() << "AvatarActionHold::AvatarActionHold" << (void*)this;
 #endif
 }
 
 AvatarActionHold::~AvatarActionHold() {
+    auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    if (myAvatar) {
+        myAvatar->removeHoldAction(this);
+    }
+
 #if WANT_DEBUG
-    qDebug() << "AvatarActionHold::~AvatarActionHold";
+    qDebug() << "AvatarActionHold::~AvatarActionHold" << (void*)this;
 #endif
 }
 
 bool AvatarActionHold::getAvatarRigidBodyLocation(glm::vec3& avatarRigidBodyPosition, glm::quat& avatarRigidBodyRotation) {
-    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     MyCharacterController* controller = myAvatar ? myAvatar->getCharacterController() : nullptr;
     if (!controller) {
         qDebug() << "AvatarActionHold::getAvatarRigidBodyLocation failed to get character controller";
@@ -460,3 +471,40 @@ void AvatarActionHold::deserialize(QByteArray serializedArguments) {
 
     forceBodyNonStatic();
 }
+
+void AvatarActionHold::lateAvatarUpdate(const AnimPose& prePhysicsRoomPose, const AnimPose& postAvatarUpdateRoomPose) {
+    auto ownerEntity = _ownerEntity.lock();
+    if (!ownerEntity) {
+        return;
+    }
+    void* physicsInfo = ownerEntity->getPhysicsInfo();
+    if (!physicsInfo) {
+        return;
+    }
+    ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
+    btRigidBody* rigidBody = motionState ? motionState->getRigidBody() : nullptr;
+    if (!rigidBody) {
+        return;
+    }
+    auto avatarManager = DependencyManager::get<AvatarManager>();
+    auto holdingAvatar = std::static_pointer_cast<Avatar>(avatarManager->getAvatarBySessionID(_holderID));
+    if (!holdingAvatar || !holdingAvatar->isMyAvatar()) {
+        return;
+    }
+
+    btTransform worldTrans = rigidBody->getWorldTransform();
+    AnimPose worldBodyPose(glm::vec3(1), bulletToGLM(worldTrans.getRotation()), bulletToGLM(worldTrans.getOrigin()));
+
+    // transform the body transform into sensor space with the prePhysics sensor-to-world matrix.
+    // then transform it back into world uisng the postAvatarUpdate sensor-to-world matrix.
+    AnimPose newWorldBodyPose = postAvatarUpdateRoomPose * prePhysicsRoomPose.inverse() * worldBodyPose;
+
+    worldTrans.setOrigin(glmToBullet(newWorldBodyPose.trans));
+    worldTrans.setRotation(glmToBullet(newWorldBodyPose.rot));
+    rigidBody->setWorldTransform(worldTrans);
+
+    bool positionSuccess;
+    ownerEntity->setPosition(bulletToGLM(worldTrans.getOrigin()) + ObjectMotionState::getWorldOffset(), positionSuccess, false);
+    bool orientationSuccess;
+    ownerEntity->setOrientation(bulletToGLM(worldTrans.getRotation()), orientationSuccess, false);
+}
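The key step in lateAvatarUpdate is the composition postAvatarUpdateRoomPose * prePhysicsRoomPose.inverse() * worldBodyPose: the body transform is taken out of the stale room (sensor) basis and re-expressed in the updated one. With plain glm matrices the same correction can be sketched as follows; the names are illustrative, not the engine's AnimPose API:

    #include <glm/glm.hpp>

    // Re-express an object's world transform after the room-to-world transform
    // has changed between the physics step and the avatar update.
    glm::mat4 reapplyRoomMotion(const glm::mat4& preRoomToWorld,
                                const glm::mat4& postRoomToWorld,
                                const glm::mat4& worldTransform) {
        // world -> room using the old basis, then room -> world using the new basis
        return postRoomToWorld * glm::inverse(preRoomToWorld) * worldTransform;
    }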
@@ -15,6 +15,7 @@
 #include <QUuid>
 
 #include <EntityItem.h>
+#include <AnimPose.h>
 #include <ObjectActionSpring.h>
 
 #include "avatar/MyAvatar.h"
@@ -41,6 +42,8 @@ public:
 
     virtual void prepareForPhysicsSimulation() override;
 
+    void lateAvatarUpdate(const AnimPose& prePhysicsRoomPose, const AnimPose& postAvatarUpdateRoomPose);
+
 private:
     void doKinematicUpdate(float deltaTimeStep);
 
@@ -31,6 +31,7 @@
 #include <SettingHandle.h>
 #include <UsersScriptingInterface.h>
 #include <UUID.h>
+#include <AvatarData.h>
 
 #include "Application.h"
 #include "Avatar.h"
@@ -323,7 +324,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
         // an id of null. Thus this code handles any collision in which one of the participating objects is
         // my avatar. (Other user machines will make a similar analysis and inject sound for their collisions.)
         if (collision.idA.isNull() || collision.idB.isNull()) {
-            MyAvatar* myAvatar = getMyAvatar();
+            auto myAvatar = getMyAvatar();
             auto collisionSound = myAvatar->getCollisionSound();
             if (collisionSound) {
                 const auto characterController = myAvatar->getCharacterController();
@@ -399,7 +400,7 @@ void AvatarManager::updateAvatarRenderStatus(bool shouldRenderAvatars) {
 
 
 AvatarSharedPointer AvatarManager::getAvatarBySessionID(const QUuid& sessionID) {
-    if (sessionID == _myAvatar->getSessionUUID()) {
+    if (sessionID == AVATAR_SELF_ID || sessionID == _myAvatar->getSessionUUID()) {
         return _myAvatar;
     }
 
@@ -39,7 +39,7 @@ public:
 
     void init();
 
-    MyAvatar* getMyAvatar() { return _myAvatar.get(); }
+    std::shared_ptr<MyAvatar> getMyAvatar() { return _myAvatar; }
     AvatarSharedPointer getAvatarBySessionID(const QUuid& sessionID) override;
 
     void updateMyAvatar(float deltaTime);
@@ -45,6 +45,7 @@
 #include "Application.h"
 #include "devices/Faceshift.h"
 #include "AvatarManager.h"
+#include "AvatarActionHold.h"
 #include "Menu.h"
 #include "MyAvatar.h"
 #include "Physics.h"

@@ -1309,6 +1310,8 @@ void MyAvatar::prepareForPhysicsSimulation() {
     } else {
         _follow.deactivate();
     }
+
+    _prePhysicsRoomPose = AnimPose(_sensorToWorldMatrix);
 }

 void MyAvatar::harvestResultsFromPhysicsSimulation(float deltaTime) {

@@ -1549,8 +1552,11 @@ void MyAvatar::postUpdate(float deltaTime) {

         DebugDraw::getInstance().updateMyAvatarPos(getPosition());
         DebugDraw::getInstance().updateMyAvatarRot(getOrientation());
     }
+
+    AnimPose postUpdateRoomPose(_sensorToWorldMatrix);
+    updateHoldActions(_prePhysicsRoomPose, postUpdateRoomPose);
 }

 void MyAvatar::preDisplaySide(RenderArgs* renderArgs) {

@@ -2257,3 +2263,35 @@ glm::vec3 MyAvatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
         }
     }
 }
+
+// thread-safe
+void MyAvatar::addHoldAction(AvatarActionHold* holdAction) {
+    std::lock_guard<std::mutex> guard(_holdActionsMutex);
+    _holdActions.push_back(holdAction);
+}
+
+// thread-safe
+void MyAvatar::removeHoldAction(AvatarActionHold* holdAction) {
+    std::lock_guard<std::mutex> guard(_holdActionsMutex);
+    auto iter = std::find(std::begin(_holdActions), std::end(_holdActions), holdAction);
+    if (iter != std::end(_holdActions)) {
+        _holdActions.erase(iter);
+    }
+}
+
+void MyAvatar::updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose& postUpdatePose) {
+    EntityTreeRenderer* entityTreeRenderer = qApp->getEntities();
+    EntityTreePointer entityTree = entityTreeRenderer ? entityTreeRenderer->getTree() : nullptr;
+    if (entityTree) {
+        // to prevent actions from adding or removing themselves from the _holdActions vector
+        // while we are iterating, we need to enter a critical section.
+        std::lock_guard<std::mutex> guard(_holdActionsMutex);
+
+        // lateAvatarUpdate will modify entity position & orientation, so we need an entity write lock
+        entityTree->withWriteLock([&] {
+            for (auto& holdAction : _holdActions) {
+                holdAction->lateAvatarUpdate(prePhysicsPose, postUpdatePose);
+            }
+        });
+    }
+}
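
A minimal sketch of how a hold action might register itself with the new addHoldAction()/removeHoldAction() hooks, assuming registration happens in the action's constructor and destructor; the registration site itself is not shown in this excerpt, so treat the two function bodies below as illustrative only:

    // Hypothetical registration sketch, not lines from this diff.
    AvatarActionHold::AvatarActionHold(const QUuid& id, EntityItemPointer ownerEntity) :
        ObjectActionSpring(id, ownerEntity)
    {
        auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        myAvatar->addHoldAction(this);    // guarded by _holdActionsMutex inside MyAvatar
    }

    AvatarActionHold::~AvatarActionHold() {
        auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        myAvatar->removeHoldAction(this); // keeps updateHoldActions() from iterating a dangling pointer
    }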
@@ -26,6 +26,7 @@
 #include "MyCharacterController.h"
 #include <ThreadSafeValueCache.h>

+class AvatarActionHold;
 class ModelItemID;

 enum DriveKeys {

@@ -277,6 +278,10 @@ public:
     virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
     virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;

+    void addHoldAction(AvatarActionHold* holdAction);  // thread-safe
+    void removeHoldAction(AvatarActionHold* holdAction);  // thread-safe
+    void updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose& postUpdatePose);
+
 public slots:
     void increaseSize();
     void decreaseSize();

@@ -488,6 +493,10 @@ private:

     bool _hmdLeanRecenterEnabled = true;

+    AnimPose _prePhysicsRoomPose;
+    std::mutex _holdActionsMutex;
+    std::vector<AvatarActionHold*> _holdActions;
+
     float AVATAR_MOVEMENT_ENERGY_CONSTANT { 0.001f };
     float AUDIO_ENERGY_CONSTANT { 0.000001f };
     float MAX_AVATAR_MOVEMENT_PER_FRAME { 30.0f };
@@ -29,6 +29,7 @@
 #include "InterfaceLogging.h"
 #include "UserActivityLogger.h"
 #include "MainWindow.h"
+#include <QtCore/QProcess>

 #ifdef HAS_BUGSPLAT
 #include <BuildInfo.h>

@@ -121,6 +122,29 @@ int main(int argc, const char* argv[]) {
         }
     }

+    QCommandLineParser parser;
+    QCommandLineOption runServerOption("runServer", "Whether to run the server");
+    QCommandLineOption serverContentPathOption("serverContentPath", "Where to find server content", "serverContentPath");
+    parser.addOption(runServerOption);
+    parser.addOption(serverContentPathOption);
+    parser.parse(arguments);
+    if (parser.isSet(runServerOption)) {
+        QString applicationDirPath = QFileInfo(arguments[0]).path();
+        QString serverPath = applicationDirPath + "/server-console/server-console.exe";
+        qDebug() << "Application dir path is: " << applicationDirPath;
+        qDebug() << "Server path is: " << serverPath;
+        QStringList args;
+        if (parser.isSet(serverContentPathOption)) {
+            QString serverContentPath = QFileInfo(arguments[0]).path() + "/" + parser.value(serverContentPathOption);
+            args << "--" << "--contentPath" << serverContentPath;
+        }
+        qDebug() << QFileInfo(arguments[0]).path();
+        qDebug() << QProcess::startDetached(serverPath, args);
+
+        // Sleep a short amount of time to give the server a chance to start
+        usleep(2000000);
+    }
+
     QElapsedTimer startupTime;
     startupTime.start();
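
For reference, a hypothetical launch that exercises the two new options might look like the following; the binary name and content folder are placeholders, and the server console is expected at server-console/server-console.exe next to the interface executable as shown in the hunk above:

    interface.exe --runServer --serverContentPath content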
@@ -13,10 +13,12 @@

 #include <QtScript/QScriptContext>

+#include <avatar/AvatarManager.h>
 #include <display-plugins/DisplayPlugin.h>
 #include <display-plugins/CompositorHelper.h>
 #include <OffscreenUi.h>
-#include <avatar/AvatarManager.h>
+#include <plugins/PluginUtils.h>

 #include "Application.h"

 HMDScriptingInterface::HMDScriptingInterface() {

@@ -47,6 +49,14 @@ glm::vec2 HMDScriptingInterface::overlayToSpherical(const glm::vec2 & position)
     return qApp->getApplicationCompositor().overlayToSpherical(position);
 }

+bool HMDScriptingInterface::isHMDAvailable() {
+    return PluginUtils::isHMDAvailable();
+}
+
+bool HMDScriptingInterface::isHandControllerAvailable() {
+    return PluginUtils::isHandControllerAvailable();
+}
+
 QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {
     glm::vec3 hudIntersection;
     auto instance = DependencyManager::get<HMDScriptingInterface>();

@@ -79,7 +89,7 @@ bool HMDScriptingInterface::getHUDLookAtPosition3D(glm::vec3& result) const {
 }

 glm::mat4 HMDScriptingInterface::getWorldHMDMatrix() const {
-    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     return myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
 }

@@ -38,6 +38,9 @@ public:
     Q_INVOKABLE QString preferredAudioInput() const;
     Q_INVOKABLE QString preferredAudioOutput() const;

+    Q_INVOKABLE bool isHMDAvailable();
+    Q_INVOKABLE bool isHandControllerAvailable();
+
     Q_INVOKABLE bool setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const;

     Q_INVOKABLE void disableHandLasers(int hands) const;
@@ -125,3 +125,8 @@ void MenuScriptingInterface::setIsOptionChecked(const QString& menuOption, bool
         Q_ARG(const QString&, menuOption),
         Q_ARG(bool, isChecked));
 }
+
+void MenuScriptingInterface::triggerOption(const QString& menuOption) {
+    QMetaObject::invokeMethod(Menu::getInstance(), "triggerOption", Q_ARG(const QString&, menuOption));
+}

@@ -48,6 +48,8 @@ public slots:

     bool isOptionChecked(const QString& menuOption);
     void setIsOptionChecked(const QString& menuOption, bool isChecked);

+    void triggerOption(const QString& menuOption);
+
 signals:
     void menuItemEvent(const QString& menuItem);

@@ -206,3 +206,7 @@ void WindowScriptingInterface::takeSnapshot(bool notify, float aspectRatio) {
 void WindowScriptingInterface::shareSnapshot(const QString& path) {
     qApp->shareSnapshot(path);
 }
+
+bool WindowScriptingInterface::isPhysicsEnabled() {
+    return qApp->isPhysicsEnabled();
+}

@@ -54,6 +54,7 @@ public slots:
     void copyToClipboard(const QString& text);
     void takeSnapshot(bool notify = true, float aspectRatio = 0.0f);
     void shareSnapshot(const QString& path);
+    bool isPhysicsEnabled();

 signals:
     void domainChanged(const QString& domainHostname);
@@ -38,6 +38,7 @@ AddressBarDialog::AddressBarDialog(QQuickItem* parent) : OffscreenQmlDialog(pare
     });
     _backEnabled = !(DependencyManager::get<AddressManager>()->getBackStack().isEmpty());
     _forwardEnabled = !(DependencyManager::get<AddressManager>()->getForwardStack().isEmpty());
+    connect(addressManager.data(), &AddressManager::hostChanged, this, &AddressBarDialog::metaverseServerUrlChanged);
     connect(DependencyManager::get<DialogsManager>().data(), &DialogsManager::setUseFeed, this, &AddressBarDialog::setUseFeed);
     connect(qApp, &Application::receivedHifiSchemeURL, this, &AddressBarDialog::receivedHifiSchemeURL);
 }

@@ -37,7 +37,7 @@ signals:
     void forwardEnabledChanged();
     void useFeedChanged();
     void receivedHifiSchemeURL(const QString& url);
-    void metaverseServerUrlChanged(); // While it is a constant, qml will complain about not seeing a change signal.
+    void metaverseServerUrlChanged();

 protected:
     void displayAddressOfflineMessage();
@@ -40,15 +40,6 @@ ApplicationOverlay::ApplicationOverlay()
     auto geometryCache = DependencyManager::get<GeometryCache>();
     _domainStatusBorder = geometryCache->allocateID();
     _magnifierBorder = geometryCache->allocateID();
-
-    // Once we move UI rendering and screen rendering to different
-    // threads, we need to use a sync object to determine when
-    // the current UI texture is no longer being read from, and only
-    // then release it back to the UI for re-use
-    auto offscreenUi = DependencyManager::get<OffscreenUi>();
-    connect(offscreenUi.data(), &OffscreenUi::textureUpdated, this, [&](GLuint textureId) {
-        _uiTexture = textureId;
-    });
 }

 ApplicationOverlay::~ApplicationOverlay() {

@@ -96,18 +87,32 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {

 void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
     PROFILE_RANGE(__FUNCTION__);
-    if (_uiTexture) {
-        gpu::Batch& batch = *renderArgs->_batch;
-        auto geometryCache = DependencyManager::get<GeometryCache>();
-
-        geometryCache->useSimpleDrawPipeline(batch);
-        batch.setProjectionTransform(mat4());
-        batch.setModelTransform(Transform());
-        batch.resetViewTransform();
-        batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _uiTexture);
-
-        geometryCache->renderUnitQuad(batch, glm::vec4(1));
+    if (!_uiTexture) {
+        _uiTexture = gpu::TexturePointer(gpu::Texture::createExternal2D([](uint32_t recycleTexture, void* recycleFence){
+            DependencyManager::get<OffscreenUi>()->releaseTexture({ recycleTexture, recycleFence });
+        }));
+        _uiTexture->setSource(__FUNCTION__);
     }
+    // Once we move UI rendering and screen rendering to different
+    // threads, we need to use a sync object to determine when
+    // the current UI texture is no longer being read from, and only
+    // then release it back to the UI for re-use
+    auto offscreenUi = DependencyManager::get<OffscreenUi>();
+
+    OffscreenQmlSurface::TextureAndFence newTextureAndFence;
+    bool newTextureAvailable = offscreenUi->fetchTexture(newTextureAndFence);
+    if (newTextureAvailable) {
+        _uiTexture->setExternalTexture(newTextureAndFence.first, newTextureAndFence.second);
+    }
+    auto geometryCache = DependencyManager::get<GeometryCache>();
+    gpu::Batch& batch = *renderArgs->_batch;
+    geometryCache->useSimpleDrawPipeline(batch);
+    batch.setProjectionTransform(mat4());
+    batch.setModelTransform(Transform());
+    batch.resetViewTransform();
+    batch.setResourceTexture(0, _uiTexture);
+    geometryCache->renderUnitQuad(batch, glm::vec4(1));
 }

 void ApplicationOverlay::renderAudioScope(RenderArgs* renderArgs) {
@@ -40,13 +40,13 @@ private:

     float _alpha{ 1.0f };
     float _trailingAudioLoudness{ 0.0f };
-    uint32_t _uiTexture{ 0 };

     int _domainStatusBorder;
     int _magnifierBorder;

     ivec2 _previousBorderSize{ -1 };

+    gpu::TexturePointer _uiTexture;
     gpu::TexturePointer _overlayDepthTexture;
     gpu::TexturePointer _overlayColorTexture;
     gpu::FramebufferPointer _overlayFramebuffer;
@@ -43,7 +43,7 @@ bool OverlayConductor::headOutsideOverlay() const {

 bool OverlayConductor::updateAvatarIsAtRest() {

-    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

     const quint64 REST_ENABLE_TIME_USECS = 1000 * 1000; // 1 s
     const quint64 REST_DISABLE_TIME_USECS = 200 * 1000; // 200 ms

@@ -69,7 +69,7 @@ bool OverlayConductor::updateAvatarIsAtRest() {
 }

 bool OverlayConductor::updateAvatarHasDriveInput() {
-    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

     const quint64 DRIVE_ENABLE_TIME_USECS = 200 * 1000; // 200 ms
     const quint64 DRIVE_DISABLE_TIME_USECS = 1000 * 1000; // 1 s

@@ -103,7 +103,7 @@ void OverlayConductor::update(float dt) {
     auto offscreenUi = DependencyManager::get<OffscreenUi>();
     bool currentVisible = !offscreenUi->getDesktop()->property("pinned").toBool();

-    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     // centerUI when hmd mode is first enabled and mounted
     if (qApp->isHMDMode() && qApp->getActiveDisplayPlugin()->isDisplayVisible()) {
         if (!_hmdMode) {

@@ -32,7 +32,7 @@
 void setupPreferences() {
     auto preferences = DependencyManager::get<Preferences>();

-    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+    auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
     static const QString AVATAR_BASICS { "Avatar Basics" };
     {
         auto getter = [=]()->QString { return myAvatar->getDisplayName(); };

@@ -170,7 +170,7 @@ void Stats::updateStats(bool force) {
     STAT_UPDATE(entitiesPing, octreeServerCount ? totalPingOctree / octreeServerCount : -1);

     // Third column, avatar stats
-    MyAvatar* myAvatar = avatarManager->getMyAvatar();
+    auto myAvatar = avatarManager->getMyAvatar();
     glm::vec3 avatarPos = myAvatar->getPosition();
     STAT_UPDATE(position, QVector3D(avatarPos.x, avatarPos.y, avatarPos.z));
     STAT_UPDATE_FLOAT(speed, glm::length(myAvatar->getVelocity()), 0.01f);
@@ -124,6 +124,12 @@ void Line3DOverlay::setProperties(const QVariantMap& originalProperties) {
     }
     properties.remove("start"); // so that Base3DOverlay doesn't respond to it

+    auto localStart = properties["localStart"];
+    if (localStart.isValid()) {
+        _start = vec3FromVariant(localStart);
+    }
+    properties.remove("localStart"); // so that Base3DOverlay doesn't respond to it
+
     auto end = properties["end"];
     // if "end" property was not there, check to see if they included aliases: endPoint
     if (!end.isValid()) {

@@ -133,6 +139,12 @@ void Line3DOverlay::setProperties(const QVariantMap& originalProperties) {
         setEnd(vec3FromVariant(end));
     }

+    auto localEnd = properties["localEnd"];
+    if (localEnd.isValid()) {
+        _end = vec3FromVariant(localEnd);
+    }
+    properties.remove("localEnd"); // so that Base3DOverlay doesn't respond to it
+
     auto glow = properties["glow"];
     if (glow.isValid()) {
         setGlow(glow.toFloat());
@@ -79,11 +79,10 @@ void ModelOverlay::render(RenderArgs* args) {
         _model->removeFromScene(scene, pendingChanges);
         _model->addToScene(scene, pendingChanges);
     }
-    scene->enqueuePendingChanges(pendingChanges);

-    if (!_visible) {
-        return;
-    }
+    _model->setVisibleInScene(_visible, scene);
+    scene->enqueuePendingChanges(pendingChanges);
 }

 void ModelOverlay::setProperties(const QVariantMap& properties) {

@@ -123,17 +122,8 @@ void ModelOverlay::setProperties(const QVariantMap& properties) {
     auto texturesValue = properties["textures"];
     if (texturesValue.isValid() && texturesValue.canConvert(QVariant::Map)) {
         QVariantMap textureMap = texturesValue.toMap();
-        foreach(const QString& key, textureMap.keys()) {
-            QUrl newTextureURL = textureMap[key].toUrl();
-            qDebug() << "Updating texture named" << key << "to texture at URL" << newTextureURL;
-
-            QMetaObject::invokeMethod(_model.get(), "setTextureWithNameToURL", Qt::AutoConnection,
-                                      Q_ARG(const QString&, key),
-                                      Q_ARG(const QUrl&, newTextureURL));
-
-            _modelTextures[key] = newTextureURL; // Keep local track of textures for getProperty()
-        }
+        QMetaObject::invokeMethod(_model.get(), "setTextures", Qt::AutoConnection,
+                                  Q_ARG(const QVariantMap&, textureMap));
     }
 }
@@ -64,7 +64,7 @@ namespace render {
         if (args) {
             if (overlay->getAnchor() == Overlay::MY_AVATAR) {
                 auto batch = args->_batch;
-                MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
+                auto avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
                 glm::quat myAvatarRotation = avatar->getOrientation();
                 glm::vec3 myAvatarPosition = avatar->getPosition();
                 float angle = glm::degrees(glm::angle(myAvatarRotation));
@@ -73,16 +73,18 @@ void Web3DOverlay::render(RenderArgs* args) {
     QOpenGLContext * currentContext = QOpenGLContext::currentContext();
     QSurface * currentSurface = currentContext->surface();
     if (!_webSurface) {
-        _webSurface = new OffscreenQmlSurface();
+        auto deleter = [](OffscreenQmlSurface* webSurface) {
+            AbstractViewStateInterface::instance()->postLambdaEvent([webSurface] {
+                webSurface->deleteLater();
+            });
+        };
+        _webSurface = QSharedPointer<OffscreenQmlSurface>(new OffscreenQmlSurface(), deleter);
         _webSurface->create(currentContext);
         _webSurface->setBaseUrl(QUrl::fromLocalFile(PathUtils::resourcesPath() + "/qml/controls/"));
         _webSurface->load("WebView.qml");
         _webSurface->resume();
         _webSurface->getRootItem()->setProperty("url", _url);
         _webSurface->resize(QSize(_resolution.x, _resolution.y));
-        _connection = QObject::connect(_webSurface, &OffscreenQmlSurface::textureUpdated, [&](GLuint textureId) {
-            _texture = textureId;
-        });
         currentContext->makeCurrent(currentSurface);
     }

@@ -97,14 +99,22 @@ void Web3DOverlay::render(RenderArgs* args) {
         transform.postScale(vec3(getDimensions(), 1.0f));
     }

-    Q_ASSERT(args->_batch);
-    gpu::Batch& batch = *args->_batch;
-    if (_texture) {
-        batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _texture);
-    } else {
-        batch.setResourceTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
+    if (!_texture) {
+        auto webSurface = _webSurface;
+        _texture = gpu::TexturePointer(gpu::Texture::createExternal2D([webSurface](uint32_t recycleTexture, void* recycleFence) {
+            webSurface->releaseTexture({ recycleTexture, recycleFence });
+        }));
+        _texture->setSource(__FUNCTION__);
+    }
+    OffscreenQmlSurface::TextureAndFence newTextureAndFence;
+    bool newTextureAvailable = _webSurface->fetchTexture(newTextureAndFence);
+    if (newTextureAvailable) {
+        _texture->setExternalTexture(newTextureAndFence.first, newTextureAndFence.second);
     }
+
+    Q_ASSERT(args->_batch);
+    gpu::Batch& batch = *args->_batch;
+    batch.setResourceTexture(0, _texture);
     batch.setModelTransform(transform);
     auto geometryCache = DependencyManager::get<GeometryCache>();
     if (color.a < OPAQUE_ALPHA_THRESHOLD) {

@@ -41,9 +41,9 @@ public:
     virtual Web3DOverlay* createClone() const override;

 private:
-    OffscreenQmlSurface* _webSurface{ nullptr };
+    QSharedPointer<OffscreenQmlSurface> _webSurface;
     QMetaObject::Connection _connection;
-    uint32_t _texture{ 0 };
+    gpu::TexturePointer _texture;
     QString _url;
     float _dpi;
     vec2 _resolution{ 640, 480 };
@@ -115,7 +115,7 @@ AudioClient::AudioClient() :
     _loopbackAudioOutput(NULL),
     _loopbackOutputDevice(NULL),
     _inputRingBuffer(0),
-    _receivedAudioStream(0, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES),
+    _receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES),
     _isStereoInput(false),
     _outputStarveDetectionStartTimeMsec(0),
    _outputStarveDetectionCount(0),

@@ -817,6 +817,13 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
         return;
     }

+    // NOTE: we assume the inputFormat and the outputFormat are the same, since on any modern
+    // multimedia OS they should be. If there is a device that this is not true for, we can
+    // add back support to do resampling.
+    if (_inputFormat.sampleRate() != _outputFormat.sampleRate()) {
+        return;
+    }
+
     // if this person wants local loopback add that to the locally injected audio
     // if there is reverb apply it to local audio and subtract the origin samples

@@ -833,11 +840,6 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
         }
     }

-    // NOTE: we assume the inputFormat and the outputFormat are the same, since on any modern
-    // multimedia OS they should be. If there is a device that this is not true for, we can
-    // add back support to do resampling.
-    Q_ASSERT(_inputFormat.sampleRate() == _outputFormat.sampleRate());
-
     static QByteArray loopBackByteArray;

     int numInputSamples = inputByteArray.size() / AudioConstants::SAMPLE_SIZE;

@@ -1150,9 +1152,9 @@ bool AudioClient::outputLocalInjector(bool isStereo, AudioInjector* injector) {
 }

 void AudioClient::outputFormatChanged() {
-    int outputFormatChannelCountTimesSampleRate = _outputFormat.channelCount() * _outputFormat.sampleRate();
-    _outputFrameSize = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * outputFormatChannelCountTimesSampleRate / _desiredOutputFormat.sampleRate();
-    _receivedAudioStream.outputFormatChanged(outputFormatChannelCountTimesSampleRate);
+    _outputFrameSize = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * _outputFormat.channelCount() * _outputFormat.sampleRate()) /
+        _desiredOutputFormat.sampleRate();
+    _receivedAudioStream.outputFormatChanged(_outputFormat.sampleRate(), _outputFormat.channelCount());
 }

 bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {

@@ -1362,7 +1364,7 @@ int AudioClient::setOutputBufferSize(int numFrames, bool persist) {
 // proportional to the accelerator ratio.

 #ifdef Q_OS_WIN
-const float AudioClient::CALLBACK_ACCELERATOR_RATIO = 1.0f;
+const float AudioClient::CALLBACK_ACCELERATOR_RATIO = IsWindows8OrGreater() ? 1.0f : 0.25f;
 #endif

 #ifdef Q_OS_MAC
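
A standalone sketch of the frame-size arithmetic in outputFormatChanged() above, using assumed values for the network constants (240 samples per channel at a 24 kHz network rate, the usual stock values) and an example 48 kHz stereo output device:

    #include <cstdio>

    int main() {
        const int networkFrameSamplesPerChannel = 240; // assumed AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL
        const int networkSampleRate = 24000;           // assumed _desiredOutputFormat / AudioConstants::SAMPLE_RATE
        const int deviceChannelCount = 2;              // example output device: stereo
        const int deviceSampleRate = 48000;            // example output device: 48 kHz

        // same formula as the _outputFrameSize assignment in the hunk above
        int outputFrameSize = (networkFrameSamplesPerChannel * deviceChannelCount * deviceSampleRate) / networkSampleRate;
        printf("device samples per network frame: %d\n", outputFrameSize); // prints 960
        return 0;
    }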
@@ -46,10 +46,11 @@ static const int STATS_FOR_STATS_PACKET_WINDOW_SECONDS = 30;
 // _currentJitterBufferFrames is updated with the time-weighted avg and the running time-weighted avg is reset.
 static const quint64 FRAMES_AVAILABLE_STAT_WINDOW_USECS = 10 * USECS_PER_SECOND;

-InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames) :
-    _ringBuffer(numFrameSamples, numFramesCapacity),
-    _dynamicJitterBufferEnabled(numStaticJitterFrames == -1),
-    _staticJitterBufferFrames(std::max(numStaticJitterFrames, DEFAULT_STATIC_JITTER_FRAMES)),
+InboundAudioStream::InboundAudioStream(int numChannels, int numFrames, int numBlocks, int numStaticJitterBlocks) :
+    _ringBuffer(numChannels * numFrames, numBlocks),
+    _numChannels(numChannels),
+    _dynamicJitterBufferEnabled(numStaticJitterBlocks == -1),
+    _staticJitterBufferFrames(std::max(numStaticJitterBlocks, DEFAULT_STATIC_JITTER_FRAMES)),
     _desiredJitterBufferFrames(_dynamicJitterBufferEnabled ? 1 : _staticJitterBufferFrames),
     _incomingSequenceNumberStats(STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
     _starveHistory(STARVE_HISTORY_CAPACITY),

@@ -121,11 +122,11 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {

     packetReceivedUpdateTimingStats();

-    int networkSamples;
+    int networkFrames;

     // parse the info after the seq number and before the audio data (the stream properties)
     int prePropertyPosition = message.getPosition();
-    int propertyBytes = parseStreamProperties(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead()), networkSamples);
+    int propertyBytes = parseStreamProperties(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead()), networkFrames);
     message.seek(prePropertyPosition + propertyBytes);

     // handle this packet based on its arrival status.

@@ -135,7 +136,7 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
             // NOTE: we assume that each dropped packet contains the same number of samples
             // as the packet we just received.
             int packetsDropped = arrivalInfo._seqDiffFromExpected;
-            writeSamplesForDroppedPackets(packetsDropped * networkSamples);
+            writeFramesForDroppedPackets(packetsDropped * networkFrames);

             // fall through to OnTime case
         }

@@ -143,7 +144,7 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
             // Packet is on time; parse its data to the ringbuffer
             if (message.getType() == PacketType::SilentAudioFrame) {
                 // FIXME - Some codecs need to know about these silent frames... and can produce better output
-                writeDroppableSilentSamples(networkSamples);
+                writeDroppableSilentFrames(networkFrames);
             } else {
                 // note: PCM and no codec are identical
                 bool selectedPCM = _selectedCodecName == "pcm" || _selectedCodecName == "";

@@ -153,7 +154,7 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
                     parseAudioData(message.getType(), afterProperties);
                 } else {
                     qDebug() << "Codec mismatch: expected" << _selectedCodecName << "got" << codecInPacket << "writing silence";
-                    writeDroppableSilentSamples(networkSamples);
+                    writeDroppableSilentFrames(networkFrames);
                     // inform others of the mismatch
                     auto sendingNode = DependencyManager::get<NodeList>()->nodeWithUUID(message.getSourceID());
                     emit mismatchedAudioCodec(sendingNode, _selectedCodecName, codecInPacket);

@@ -218,12 +219,13 @@ int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packet
     return _ringBuffer.writeData(decodedBuffer.data(), actualSize);
 }

-int InboundAudioStream::writeDroppableSilentSamples(int silentSamples) {
+int InboundAudioStream::writeDroppableSilentFrames(int silentFrames) {
     if (_decoder) {
-        _decoder->trackLostFrames(silentSamples);
+        _decoder->trackLostFrames(silentFrames);
     }

     // calculate how many silent frames we should drop.
+    int silentSamples = silentFrames * _numChannels;
     int samplesPerFrame = _ringBuffer.getNumFrameSamples();
     int desiredJitterBufferFramesPlusPadding = _desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING;
     int numSilentFramesToDrop = 0;

@@ -414,14 +416,14 @@ void InboundAudioStream::packetReceivedUpdateTimingStats() {
     _lastPacketReceivedTime = now;
 }

-int InboundAudioStream::writeSamplesForDroppedPackets(int networkSamples) {
-    return writeLastFrameRepeatedWithFade(networkSamples);
+int InboundAudioStream::writeFramesForDroppedPackets(int networkFrames) {
+    return writeLastFrameRepeatedWithFade(networkFrames);
 }

-int InboundAudioStream::writeLastFrameRepeatedWithFade(int samples) {
+int InboundAudioStream::writeLastFrameRepeatedWithFade(int frames) {
     AudioRingBuffer::ConstIterator frameToRepeat = _ringBuffer.lastFrameWritten();
     int frameSize = _ringBuffer.getNumFrameSamples();
-    int samplesToWrite = samples;
+    int samplesToWrite = frames * _numChannels;
     int indexOfRepeat = 0;
     do {
         int samplesToWriteThisIteration = std::min(samplesToWrite, frameSize);

@@ -434,7 +436,7 @@ int InboundAudioStream::writeLastFrameRepeatedWithFade(int samples) {
         indexOfRepeat++;
     } while (samplesToWrite > 0);

-    return samples;
+    return frames;
 }

 AudioStreamStats InboundAudioStream::getAudioStreamStats() const {
@@ -47,7 +47,7 @@ public:
     static const bool REPETITION_WITH_FADE;

     InboundAudioStream() = delete;
-    InboundAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames = -1);
+    InboundAudioStream(int numChannels, int numFrames, int numBlocks, int numStaticJitterBlocks);
     ~InboundAudioStream();

     void reset();

@@ -115,7 +115,7 @@ public slots:
 private:
     void packetReceivedUpdateTimingStats();

-    int writeSamplesForDroppedPackets(int networkSamples);
+    int writeFramesForDroppedPackets(int networkFrames);

     void popSamplesNoCheck(int samples);
     void framesAvailableChanged();

@@ -134,16 +134,17 @@ protected:
     /// default implementation assumes packet contains raw audio samples after stream properties
     virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties);

-    /// writes silent samples to the buffer that may be dropped to reduce latency caused by the buffer
-    virtual int writeDroppableSilentSamples(int silentSamples);
+    /// writes silent frames to the buffer that may be dropped to reduce latency caused by the buffer
+    virtual int writeDroppableSilentFrames(int silentFrames);

     /// writes the last written frame repeatedly, gradually fading to silence.
     /// used for writing samples for dropped packets.
-    virtual int writeLastFrameRepeatedWithFade(int samples);
+    virtual int writeLastFrameRepeatedWithFade(int frames);

 protected:

     AudioRingBuffer _ringBuffer;
+    int _numChannels;

     bool _lastPopSucceeded { false };
     AudioRingBuffer::ConstIterator _lastPopOutput;
@@ -11,5 +11,8 @@

 #include "MixedAudioStream.h"

-MixedAudioStream::MixedAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames) :
-    InboundAudioStream(numFrameSamples, numFramesCapacity, numStaticJitterFrames) {}
+#include "AudioConstants.h"
+
+MixedAudioStream::MixedAudioStream(int numFramesCapacity, int numStaticJitterFrames) :
+    InboundAudioStream(AudioConstants::STEREO, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL,
+                       numFramesCapacity, numStaticJitterFrames) {}

@@ -16,7 +16,7 @@

 class MixedAudioStream : public InboundAudioStream {
 public:
-    MixedAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames = -1);
+    MixedAudioStream(int numFramesCapacity, int numStaticJitterFrames = -1);

     float getNextOutputFrameLoudness() const { return _ringBuffer.getNextOutputFrameLoudness(); }
 };
@@ -12,33 +12,30 @@
 #include "MixedProcessedAudioStream.h"
 #include "AudioLogging.h"

-static const int STEREO_FACTOR = 2;
-
-MixedProcessedAudioStream::MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames)
-    : InboundAudioStream(numFrameSamples, numFramesCapacity, numStaticJitterFrames) {}
+MixedProcessedAudioStream::MixedProcessedAudioStream(int numFramesCapacity, int numStaticJitterFrames)
+    : InboundAudioStream(AudioConstants::STEREO, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL,
+                         numFramesCapacity, numStaticJitterFrames) {}

-void MixedProcessedAudioStream::outputFormatChanged(int outputFormatChannelCountTimesSampleRate) {
-    _outputFormatChannelsTimesSampleRate = outputFormatChannelCountTimesSampleRate;
-    int deviceOutputFrameSize = networkToDeviceSamples(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO);
-    _ringBuffer.resizeForFrameSize(deviceOutputFrameSize);
+void MixedProcessedAudioStream::outputFormatChanged(int sampleRate, int channelCount) {
+    _outputSampleRate = sampleRate;
+    _outputChannelCount = channelCount;
+    int deviceOutputFrameFrames = networkToDeviceFrames(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO / AudioConstants::STEREO);
+    int deviceOutputFrameSamples = deviceOutputFrameFrames * AudioConstants::STEREO;
+    _ringBuffer.resizeForFrameSize(deviceOutputFrameSamples);
 }

-int MixedProcessedAudioStream::writeDroppableSilentSamples(int silentSamples) {
-    int deviceSilentSamplesWritten = InboundAudioStream::writeDroppableSilentSamples(networkToDeviceSamples(silentSamples));
-    emit addedSilence(deviceToNetworkSamples(deviceSilentSamplesWritten) / STEREO_FACTOR);
-    return deviceSilentSamplesWritten;
+int MixedProcessedAudioStream::writeDroppableSilentFrames(int silentFrames) {
+    int deviceSilentFrames = networkToDeviceFrames(silentFrames);
+    int deviceSilentFramesWritten = InboundAudioStream::writeDroppableSilentFrames(deviceSilentFrames);
+    emit addedSilence(deviceToNetworkFrames(deviceSilentFramesWritten));
+    return deviceSilentFramesWritten;
 }

-int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int samples) {
-    int deviceSamplesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(networkToDeviceSamples(samples));
-    emit addedLastFrameRepeatedWithFade(deviceToNetworkSamples(deviceSamplesWritten) / STEREO_FACTOR);
-    return deviceSamplesWritten;
+int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int frames) {
+    int deviceFrames = networkToDeviceFrames(frames);
+    int deviceFramesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(deviceFrames);
+    emit addedLastFrameRepeatedWithFade(deviceToNetworkFrames(deviceFramesWritten));
+    return deviceFramesWritten;
 }

 int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) {

@@ -56,16 +53,16 @@ int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray&

     _ringBuffer.writeData(outputBuffer.data(), outputBuffer.size());
     qCDebug(audiostream, "Wrote %d samples to buffer (%d available)", outputBuffer.size() / (int)sizeof(int16_t), getSamplesAvailable());

     return packetAfterStreamProperties.size();
 }

-int MixedProcessedAudioStream::networkToDeviceSamples(int networkSamples) {
-    return (quint64)networkSamples * (quint64)_outputFormatChannelsTimesSampleRate / (quint64)(STEREO_FACTOR
-        * AudioConstants::SAMPLE_RATE);
+int MixedProcessedAudioStream::networkToDeviceFrames(int networkFrames) {
+    return ((quint64)networkFrames * _outputChannelCount * _outputSampleRate) /
+        (quint64)(AudioConstants::STEREO * AudioConstants::SAMPLE_RATE);
 }

-int MixedProcessedAudioStream::deviceToNetworkSamples(int deviceSamples) {
-    return (quint64)deviceSamples * (quint64)(STEREO_FACTOR * AudioConstants::SAMPLE_RATE)
-        / (quint64)_outputFormatChannelsTimesSampleRate;
+int MixedProcessedAudioStream::deviceToNetworkFrames(int deviceFrames) {
+    return (quint64)deviceFrames * (quint64)(AudioConstants::STEREO * AudioConstants::SAMPLE_RATE) /
+        (_outputSampleRate * _outputChannelCount);
 }
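
The same conversions as networkToDeviceFrames()/deviceToNetworkFrames() above, exercised with assumed constants (AudioConstants::STEREO == 2 and AudioConstants::SAMPLE_RATE == 24000) and an example 48 kHz stereo output device; the concrete numbers here are illustrative, not taken from the diff:

    #include <cassert>
    #include <cstdint>

    int main() {
        const uint64_t outputSampleRate = 48000;  // example device rate
        const uint64_t outputChannelCount = 2;    // example device channel count
        const uint64_t networkChannels = 2;       // assumed AudioConstants::STEREO
        const uint64_t networkSampleRate = 24000; // assumed AudioConstants::SAMPLE_RATE

        auto networkToDeviceFrames = [&](uint64_t networkFrames) {
            return (networkFrames * outputChannelCount * outputSampleRate) / (networkChannels * networkSampleRate);
        };
        auto deviceToNetworkFrames = [&](uint64_t deviceFrames) {
            return (deviceFrames * networkChannels * networkSampleRate) / (outputSampleRate * outputChannelCount);
        };

        assert(networkToDeviceFrames(240) == 480); // one 10 ms network frame doubles at 48 kHz
        assert(deviceToNetworkFrames(480) == 240); // and round-trips back
        return 0;
    }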
@@ -19,7 +19,7 @@ class AudioClient;
 class MixedProcessedAudioStream : public InboundAudioStream {
     Q_OBJECT
 public:
-    MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, int numStaticJitterFrames = -1);
+    MixedProcessedAudioStream(int numFramesCapacity, int numStaticJitterFrames = -1);

 signals:

@@ -30,19 +30,20 @@ signals:
     void processSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);

 public:
-    void outputFormatChanged(int outputFormatChannelCountTimesSampleRate);
+    void outputFormatChanged(int sampleRate, int channelCount);

 protected:
-    int writeDroppableSilentSamples(int silentSamples) override;
-    int writeLastFrameRepeatedWithFade(int samples) override;
+    int writeDroppableSilentFrames(int silentFrames) override;
+    int writeLastFrameRepeatedWithFade(int frames) override;
     int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties) override;

 private:
-    int networkToDeviceSamples(int networkSamples);
-    int deviceToNetworkSamples(int deviceSamples);
+    int networkToDeviceFrames(int networkFrames);
+    int deviceToNetworkFrames(int deviceFrames);

 private:
-    int _outputFormatChannelsTimesSampleRate;
+    quint64 _outputSampleRate;
+    quint64 _outputChannelCount;
 };

 #endif // hifi_MixedProcessedAudioStream_h
@@ -22,10 +22,10 @@
 #include <UUID.h>

 PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, int numStaticJitterFrames) :
-    InboundAudioStream(isStereo
-        ? AudioConstants::NETWORK_FRAME_SAMPLES_STEREO
-        : AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL,
-        AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY, numStaticJitterFrames),
+    InboundAudioStream(isStereo ? AudioConstants::STEREO : AudioConstants::MONO,
+                       AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL,
+                       AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY,
+                       numStaticJitterFrames),
     _type(type),
     _position(0.0f, 0.0f, 0.0f),
     _orientation(0.0f, 0.0f, 0.0f, 0.0f),
@@ -976,10 +976,16 @@ void AvatarData::parseAvatarIdentityPacket(const QByteArray& data, Identity& ide
     packetStream >> identityOut.uuid >> identityOut.skeletonModelURL >> identityOut.attachmentData >> identityOut.displayName >> identityOut.avatarEntityData;
 }

+static const QUrl emptyURL("");
+const QUrl& AvatarData::cannonicalSkeletonModelURL(const QUrl& emptyURL) {
+    // We don't put file urls on the wire, but instead convert to empty.
+    return _skeletonModelURL.scheme() == "file" ? emptyURL : _skeletonModelURL;
+}
+
 bool AvatarData::processAvatarIdentity(const Identity& identity) {
     bool hasIdentityChanged = false;

-    if (_firstSkeletonCheck || (identity.skeletonModelURL != _skeletonModelURL)) {
+    if (_firstSkeletonCheck || (identity.skeletonModelURL != cannonicalSkeletonModelURL(emptyURL))) {
         setSkeletonModelURL(identity.skeletonModelURL);
         hasIdentityChanged = true;
         _firstSkeletonCheck = false;

@@ -1010,8 +1016,7 @@ bool AvatarData::processAvatarIdentity(const Identity& identity) {
 QByteArray AvatarData::identityByteArray() {
     QByteArray identityData;
     QDataStream identityStream(&identityData, QIODevice::Append);
-    QUrl emptyURL("");
-    const QUrl& urlToSend = _skeletonModelURL.scheme() == "file" ? emptyURL : _skeletonModelURL;
+    const QUrl& urlToSend = cannonicalSkeletonModelURL(emptyURL);

     _avatarEntitiesLock.withReadLock([&] {
         identityStream << getSessionUUID() << urlToSend << _attachmentData << _displayName << _avatarEntityData;
@@ -55,6 +55,7 @@ typedef unsigned long long quint64;
 #include <NumericalConstants.h>
 #include <Packed.h>
 #include <ThreadSafeValueCache.h>
+#include <SharedUtil.h>
 
 #include "AABox.h"
 #include "HeadData.h"
@@ -138,10 +139,6 @@ class AttachmentData;
 class Transform;
 using TransformPointer = std::shared_ptr<Transform>;
 
-// When writing out avatarEntities to a QByteArray, if the parentID is the ID of MyAvatar, use this ID instead. This allows
-// the value to be reset when the sessionID changes.
-const QUuid AVATAR_SELF_ID = QUuid("{00000000-0000-0000-0000-000000000001}");
-
 class AvatarData : public QObject, public SpatiallyNestable {
     Q_OBJECT
 
@@ -402,6 +399,7 @@ protected:
     QUrl _skeletonFBXURL;
     QVector<AttachmentData> _attachmentData;
     QString _displayName;
+    const QUrl& cannonicalSkeletonModelURL(const QUrl& empty);
 
     float _displayNameTargetAlpha;
     float _displayNameAlpha;
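
For context on the AVATAR_SELF_ID constant disappearing from this header: the same hunks add #include <SharedUtil.h> here and in EntityScriptingInterface.cpp, which still references the constant, so it appears to be relocated rather than removed. Its job, per the deleted comment, is to act as a fixed sentinel written in place of the local avatar's session ID so that stored parent IDs survive a session change. A rough sketch of resolving such a sentinel, with hypothetical names:

    #include <QUuid>
    #include <QDebug>

    // Sentinel serialized instead of the volatile session UUID.
    static const QUuid AVATAR_SELF_ID("{00000000-0000-0000-0000-000000000001}");

    // Hypothetical resolver: the sentinel always maps to whatever the current
    // session UUID happens to be; every other ID passes through untouched.
    static QUuid resolveParentID(const QUuid& stored, const QUuid& currentSessionID) {
        return stored == AVATAR_SELF_ID ? currentSessionID : stored;
    }

    int main() {
        QUuid session = QUuid::createUuid();                         // changes on every connection
        qDebug() << resolveParentID(AVATAR_SELF_ID, session);        // follows the new session
        qDebug() << resolveParentID(QUuid::createUuid(), session);   // unrelated ID, unchanged
        return 0;
    }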
@@ -6,7 +6,3 @@ link_hifi_libraries(shared plugins ui-plugins gl gpu-gl ui render-utils)
 target_opengl()
 
 GroupSources("src/display-plugins")
-
-if (NOT ANDROID)
-    target_oglplus()
-endif ()
@@ -591,17 +591,17 @@ void EntityTreeRenderer::deleteReleasedModels() {
 
 RayToEntityIntersectionResult EntityTreeRenderer::findRayIntersectionWorker(const PickRay& ray, Octree::lockType lockType,
                                                                     bool precisionPicking, const QVector<EntityItemID>& entityIdsToInclude,
-                                                                    const QVector<EntityItemID>& entityIdsToDiscard) {
+                                                                    const QVector<EntityItemID>& entityIdsToDiscard, bool visibleOnly, bool collidableOnly) {
     RayToEntityIntersectionResult result;
     if (_tree) {
         EntityTreePointer entityTree = std::static_pointer_cast<EntityTree>(_tree);
 
         OctreeElementPointer element;
         EntityItemPointer intersectedEntity = NULL;
-        result.intersects = entityTree->findRayIntersection(ray.origin, ray.direction, element, result.distance,
-            result.face, result.surfaceNormal, entityIdsToInclude, entityIdsToDiscard,
-            (void**)&intersectedEntity, lockType, &result.accurate,
-            precisionPicking);
+        result.intersects = entityTree->findRayIntersection(ray.origin, ray.direction,
+            entityIdsToInclude, entityIdsToDiscard, visibleOnly, collidableOnly, precisionPicking,
+            element, result.distance, result.face, result.surfaceNormal,
+            (void**)&intersectedEntity, lockType, &result.accurate);
         if (result.intersects && intersectedEntity) {
             result.entityID = intersectedEntity->getEntityItemID();
             result.properties = intersectedEntity->getProperties();
@@ -149,7 +149,8 @@ private:
     QList<ModelPointer> _releasedModels;
     RayToEntityIntersectionResult findRayIntersectionWorker(const PickRay& ray, Octree::lockType lockType,
        bool precisionPicking, const QVector<EntityItemID>& entityIdsToInclude = QVector<EntityItemID>(),
-       const QVector<EntityItemID>& entityIdsToDiscard = QVector<EntityItemID>());
+       const QVector<EntityItemID>& entityIdsToDiscard = QVector<EntityItemID>(), bool visibleOnly=false,
+       bool collidableOnly = false);
 
     EntityItemID _currentHoverOverEntityID;
     EntityItemID _currentClickingOnEntityID;
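
Note how the two new flags are added: on the worker declaration above they are trailing parameters with defaults (visibleOnly = false, collidableOnly = false), so existing call sites compile unchanged while new callers can opt in to the filters. The pattern in isolation (generic names, nothing from the engine):

    #include <iostream>
    #include <vector>

    // The old signature took only the ids; the new flags default to "no
    // filtering", so legacy callers keep working without edits.
    static int countHits(const std::vector<int>& ids,
                         bool visibleOnly = false,
                         bool collidableOnly = false) {
        int hits = 0;
        for (int id : ids) {
            bool visible = (id % 2) == 0;       // stand-in properties for the demo
            bool collidable = (id % 3) == 0;
            if (visibleOnly && !visible) { continue; }
            if (collidableOnly && !collidable) { continue; }
            ++hits;
        }
        return hits;
    }

    int main() {
        std::vector<int> ids { 1, 2, 3, 4, 5, 6 };
        std::cout << countHits(ids) << "\n";              // old-style call, no filters
        std::cout << countHits(ids, true, true) << "\n";  // new-style call, both filters
        return 0;
    }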
@ -38,39 +38,6 @@ static uint64_t MAX_NO_RENDER_INTERVAL = 30 * USECS_PER_SECOND;
|
||||||
static int MAX_WINDOW_SIZE = 4096;
|
static int MAX_WINDOW_SIZE = 4096;
|
||||||
static float OPAQUE_ALPHA_THRESHOLD = 0.99f;
|
static float OPAQUE_ALPHA_THRESHOLD = 0.99f;
|
||||||
|
|
||||||
void WebEntityAPIHelper::synthesizeKeyPress(QString key) {
|
|
||||||
if (_renderableWebEntityItem) {
|
|
||||||
_renderableWebEntityItem->synthesizeKeyPress(key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void WebEntityAPIHelper::emitScriptEvent(const QVariant& message) {
|
|
||||||
if (QThread::currentThread() != thread()) {
|
|
||||||
QMetaObject::invokeMethod(this, "emitScriptEvent", Qt::QueuedConnection, Q_ARG(QVariant, message));
|
|
||||||
} else {
|
|
||||||
emit scriptEventReceived(message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void WebEntityAPIHelper::emitWebEvent(const QVariant& message) {
|
|
||||||
if (QThread::currentThread() != thread()) {
|
|
||||||
QMetaObject::invokeMethod(this, "emitWebEvent", Qt::QueuedConnection, Q_ARG(QVariant, message));
|
|
||||||
} else {
|
|
||||||
// special case to handle raising and lowering the virtual keyboard
|
|
||||||
if (message.type() == QVariant::String && message.toString() == "_RAISE_KEYBOARD" && _renderableWebEntityItem) {
|
|
||||||
if (_renderableWebEntityItem) {
|
|
||||||
_renderableWebEntityItem->setKeyboardRaised(true);
|
|
||||||
}
|
|
||||||
} else if (message.type() == QVariant::String && message.toString() == "_LOWER_KEYBOARD" && _renderableWebEntityItem) {
|
|
||||||
if (_renderableWebEntityItem) {
|
|
||||||
_renderableWebEntityItem->setKeyboardRaised(false);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
emit webEventReceived(message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
EntityItemPointer RenderableWebEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
|
EntityItemPointer RenderableWebEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
|
||||||
EntityItemPointer entity{ new RenderableWebEntityItem(entityID) };
|
EntityItemPointer entity{ new RenderableWebEntityItem(entityID) };
|
||||||
entity->setProperties(properties);
|
entity->setProperties(properties);
|
||||||
|
@ -85,21 +52,9 @@ RenderableWebEntityItem::RenderableWebEntityItem(const EntityItemID& entityItemI
|
||||||
_touchDevice.setType(QTouchDevice::TouchScreen);
|
_touchDevice.setType(QTouchDevice::TouchScreen);
|
||||||
_touchDevice.setName("RenderableWebEntityItemTouchDevice");
|
_touchDevice.setName("RenderableWebEntityItemTouchDevice");
|
||||||
_touchDevice.setMaximumTouchPoints(4);
|
_touchDevice.setMaximumTouchPoints(4);
|
||||||
|
|
||||||
_webEntityAPIHelper = new WebEntityAPIHelper;
|
|
||||||
_webEntityAPIHelper->setRenderableWebEntityItem(this);
|
|
||||||
_webEntityAPIHelper->moveToThread(qApp->thread());
|
|
||||||
|
|
||||||
// forward web events to EntityScriptingInterface
|
|
||||||
auto entities = DependencyManager::get<EntityScriptingInterface>();
|
|
||||||
QObject::connect(_webEntityAPIHelper, &WebEntityAPIHelper::webEventReceived, [=](const QVariant& message) {
|
|
||||||
emit entities->webEventReceived(entityItemID, message);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
RenderableWebEntityItem::~RenderableWebEntityItem() {
|
RenderableWebEntityItem::~RenderableWebEntityItem() {
|
||||||
_webEntityAPIHelper->setRenderableWebEntityItem(nullptr);
|
|
||||||
_webEntityAPIHelper->deleteLater();
|
|
||||||
destroyWebSurface();
|
destroyWebSurface();
|
||||||
qDebug() << "Destroyed web entity " << getID();
|
qDebug() << "Destroyed web entity " << getID();
|
||||||
}
|
}
|
||||||
|
@ -129,20 +84,35 @@ bool RenderableWebEntityItem::buildWebSurface(EntityTreeRenderer* renderer) {
|
||||||
// Save the original GL context, because creating a QML surface will create a new context
|
// Save the original GL context, because creating a QML surface will create a new context
|
||||||
QOpenGLContext * currentContext = QOpenGLContext::currentContext();
|
QOpenGLContext * currentContext = QOpenGLContext::currentContext();
|
||||||
QSurface * currentSurface = currentContext->surface();
|
QSurface * currentSurface = currentContext->surface();
|
||||||
_webSurface = new OffscreenQmlSurface();
|
|
||||||
|
auto deleter = [](OffscreenQmlSurface* webSurface) {
|
||||||
|
AbstractViewStateInterface::instance()->postLambdaEvent([webSurface] {
|
||||||
|
webSurface->deleteLater();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
_webSurface = QSharedPointer<OffscreenQmlSurface>(new OffscreenQmlSurface(), deleter);
|
||||||
|
|
||||||
|
// The lifetime of the QML surface MUST be managed by the main thread
|
||||||
|
// Additionally, we MUST use local variables copied by value, rather than
|
||||||
|
// member variables, since they would implicitly refer to a this that
|
||||||
|
// is no longer valid
|
||||||
|
|
||||||
_webSurface->create(currentContext);
|
_webSurface->create(currentContext);
|
||||||
_webSurface->setBaseUrl(QUrl::fromLocalFile(PathUtils::resourcesPath() + "/qml/controls/"));
|
_webSurface->setBaseUrl(QUrl::fromLocalFile(PathUtils::resourcesPath() + "/qml/controls/"));
|
||||||
_webSurface->load("WebView.qml", [&](QQmlContext* context, QObject* obj) {
|
_webSurface->load("WebView.qml", [&](QQmlContext* context, QObject* obj) {
|
||||||
context->setContextProperty("eventBridgeJavaScriptToInject", QVariant(javaScriptToInject));
|
context->setContextProperty("eventBridgeJavaScriptToInject", QVariant(javaScriptToInject));
|
||||||
});
|
});
|
||||||
_webSurface->resume();
|
_webSurface->resume();
|
||||||
_webSurface->getRootItem()->setProperty("eventBridge", QVariant::fromValue(_webEntityAPIHelper));
|
|
||||||
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
|
_webSurface->getRootItem()->setProperty("url", _sourceUrl);
|
||||||
_webSurface->getRootContext()->setContextProperty("desktop", QVariant());
|
_webSurface->getRootContext()->setContextProperty("desktop", QVariant());
|
||||||
_webSurface->getRootContext()->setContextProperty("webEntity", _webEntityAPIHelper);
|
|
||||||
_connection = QObject::connect(_webSurface, &OffscreenQmlSurface::textureUpdated, [&](GLuint textureId) {
|
// forward web events to EntityScriptingInterface
|
||||||
_texture = textureId;
|
auto entities = DependencyManager::get<EntityScriptingInterface>();
|
||||||
|
const EntityItemID entityItemID = getID();
|
||||||
|
QObject::connect(_webSurface.data(), &OffscreenQmlSurface::webEventReceived, [=](const QVariant& message) {
|
||||||
|
emit entities->webEventReceived(entityItemID, message);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Restore the original GL context
|
// Restore the original GL context
|
||||||
currentContext->makeCurrent(currentSurface);
|
currentContext->makeCurrent(currentSurface);
|
||||||
|
|
||||||
|
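
The hunk above swaps manual cleanup for a QSharedPointer whose custom deleter forwards destruction to the main thread (in the engine that hop is AbstractViewStateInterface::postLambdaEvent plus deleteLater). The idiom in isolation, using QMetaObject::invokeMethod as a stand-in for the engine's post-to-main-thread call:

    #include <QCoreApplication>
    #include <QMetaObject>
    #include <QSharedPointer>
    #include <QThread>
    #include <QDebug>

    class Surface : public QObject {
    public:
        ~Surface() override { qDebug() << "destroyed on" << QThread::currentThread(); }
    };

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);

        // Custom deleter: rather than deleting wherever the last reference is
        // dropped, queue the destruction onto the main thread's event loop.
        auto deleter = [&app](Surface* surface) {
            QMetaObject::invokeMethod(&app, [surface] {
                delete surface;                 // runs on the main (GUI) thread
                QCoreApplication::quit();
            }, Qt::QueuedConnection);
        };

        QSharedPointer<Surface> surface(new Surface(), deleter);
        surface.reset();                        // only queues the deletion
        return app.exec();                      // the deletion happens here
    }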
@ -217,20 +187,33 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
|
||||||
// without worrying about excessive overhead.
|
// without worrying about excessive overhead.
|
||||||
_webSurface->resize(QSize(windowSize.x, windowSize.y));
|
_webSurface->resize(QSize(windowSize.x, windowSize.y));
|
||||||
|
|
||||||
|
if (!_texture) {
|
||||||
|
auto webSurface = _webSurface;
|
||||||
|
auto recycler = [webSurface] (uint32_t recycleTexture, void* recycleFence) {
|
||||||
|
webSurface->releaseTexture({ recycleTexture, recycleFence });
|
||||||
|
};
|
||||||
|
_texture = gpu::TexturePointer(gpu::Texture::createExternal2D(recycler));
|
||||||
|
_texture->setSource(__FUNCTION__);
|
||||||
|
}
|
||||||
|
OffscreenQmlSurface::TextureAndFence newTextureAndFence;
|
||||||
|
bool newTextureAvailable = _webSurface->fetchTexture(newTextureAndFence);
|
||||||
|
if (newTextureAvailable) {
|
||||||
|
_texture->setExternalTexture(newTextureAndFence.first, newTextureAndFence.second);
|
||||||
|
}
|
||||||
|
|
||||||
PerformanceTimer perfTimer("RenderableWebEntityItem::render");
|
PerformanceTimer perfTimer("RenderableWebEntityItem::render");
|
||||||
Q_ASSERT(getType() == EntityTypes::Web);
|
Q_ASSERT(getType() == EntityTypes::Web);
|
||||||
static const glm::vec2 texMin(0.0f), texMax(1.0f), topLeft(-0.5f), bottomRight(0.5f);
|
static const glm::vec2 texMin(0.0f), texMax(1.0f), topLeft(-0.5f), bottomRight(0.5f);
|
||||||
|
|
||||||
Q_ASSERT(args->_batch);
|
Q_ASSERT(args->_batch);
|
||||||
gpu::Batch& batch = *args->_batch;
|
gpu::Batch& batch = *args->_batch;
|
||||||
|
|
||||||
bool success;
|
bool success;
|
||||||
batch.setModelTransform(getTransformToCenter(success));
|
batch.setModelTransform(getTransformToCenter(success));
|
||||||
if (!success) {
|
if (!success) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (_texture) {
|
batch.setResourceTexture(0, _texture);
|
||||||
batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _texture);
|
|
||||||
}
|
|
||||||
|
|
||||||
float fadeRatio = _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
|
float fadeRatio = _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
|
||||||
|
|
||||||
|
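
The render() changes above adopt a fetch-and-recycle protocol: the QML surface hands out (texture id, fence) pairs through fetchTexture, the renderer wraps the current one in an external gpu::Texture created with a recycler callback, and releaseTexture returns a pair once the GPU is finished with it. The handoff itself is easy to model without any GL (all names below are stand-ins, not the gpu library's API):

    #include <cstdint>
    #include <deque>
    #include <functional>
    #include <iostream>
    #include <utility>

    // (textureId, fence) pairs as in the diff; the fence is opaque to the consumer.
    using TextureAndFence = std::pair<uint32_t, void*>;

    class FakeSurface {
    public:
        void produce(uint32_t id) { _ready.push_back({ id, nullptr }); }
        bool fetchTexture(TextureAndFence& out) {
            if (_ready.empty()) { return false; }
            out = _ready.front();
            _ready.pop_front();
            return true;
        }
        void releaseTexture(const TextureAndFence& tf) {
            std::cout << "recycled texture " << tf.first << "\n";
            _ready.push_back(tf);               // back into the pool for reuse
        }
    private:
        std::deque<TextureAndFence> _ready;
    };

    int main() {
        FakeSurface surface;
        // The consumer keeps only a recycler callback, like the lambda handed
        // to the external texture in the diff.
        std::function<void(uint32_t, void*)> recycler =
            [&surface](uint32_t id, void* fence) { surface.releaseTexture({ id, fence }); };

        surface.produce(1);
        surface.produce(2);

        TextureAndFence current { 0, nullptr };
        TextureAndFence incoming { 0, nullptr };
        while (surface.fetchTexture(incoming)) {
            if (current.first != 0) { recycler(current.first, current.second); }
            current = incoming;                 // always display the newest frame
        }
        std::cout << "displaying texture " << current.first << "\n";
        return 0;
    }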
@ -344,20 +327,10 @@ void RenderableWebEntityItem::destroyWebSurface() {
|
||||||
_mouseMoveConnection = QMetaObject::Connection();
|
_mouseMoveConnection = QMetaObject::Connection();
|
||||||
QObject::disconnect(_hoverLeaveConnection);
|
QObject::disconnect(_hoverLeaveConnection);
|
||||||
_hoverLeaveConnection = QMetaObject::Connection();
|
_hoverLeaveConnection = QMetaObject::Connection();
|
||||||
|
_webSurface.reset();
|
||||||
// The lifetime of the QML surface MUST be managed by the main thread
|
|
||||||
// Additionally, we MUST use local variables copied by value, rather than
|
|
||||||
// member variables, since they would implicitly refer to a this that
|
|
||||||
// is no longer valid
|
|
||||||
auto webSurface = _webSurface;
|
|
||||||
AbstractViewStateInterface::instance()->postLambdaEvent([webSurface] {
|
|
||||||
webSurface->deleteLater();
|
|
||||||
});
|
|
||||||
_webSurface = nullptr;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
void RenderableWebEntityItem::update(const quint64& now) {
|
void RenderableWebEntityItem::update(const quint64& now) {
|
||||||
auto interval = now - _lastRenderTime;
|
auto interval = now - _lastRenderTime;
|
||||||
if (interval > MAX_NO_RENDER_INTERVAL) {
|
if (interval > MAX_NO_RENDER_INTERVAL) {
|
||||||
|
@ -365,78 +338,13 @@ void RenderableWebEntityItem::update(const quint64& now) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
bool RenderableWebEntityItem::isTransparent() {
|
bool RenderableWebEntityItem::isTransparent() {
|
||||||
float fadeRatio = _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
|
float fadeRatio = _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
|
||||||
return fadeRatio < OPAQUE_ALPHA_THRESHOLD;
|
return fadeRatio < OPAQUE_ALPHA_THRESHOLD;
|
||||||
}
|
}
|
||||||
|
|
||||||
// UTF-8 encoded symbols
|
|
||||||
static const uint8_t UPWARDS_WHITE_ARROW_FROM_BAR[] = { 0xE2, 0x87, 0xAA, 0x00 }; // shift
|
|
||||||
static const uint8_t LEFT_ARROW[] = { 0xE2, 0x86, 0x90, 0x00 }; // backspace
|
|
||||||
static const uint8_t LEFTWARD_WHITE_ARROW[] = { 0xE2, 0x87, 0xA6, 0x00 }; // left arrow
|
|
||||||
static const uint8_t RIGHTWARD_WHITE_ARROW[] = { 0xE2, 0x87, 0xA8, 0x00 }; // right arrow
|
|
||||||
static const uint8_t ASTERISIM[] = { 0xE2, 0x81, 0x82, 0x00 }; // symbols
|
|
||||||
static const uint8_t RETURN_SYMBOL[] = { 0xE2, 0x8F, 0x8E, 0x00 }; // return
|
|
||||||
static const char PUNCTUATION_STRING[] = "&123";
|
|
||||||
static const char ALPHABET_STRING[] = "abc";
|
|
||||||
|
|
||||||
static bool equals(const QByteArray& byteArray, const uint8_t* ptr) {
|
|
||||||
int i;
|
|
||||||
for (i = 0; i < byteArray.size(); i++) {
|
|
||||||
if ((char)ptr[i] != byteArray[i]) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ptr[i] == 0x00;
|
|
||||||
}
|
|
||||||
|
|
||||||
void RenderableWebEntityItem::synthesizeKeyPress(QString key) {
|
|
||||||
auto eventHandler = getEventHandler();
|
|
||||||
if (eventHandler) {
|
|
||||||
auto utf8Key = key.toUtf8();
|
|
||||||
|
|
||||||
int scanCode = (int)utf8Key[0];
|
|
||||||
QString keyString = key;
|
|
||||||
if (equals(utf8Key, UPWARDS_WHITE_ARROW_FROM_BAR) || equals(utf8Key, ASTERISIM) ||
|
|
||||||
equals(utf8Key, (uint8_t*)PUNCTUATION_STRING) || equals(utf8Key, (uint8_t*)ALPHABET_STRING)) {
|
|
||||||
return; // ignore
|
|
||||||
} else if (equals(utf8Key, LEFT_ARROW)) {
|
|
||||||
scanCode = Qt::Key_Backspace;
|
|
||||||
keyString = "\x08";
|
|
||||||
} else if (equals(utf8Key, RETURN_SYMBOL)) {
|
|
||||||
scanCode = Qt::Key_Return;
|
|
||||||
keyString = "\x0d";
|
|
||||||
} else if (equals(utf8Key, LEFTWARD_WHITE_ARROW)) {
|
|
||||||
scanCode = Qt::Key_Left;
|
|
||||||
keyString = "";
|
|
||||||
} else if (equals(utf8Key, RIGHTWARD_WHITE_ARROW)) {
|
|
||||||
scanCode = Qt::Key_Right;
|
|
||||||
keyString = "";
|
|
||||||
}
|
|
||||||
|
|
||||||
QKeyEvent* pressEvent = new QKeyEvent(QEvent::KeyPress, scanCode, Qt::NoModifier, keyString);
|
|
||||||
QKeyEvent* releaseEvent = new QKeyEvent(QEvent::KeyRelease, scanCode, Qt::NoModifier, keyString);
|
|
||||||
QCoreApplication::postEvent(eventHandler, pressEvent);
|
|
||||||
QCoreApplication::postEvent(eventHandler, releaseEvent);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void RenderableWebEntityItem::emitScriptEvent(const QVariant& message) {
|
void RenderableWebEntityItem::emitScriptEvent(const QVariant& message) {
|
||||||
if (_webEntityAPIHelper) {
|
|
||||||
_webEntityAPIHelper->emitScriptEvent(message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void RenderableWebEntityItem::setKeyboardRaised(bool raised) {
|
|
||||||
|
|
||||||
// raise the keyboard only while in HMD mode and it's being requested.
|
|
||||||
bool value = AbstractViewStateInterface::instance()->isHMDMode() && raised;
|
|
||||||
|
|
||||||
if (_webSurface) {
|
if (_webSurface) {
|
||||||
auto rootItem = _webSurface->getRootItem();
|
_webSurface->emitScriptEvent(message);
|
||||||
if (rootItem) {
|
|
||||||
rootItem->setProperty("keyboardRaised", QVariant(value));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
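
The block removed above (synthesizeKeyPress and its UTF-8 glyph table) worked by translating on-screen keyboard glyphs into ordinary Qt key events and posting them at the web view's event handler. The mechanism is just QCoreApplication::postEvent with heap-allocated QKeyEvents, sketched here against a plain QObject instead of the entity's event handler:

    #include <QCoreApplication>
    #include <QKeyEvent>
    #include <QObject>
    #include <QTimer>
    #include <QDebug>

    // Minimal receiver that just logs what it is sent.
    class Receiver : public QObject {
    protected:
        bool event(QEvent* e) override {
            if (e->type() == QEvent::KeyPress || e->type() == QEvent::KeyRelease) {
                auto* ke = static_cast<QKeyEvent*>(e);
                qDebug() << (e->type() == QEvent::KeyPress ? "press" : "release")
                         << ke->key() << ke->text();
                return true;
            }
            return QObject::event(e);
        }
    };

    // Post a press/release pair; Qt takes ownership of posted events.
    static void synthesizeKey(QObject* target, int key, const QString& text) {
        QCoreApplication::postEvent(target, new QKeyEvent(QEvent::KeyPress, key, Qt::NoModifier, text));
        QCoreApplication::postEvent(target, new QKeyEvent(QEvent::KeyRelease, key, Qt::NoModifier, text));
    }

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);
        Receiver receiver;
        synthesizeKey(&receiver, Qt::Key_Backspace, "\x08");   // same mapping the removed code used
        synthesizeKey(&receiver, Qt::Key_A, "a");
        QTimer::singleShot(0, &app, &QCoreApplication::quit);
        return app.exec();
    }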
@ -13,36 +13,18 @@
|
||||||
#include <QMouseEvent>
|
#include <QMouseEvent>
|
||||||
#include <QTouchEvent>
|
#include <QTouchEvent>
|
||||||
#include <PointerEvent.h>
|
#include <PointerEvent.h>
|
||||||
|
#include <gl/OffscreenQmlSurface.h>
|
||||||
|
|
||||||
#include <WebEntityItem.h>
|
#include <WebEntityItem.h>
|
||||||
|
|
||||||
#include "RenderableEntityItem.h"
|
#include "RenderableEntityItem.h"
|
||||||
|
|
||||||
class OffscreenQmlSurface;
|
|
||||||
class QWindow;
|
class QWindow;
|
||||||
class QObject;
|
class QObject;
|
||||||
class EntityTreeRenderer;
|
class EntityTreeRenderer;
|
||||||
class RenderableWebEntityItem;
|
class RenderableWebEntityItem;
|
||||||
|
|
||||||
class WebEntityAPIHelper : public QObject {
|
|
||||||
Q_OBJECT
|
|
||||||
public:
|
|
||||||
void setRenderableWebEntityItem(RenderableWebEntityItem* renderableWebEntityItem) {
|
|
||||||
_renderableWebEntityItem = renderableWebEntityItem;
|
|
||||||
}
|
|
||||||
Q_INVOKABLE void synthesizeKeyPress(QString key);
|
|
||||||
|
|
||||||
// event bridge
|
|
||||||
public slots:
|
|
||||||
void emitScriptEvent(const QVariant& scriptMessage);
|
|
||||||
void emitWebEvent(const QVariant& webMessage);
|
|
||||||
signals:
|
|
||||||
void scriptEventReceived(const QVariant& message);
|
|
||||||
void webEventReceived(const QVariant& message);
|
|
||||||
|
|
||||||
protected:
|
|
||||||
RenderableWebEntityItem* _renderableWebEntityItem{ nullptr };
|
|
||||||
};
|
|
||||||
|
|
||||||
class RenderableWebEntityItem : public WebEntityItem {
|
class RenderableWebEntityItem : public WebEntityItem {
|
||||||
public:
|
public:
|
||||||
|
@ -64,29 +46,24 @@ public:
|
||||||
bool needsToCallUpdate() const override { return _webSurface != nullptr; }
|
bool needsToCallUpdate() const override { return _webSurface != nullptr; }
|
||||||
|
|
||||||
virtual void emitScriptEvent(const QVariant& message) override;
|
virtual void emitScriptEvent(const QVariant& message) override;
|
||||||
void setKeyboardRaised(bool raised);
|
|
||||||
|
|
||||||
SIMPLE_RENDERABLE();
|
SIMPLE_RENDERABLE();
|
||||||
|
|
||||||
virtual bool isTransparent() override;
|
virtual bool isTransparent() override;
|
||||||
|
|
||||||
public:
|
|
||||||
void synthesizeKeyPress(QString key);
|
|
||||||
|
|
||||||
private:
|
private:
|
||||||
bool buildWebSurface(EntityTreeRenderer* renderer);
|
bool buildWebSurface(EntityTreeRenderer* renderer);
|
||||||
void destroyWebSurface();
|
void destroyWebSurface();
|
||||||
glm::vec2 getWindowSize() const;
|
glm::vec2 getWindowSize() const;
|
||||||
|
|
||||||
OffscreenQmlSurface* _webSurface{ nullptr };
|
QSharedPointer<OffscreenQmlSurface> _webSurface;
|
||||||
QMetaObject::Connection _connection;
|
QMetaObject::Connection _connection;
|
||||||
uint32_t _texture{ 0 };
|
gpu::TexturePointer _texture;
|
||||||
ivec2 _lastPress{ INT_MIN };
|
ivec2 _lastPress { INT_MIN };
|
||||||
bool _pressed{ false };
|
bool _pressed{ false };
|
||||||
QTouchEvent _lastTouchEvent { QEvent::TouchUpdate };
|
QTouchEvent _lastTouchEvent { QEvent::TouchUpdate };
|
||||||
uint64_t _lastRenderTime{ 0 };
|
uint64_t _lastRenderTime{ 0 };
|
||||||
QTouchDevice _touchDevice;
|
QTouchDevice _touchDevice;
|
||||||
WebEntityAPIHelper* _webEntityAPIHelper;
|
|
||||||
|
|
||||||
QMetaObject::Connection _mousePressConnection;
|
QMetaObject::Connection _mousePressConnection;
|
||||||
QMetaObject::Connection _mouseReleaseConnection;
|
QMetaObject::Connection _mouseReleaseConnection;
|
||||||
|
|
|
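
The header above replaces the raw OffscreenQmlSurface pointer with QSharedPointer<OffscreenQmlSurface>, which is what makes the buildWebSurface comment ("use local variables copied by value, rather than member variables") workable: a callback that copies the shared pointer keeps the surface alive even after the entity that created it is gone, and never touches a possibly dangling this. The same lifetime rule with std::shared_ptr, so it compiles without Qt:

    #include <functional>
    #include <iostream>
    #include <memory>
    #include <vector>

    struct Surface {
        ~Surface() { std::cout << "surface destroyed\n"; }
        void release() { std::cout << "texture released\n"; }
    };

    // Stand-in for work queued to another thread and run later.
    std::vector<std::function<void()>> pendingCallbacks;

    struct Entity {
        std::shared_ptr<Surface> surface = std::make_shared<Surface>();

        void queueRecycler() {
            // Copy the shared_ptr by value; capturing `this` and reading
            // this->surface later would dangle once the entity is destroyed.
            auto s = surface;
            pendingCallbacks.push_back([s] { s->release(); });
        }
    };

    int main() {
        {
            Entity entity;
            entity.queueRecycler();
        }                                            // entity and its member are gone here
        for (auto& cb : pendingCallbacks) { cb(); }  // still safe: the lambda owns the surface
        pendingCallbacks.clear();                    // last reference dropped; surface destroyed
        return 0;
    }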
@ -12,6 +12,7 @@
|
||||||
|
|
||||||
#include "EntityItemID.h"
|
#include "EntityItemID.h"
|
||||||
#include <VariantMapToScriptValue.h>
|
#include <VariantMapToScriptValue.h>
|
||||||
|
#include <SharedUtil.h>
|
||||||
#include <SpatialParentFinder.h>
|
#include <SpatialParentFinder.h>
|
||||||
|
|
||||||
#include "EntitiesLogging.h"
|
#include "EntitiesLogging.h"
|
||||||
|
@ -181,6 +182,11 @@ QUuid EntityScriptingInterface::addEntity(const EntityItemProperties& properties
|
||||||
propertiesWithSimID.setOwningAvatarID(myNodeID);
|
propertiesWithSimID.setOwningAvatarID(myNodeID);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (propertiesWithSimID.getParentID() == AVATAR_SELF_ID) {
|
||||||
|
qDebug() << "ERROR: Cannot set entity parent ID to the local-only MyAvatar ID";
|
||||||
|
propertiesWithSimID.setParentID(QUuid());
|
||||||
|
}
|
||||||
|
|
||||||
auto dimensions = propertiesWithSimID.getDimensions();
|
auto dimensions = propertiesWithSimID.getDimensions();
|
||||||
float volume = dimensions.x * dimensions.y * dimensions.z;
|
float volume = dimensions.x * dimensions.y * dimensions.z;
|
||||||
auto density = propertiesWithSimID.getDensity();
|
auto density = propertiesWithSimID.getDensity();
|
||||||
|
@ -357,6 +363,9 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
|
||||||
|
|
||||||
if (!scriptSideProperties.parentIDChanged()) {
|
if (!scriptSideProperties.parentIDChanged()) {
|
||||||
properties.setParentID(entity->getParentID());
|
properties.setParentID(entity->getParentID());
|
||||||
|
} else if (scriptSideProperties.getParentID() == AVATAR_SELF_ID) {
|
||||||
|
qDebug() << "ERROR: Cannot set entity parent ID to the local-only MyAvatar ID";
|
||||||
|
properties.setParentID(QUuid());
|
||||||
}
|
}
|
||||||
if (!scriptSideProperties.parentJointIndexChanged()) {
|
if (!scriptSideProperties.parentJointIndexChanged()) {
|
||||||
properties.setParentJointIndex(entity->getParentJointIndex());
|
properties.setParentJointIndex(entity->getParentJointIndex());
|
||||||
|
@ -386,9 +395,14 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!updatedEntity) {
|
// FIXME: We need to figure out a better way to handle this. Allowing these edits to go through potentially
|
||||||
return QUuid();
|
// breaks avatar energy and entities that are parented.
|
||||||
}
|
//
|
||||||
|
// To handle cases where a script needs to edit an entity with a _known_ entity id but doesn't exist
|
||||||
|
// in the local entity tree, we need to allow those edits to go through to the server.
|
||||||
|
// if (!updatedEntity) {
|
||||||
|
// return QUuid();
|
||||||
|
// }
|
||||||
|
|
||||||
_entityTree->withReadLock([&] {
|
_entityTree->withReadLock([&] {
|
||||||
EntityItemPointer entity = _entityTree->findEntityByEntityItemID(entityID);
|
EntityItemPointer entity = _entityTree->findEntityByEntityItemID(entityID);
|
||||||
|
@ -610,11 +624,11 @@ QVector<QUuid> EntityScriptingInterface::findEntitiesInFrustum(QVariantMap frust
|
||||||
}
|
}
|
||||||
|
|
||||||
RayToEntityIntersectionResult EntityScriptingInterface::findRayIntersection(const PickRay& ray, bool precisionPicking,
|
RayToEntityIntersectionResult EntityScriptingInterface::findRayIntersection(const PickRay& ray, bool precisionPicking,
|
||||||
const QScriptValue& entityIdsToInclude, const QScriptValue& entityIdsToDiscard) {
|
const QScriptValue& entityIdsToInclude, const QScriptValue& entityIdsToDiscard, bool visibleOnly, bool collidableOnly) {
|
||||||
|
|
||||||
QVector<EntityItemID> entitiesToInclude = qVectorEntityItemIDFromScriptValue(entityIdsToInclude);
|
QVector<EntityItemID> entitiesToInclude = qVectorEntityItemIDFromScriptValue(entityIdsToInclude);
|
||||||
QVector<EntityItemID> entitiesToDiscard = qVectorEntityItemIDFromScriptValue(entityIdsToDiscard);
|
QVector<EntityItemID> entitiesToDiscard = qVectorEntityItemIDFromScriptValue(entityIdsToDiscard);
|
||||||
return findRayIntersectionWorker(ray, Octree::Lock, precisionPicking, entitiesToInclude, entitiesToDiscard);
|
return findRayIntersectionWorker(ray, Octree::Lock, precisionPicking, entitiesToInclude, entitiesToDiscard, visibleOnly, collidableOnly);
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME - we should remove this API and encourage all users to use findRayIntersection() instead. We've changed
|
// FIXME - we should remove this API and encourage all users to use findRayIntersection() instead. We've changed
|
||||||
|
@ -629,17 +643,18 @@ RayToEntityIntersectionResult EntityScriptingInterface::findRayIntersectionBlock
|
||||||
}
|
}
|
||||||
|
|
||||||
RayToEntityIntersectionResult EntityScriptingInterface::findRayIntersectionWorker(const PickRay& ray,
|
RayToEntityIntersectionResult EntityScriptingInterface::findRayIntersectionWorker(const PickRay& ray,
|
||||||
Octree::lockType lockType,
|
Octree::lockType lockType, bool precisionPicking, const QVector<EntityItemID>& entityIdsToInclude,
|
||||||
bool precisionPicking, const QVector<EntityItemID>& entityIdsToInclude, const QVector<EntityItemID>& entityIdsToDiscard) {
|
const QVector<EntityItemID>& entityIdsToDiscard, bool visibleOnly, bool collidableOnly) {
|
||||||
|
|
||||||
|
|
||||||
RayToEntityIntersectionResult result;
|
RayToEntityIntersectionResult result;
|
||||||
if (_entityTree) {
|
if (_entityTree) {
|
||||||
OctreeElementPointer element;
|
OctreeElementPointer element;
|
||||||
EntityItemPointer intersectedEntity = NULL;
|
EntityItemPointer intersectedEntity = NULL;
|
||||||
result.intersects = _entityTree->findRayIntersection(ray.origin, ray.direction, element, result.distance, result.face,
|
result.intersects = _entityTree->findRayIntersection(ray.origin, ray.direction,
|
||||||
result.surfaceNormal, entityIdsToInclude, entityIdsToDiscard, (void**)&intersectedEntity, lockType, &result.accurate,
|
entityIdsToInclude, entityIdsToDiscard, visibleOnly, collidableOnly, precisionPicking,
|
||||||
precisionPicking);
|
element, result.distance, result.face, result.surfaceNormal,
|
||||||
|
(void**)&intersectedEntity, lockType, &result.accurate);
|
||||||
if (result.intersects && intersectedEntity) {
|
if (result.intersects && intersectedEntity) {
|
||||||
result.entityID = intersectedEntity->getEntityItemID();
|
result.entityID = intersectedEntity->getEntityItemID();
|
||||||
result.properties = intersectedEntity->getProperties();
|
result.properties = intersectedEntity->getProperties();
|
||||||
|
|
|
@ -143,7 +143,9 @@ public slots:
|
||||||
/// If the scripting context has visible entities, this will determine a ray intersection, the results
|
/// If the scripting context has visible entities, this will determine a ray intersection, the results
|
||||||
/// may be inaccurate if the engine is unable to access the visible entities, in which case result.accurate
|
/// may be inaccurate if the engine is unable to access the visible entities, in which case result.accurate
|
||||||
/// will be false.
|
/// will be false.
|
||||||
Q_INVOKABLE RayToEntityIntersectionResult findRayIntersection(const PickRay& ray, bool precisionPicking = false, const QScriptValue& entityIdsToInclude = QScriptValue(), const QScriptValue& entityIdsToDiscard = QScriptValue());
|
Q_INVOKABLE RayToEntityIntersectionResult findRayIntersection(const PickRay& ray, bool precisionPicking = false,
|
||||||
|
const QScriptValue& entityIdsToInclude = QScriptValue(), const QScriptValue& entityIdsToDiscard = QScriptValue(),
|
||||||
|
bool visibleOnly = false, bool collidableOnly = false);
|
||||||
|
|
||||||
/// If the scripting context has visible entities, this will determine a ray intersection, and will block in
|
/// If the scripting context has visible entities, this will determine a ray intersection, and will block in
|
||||||
/// order to return an accurate result
|
/// order to return an accurate result
|
||||||
|
@ -257,7 +259,8 @@ private:
|
||||||
|
|
||||||
/// actually does the work of finding the ray intersection, can be called in locking mode or tryLock mode
|
/// actually does the work of finding the ray intersection, can be called in locking mode or tryLock mode
|
||||||
RayToEntityIntersectionResult findRayIntersectionWorker(const PickRay& ray, Octree::lockType lockType,
|
RayToEntityIntersectionResult findRayIntersectionWorker(const PickRay& ray, Octree::lockType lockType,
|
||||||
bool precisionPicking, const QVector<EntityItemID>& entityIdsToInclude, const QVector<EntityItemID>& entityIdsToDiscard);
|
bool precisionPicking, const QVector<EntityItemID>& entityIdsToInclude, const QVector<EntityItemID>& entityIdsToDiscard,
|
||||||
|
bool visibleOnly = false, bool collidableOnly = false);
|
||||||
|
|
||||||
EntityTreePointer _entityTree;
|
EntityTreePointer _entityTree;
|
||||||
|
|
||||||
|
|
|
@@ -29,6 +29,28 @@ static const quint64 DELETED_ENTITIES_EXTRA_USECS_TO_CONSIDER = USECS_PER_MSEC *
 const float EntityTree::DEFAULT_MAX_TMP_ENTITY_LIFETIME = 60 * 60; // 1 hour
 
+
+// combines the ray cast arguments into a single object
+class RayArgs {
+public:
+    // Inputs
+    glm::vec3 origin;
+    glm::vec3 direction;
+    const QVector<EntityItemID>& entityIdsToInclude;
+    const QVector<EntityItemID>& entityIdsToDiscard;
+    bool visibleOnly;
+    bool collidableOnly;
+    bool precisionPicking;
+
+    // Outputs
+    OctreeElementPointer& element;
+    float& distance;
+    BoxFace& face;
+    glm::vec3& surfaceNormal;
+    void** intersectedObject;
+    bool found;
+};
+
 EntityTree::EntityTree(bool shouldReaverage) :
     Octree(shouldReaverage),
     _fbxService(NULL),
|
||||||
// if this element doesn't contain the point, then none of its children can contain the point, so stop searching
|
// if this element doesn't contain the point, then none of its children can contain the point, so stop searching
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
// combines the ray cast arguments into a single object
|
|
||||||
class RayArgs {
|
|
||||||
public:
|
|
||||||
glm::vec3 origin;
|
|
||||||
glm::vec3 direction;
|
|
||||||
OctreeElementPointer& element;
|
|
||||||
float& distance;
|
|
||||||
BoxFace& face;
|
|
||||||
glm::vec3& surfaceNormal;
|
|
||||||
const QVector<EntityItemID>& entityIdsToInclude;
|
|
||||||
const QVector<EntityItemID>& entityIdsToDiscard;
|
|
||||||
void** intersectedObject;
|
|
||||||
bool found;
|
|
||||||
bool precisionPicking;
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
bool findRayIntersectionOp(OctreeElementPointer element, void* extraData) {
|
bool findRayIntersectionOp(OctreeElementPointer element, void* extraData) {
|
||||||
RayArgs* args = static_cast<RayArgs*>(extraData);
|
RayArgs* args = static_cast<RayArgs*>(extraData);
|
||||||
bool keepSearching = true;
|
bool keepSearching = true;
|
||||||
EntityTreeElementPointer entityTreeElementPointer = std::dynamic_pointer_cast<EntityTreeElement>(element);
|
EntityTreeElementPointer entityTreeElementPointer = std::dynamic_pointer_cast<EntityTreeElement>(element);
|
||||||
if (entityTreeElementPointer ->findRayIntersection(args->origin, args->direction, keepSearching,
|
if (entityTreeElementPointer->findRayIntersection(args->origin, args->direction, keepSearching,
|
||||||
args->element, args->distance, args->face, args->surfaceNormal, args->entityIdsToInclude,
|
args->element, args->distance, args->face, args->surfaceNormal, args->entityIdsToInclude,
|
||||||
args->entityIdsToDiscard, args->intersectedObject, args->precisionPicking)) {
|
args->entityIdsToDiscard, args->visibleOnly, args->collidableOnly, args->intersectedObject, args->precisionPicking)) {
|
||||||
args->found = true;
|
args->found = true;
|
||||||
}
|
}
|
||||||
return keepSearching;
|
return keepSearching;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool EntityTree::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
bool EntityTree::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||||
OctreeElementPointer& element, float& distance,
|
QVector<EntityItemID> entityIdsToInclude, QVector<EntityItemID> entityIdsToDiscard,
|
||||||
BoxFace& face, glm::vec3& surfaceNormal, const QVector<EntityItemID>& entityIdsToInclude, const QVector<EntityItemID>& entityIdsToDiscard, void** intersectedObject,
|
bool visibleOnly, bool collidableOnly, bool precisionPicking,
|
||||||
Octree::lockType lockType, bool* accurateResult, bool precisionPicking) {
|
OctreeElementPointer& element, float& distance,
|
||||||
RayArgs args = { origin, direction, element, distance, face, surfaceNormal, entityIdsToInclude, entityIdsToDiscard, intersectedObject, false, precisionPicking };
|
BoxFace& face, glm::vec3& surfaceNormal, void** intersectedObject,
|
||||||
|
Octree::lockType lockType, bool* accurateResult) {
|
||||||
|
RayArgs args = { origin, direction, entityIdsToInclude, entityIdsToDiscard,
|
||||||
|
visibleOnly, collidableOnly, precisionPicking,
|
||||||
|
element, distance, face, surfaceNormal, intersectedObject, false };
|
||||||
distance = FLT_MAX;
|
distance = FLT_MAX;
|
||||||
|
|
||||||
bool requireLock = lockType == Octree::Lock;
|
bool requireLock = lockType == Octree::Lock;
|
||||||
|
|
|
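
The RayArgs class added at the top of EntityTree.cpp exists because the octree's recursive traversal hands its visitor callback only a single void* of user data; every input and output of the ray cast is therefore bundled into one object, and findRayIntersectionOp casts it back with static_cast. The shape of that pattern, reduced to a toy tree search (names are illustrative):

    #include <iostream>
    #include <vector>

    struct Node {
        int value;
        std::vector<Node> children;
    };

    // Everything the visitor needs, bundled so it can travel through a void*.
    struct SearchArgs {
        int target;       // input
        int visited;      // output
        bool found;       // output
    };

    using Visitor = bool (*)(const Node&, void*);

    // Generic traversal: it knows nothing about SearchArgs, only void*.
    static void traverse(const Node& node, Visitor visit, void* extraData) {
        if (!visit(node, extraData)) { return; }
        for (const auto& child : node.children) { traverse(child, visit, extraData); }
    }

    static bool searchOp(const Node& node, void* extraData) {
        auto* args = static_cast<SearchArgs*>(extraData);   // cast back, like RayArgs
        args->visited++;
        if (node.value == args->target) { args->found = true; }
        return !args->found;                                // stop descending once found
    }

    int main() {
        Node root { 1, { { 2, {} }, { 3, { { 4, {} } } } } };
        SearchArgs args { 3, 0, false };
        traverse(root, searchOp, &args);
        std::cout << "found=" << args.found << " visited=" << args.visited << "\n";
        return 0;
    }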
@ -31,6 +31,7 @@ using ModelWeakPointer = std::weak_ptr<Model>;
|
||||||
|
|
||||||
class EntitySimulation;
|
class EntitySimulation;
|
||||||
|
|
||||||
|
|
||||||
class NewlyCreatedEntityHook {
|
class NewlyCreatedEntityHook {
|
||||||
public:
|
public:
|
||||||
virtual void entityCreated(const EntityItem& newEntity, const SharedNodePointer& senderNode) = 0;
|
virtual void entityCreated(const EntityItem& newEntity, const SharedNodePointer& senderNode) = 0;
|
||||||
|
@ -89,13 +90,11 @@ public:
|
||||||
const SharedNodePointer& senderNode) override;
|
const SharedNodePointer& senderNode) override;
|
||||||
|
|
||||||
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||||
OctreeElementPointer& node, float& distance, BoxFace& face, glm::vec3& surfaceNormal,
|
QVector<EntityItemID> entityIdsToInclude, QVector<EntityItemID> entityIdsToDiscard,
|
||||||
const QVector<EntityItemID>& entityIdsToInclude = QVector<EntityItemID>(),
|
bool visibleOnly, bool collidableOnly, bool precisionPicking,
|
||||||
const QVector<EntityItemID>& entityIdsToDiscard = QVector<EntityItemID>(),
|
OctreeElementPointer& node, float& distance,
|
||||||
void** intersectedObject = NULL,
|
BoxFace& face, glm::vec3& surfaceNormal, void** intersectedObject = NULL,
|
||||||
Octree::lockType lockType = Octree::TryLock,
|
Octree::lockType lockType = Octree::TryLock, bool* accurateResult = NULL);
|
||||||
bool* accurateResult = NULL,
|
|
||||||
bool precisionPicking = false);
|
|
||||||
|
|
||||||
virtual bool rootElementHasData() const override { return true; }
|
virtual bool rootElementHasData() const override { return true; }
|
||||||
|
|
||||||
|
|
|
@ -534,7 +534,8 @@ bool EntityTreeElement::bestFitBounds(const glm::vec3& minPoint, const glm::vec3
|
||||||
bool EntityTreeElement::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
bool EntityTreeElement::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||||
bool& keepSearching, OctreeElementPointer& element, float& distance,
|
bool& keepSearching, OctreeElementPointer& element, float& distance,
|
||||||
BoxFace& face, glm::vec3& surfaceNormal, const QVector<EntityItemID>& entityIdsToInclude,
|
BoxFace& face, glm::vec3& surfaceNormal, const QVector<EntityItemID>& entityIdsToInclude,
|
||||||
const QVector<EntityItemID>& entityIdsToDiscard, void** intersectedObject, bool precisionPicking) {
|
const QVector<EntityItemID>& entityIdsToDiscard, bool visibleOnly, bool collidableOnly,
|
||||||
|
void** intersectedObject, bool precisionPicking) {
|
||||||
|
|
||||||
keepSearching = true; // assume that we will continue searching after this.
|
keepSearching = true; // assume that we will continue searching after this.
|
||||||
|
|
||||||
|
@ -559,7 +560,8 @@ bool EntityTreeElement::findRayIntersection(const glm::vec3& origin, const glm::
|
||||||
if (_cube.contains(origin) || distanceToElementCube < distance) {
|
if (_cube.contains(origin) || distanceToElementCube < distance) {
|
||||||
|
|
||||||
if (findDetailedRayIntersection(origin, direction, keepSearching, element, distanceToElementDetails,
|
if (findDetailedRayIntersection(origin, direction, keepSearching, element, distanceToElementDetails,
|
||||||
face, localSurfaceNormal, entityIdsToInclude, entityIdsToDiscard, intersectedObject, precisionPicking, distanceToElementCube)) {
|
face, localSurfaceNormal, entityIdsToInclude, entityIdsToDiscard, visibleOnly, collidableOnly,
|
||||||
|
intersectedObject, precisionPicking, distanceToElementCube)) {
|
||||||
|
|
||||||
if (distanceToElementDetails < distance) {
|
if (distanceToElementDetails < distance) {
|
||||||
distance = distanceToElementDetails;
|
distance = distanceToElementDetails;
|
||||||
|
@ -574,13 +576,16 @@ bool EntityTreeElement::findRayIntersection(const glm::vec3& origin, const glm::
|
||||||
|
|
||||||
bool EntityTreeElement::findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction, bool& keepSearching,
|
bool EntityTreeElement::findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction, bool& keepSearching,
|
||||||
OctreeElementPointer& element, float& distance, BoxFace& face, glm::vec3& surfaceNormal,
|
OctreeElementPointer& element, float& distance, BoxFace& face, glm::vec3& surfaceNormal,
|
||||||
const QVector<EntityItemID>& entityIdsToInclude, const QVector<EntityItemID>& entityIDsToDiscard, void** intersectedObject, bool precisionPicking, float distanceToElementCube) {
|
const QVector<EntityItemID>& entityIdsToInclude, const QVector<EntityItemID>& entityIDsToDiscard,
|
||||||
|
bool visibleOnly, bool collidableOnly, void** intersectedObject, bool precisionPicking, float distanceToElementCube) {
|
||||||
|
|
||||||
// only called if we do intersect our bounding cube, but find if we actually intersect with entities...
|
// only called if we do intersect our bounding cube, but find if we actually intersect with entities...
|
||||||
int entityNumber = 0;
|
int entityNumber = 0;
|
||||||
bool somethingIntersected = false;
|
bool somethingIntersected = false;
|
||||||
forEachEntity([&](EntityItemPointer entity) {
|
forEachEntity([&](EntityItemPointer entity) {
|
||||||
if ( (entityIdsToInclude.size() > 0 && !entityIdsToInclude.contains(entity->getID())) || (entityIDsToDiscard.size() > 0 && entityIDsToDiscard.contains(entity->getID())) ) {
|
if ( (visibleOnly && !entity->isVisible()) || (collidableOnly && (entity->getCollisionless() || entity->getShapeType() == SHAPE_TYPE_NONE))
|
||||||
|
|| (entityIdsToInclude.size() > 0 && !entityIdsToInclude.contains(entity->getID()))
|
||||||
|
|| (entityIDsToDiscard.size() > 0 && entityIDsToDiscard.contains(entity->getID())) ) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
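
The rewritten condition above collects all per-entity filtering into one early-out at the top of the forEachEntity lambda: anything that is not visible (when visibleOnly is set), not collidable (collisionless or SHAPE_TYPE_NONE when collidableOnly is set), missing from a non-empty include list, or present in the discard list is skipped before any intersection math runs. The predicate restated on its own (the entity fields here are placeholders for the real accessors):

    #include <QVector>
    #include <QUuid>

    // Placeholder view of an entity; the real class exposes equivalent accessors.
    struct EntityView {
        QUuid id;
        bool visible;
        bool collisionless;
        bool hasShape;
    };

    // True when the entity should be skipped before any ray math is attempted,
    // mirroring the early return in findDetailedRayIntersection.
    static bool rejectedByFilters(const EntityView& entity,
                                  const QVector<QUuid>& idsToInclude,
                                  const QVector<QUuid>& idsToDiscard,
                                  bool visibleOnly, bool collidableOnly) {
        if (visibleOnly && !entity.visible) { return true; }
        if (collidableOnly && (entity.collisionless || !entity.hasShape)) { return true; }
        if (!idsToInclude.isEmpty() && !idsToInclude.contains(entity.id)) { return true; }
        if (!idsToDiscard.isEmpty() && idsToDiscard.contains(entity.id)) { return true; }
        return false;
    }

    int main() {
        EntityView hidden { QUuid::createUuid(), false, false, true };
        // Rejected because visibleOnly is requested and the entity is not visible.
        return rejectedByFilters(hidden, {}, {}, true, false) ? 0 : 1;
    }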
@ -638,7 +643,7 @@ bool EntityTreeElement::findDetailedRayIntersection(const glm::vec3& origin, con
|
||||||
if (localDistance < distance && entity->getType() != EntityTypes::ParticleEffect) {
|
if (localDistance < distance && entity->getType() != EntityTypes::ParticleEffect) {
|
||||||
distance = localDistance;
|
distance = localDistance;
|
||||||
face = localFace;
|
face = localFace;
|
||||||
surfaceNormal = localSurfaceNormal;
|
surfaceNormal = glm::vec3(rotation * glm::vec4(localSurfaceNormal, 1.0f));
|
||||||
*intersectedObject = (void*)entity.get();
|
*intersectedObject = (void*)entity.get();
|
||||||
somethingIntersected = true;
|
somethingIntersected = true;
|
||||||
}
|
}
|
||||||
|
|
|
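
The one-line change above fixes the reported surface normal: the per-entity hit test runs in the entity's local frame, so the local normal has to be rotated by the entity's world rotation before being handed back. With glm that is just the rotation applied to the direction; a small sketch using the quaternion form (the diff multiplies a vec4 by the same rotation, which amounts to the same thing for a pure rotation):

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>
    #include <iostream>

    int main() {
        // Entity rotated 90 degrees about +Y.
        glm::quat rotation = glm::angleAxis(glm::radians(90.0f), glm::vec3(0.0f, 1.0f, 0.0f));

        // Normal of the +X face in the entity's local frame.
        glm::vec3 localNormal(1.0f, 0.0f, 0.0f);

        // Rotate the direction into world space; translation never applies to normals.
        glm::vec3 worldNormal = rotation * localNormal;

        std::cout << worldNormal.x << " " << worldNormal.y << " " << worldNormal.z << "\n";
        return 0;
    }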
@ -147,12 +147,12 @@ public:
|
||||||
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||||
bool& keepSearching, OctreeElementPointer& node, float& distance,
|
bool& keepSearching, OctreeElementPointer& node, float& distance,
|
||||||
BoxFace& face, glm::vec3& surfaceNormal, const QVector<EntityItemID>& entityIdsToInclude,
|
BoxFace& face, glm::vec3& surfaceNormal, const QVector<EntityItemID>& entityIdsToInclude,
|
||||||
const QVector<EntityItemID>& entityIdsToDiscard,
|
const QVector<EntityItemID>& entityIdsToDiscard, bool visibleOnly = false, bool collidableOnly = false,
|
||||||
void** intersectedObject = NULL, bool precisionPicking = false);
|
void** intersectedObject = NULL, bool precisionPicking = false);
|
||||||
virtual bool findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
virtual bool findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||||
bool& keepSearching, OctreeElementPointer& element, float& distance,
|
bool& keepSearching, OctreeElementPointer& element, float& distance,
|
||||||
BoxFace& face, glm::vec3& surfaceNormal, const QVector<EntityItemID>& entityIdsToInclude,
|
BoxFace& face, glm::vec3& surfaceNormal, const QVector<EntityItemID>& entityIdsToInclude,
|
||||||
const QVector<EntityItemID>& entityIdsToDiscard,
|
const QVector<EntityItemID>& entityIdsToDiscard, bool visibleOnly, bool collidableOnly,
|
||||||
void** intersectedObject, bool precisionPicking, float distanceToElementCube);
|
void** intersectedObject, bool precisionPicking, float distanceToElementCube);
|
||||||
virtual bool findSpherePenetration(const glm::vec3& center, float radius,
|
virtual bool findSpherePenetration(const glm::vec3& center, float radius,
|
||||||
glm::vec3& penetration, void** penetratedObject) const override;
|
glm::vec3& penetration, void** penetratedObject) const override;
|
||||||
|
|
|
@@ -6,5 +6,4 @@ target_opengl()
 
 if (NOT ANDROID)
     target_glew()
-    target_oglplus()
 endif ()

189  libraries/gl/src/gl/GLShaders.cpp  (new file)
@ -0,0 +1,189 @@
|
||||||
|
#include "GLShaders.h"
|
||||||
|
|
||||||
|
#include "GLLogging.h"
|
||||||
|
|
||||||
|
namespace gl {
|
||||||
|
|
||||||
|
|
||||||
|
#ifdef SEPARATE_PROGRAM
|
||||||
|
bool compileShader(GLenum shaderDomain, const std::string& shaderSource, const std::string& defines, GLuint &shaderObject, GLuint &programObject) {
|
||||||
|
#else
|
||||||
|
bool compileShader(GLenum shaderDomain, const std::string& shaderSource, const std::string& defines, GLuint &shaderObject) {
|
||||||
|
#endif
|
||||||
|
if (shaderSource.empty()) {
|
||||||
|
qCDebug(glLogging) << "GLShader::compileShader - no GLSL shader source code ? so failed to create";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the shader object
|
||||||
|
GLuint glshader = glCreateShader(shaderDomain);
|
||||||
|
if (!glshader) {
|
||||||
|
qCDebug(glLogging) << "GLShader::compileShader - failed to create the gl shader object";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assign the source
|
||||||
|
const int NUM_SOURCE_STRINGS = 2;
|
||||||
|
const GLchar* srcstr[] = { defines.c_str(), shaderSource.c_str() };
|
||||||
|
glShaderSource(glshader, NUM_SOURCE_STRINGS, srcstr, NULL);
|
||||||
|
|
||||||
|
// Compile !
|
||||||
|
glCompileShader(glshader);
|
||||||
|
|
||||||
|
// check if shader compiled
|
||||||
|
GLint compiled = 0;
|
||||||
|
glGetShaderiv(glshader, GL_COMPILE_STATUS, &compiled);
|
||||||
|
|
||||||
|
// if compilation fails
|
||||||
|
if (!compiled) {
|
||||||
|
|
||||||
|
// save the source code to a temp file so we can debug easily
|
||||||
|
/*
|
||||||
|
std::ofstream filestream;
|
||||||
|
filestream.open("debugshader.glsl");
|
||||||
|
if (filestream.is_open()) {
|
||||||
|
filestream << srcstr[0];
|
||||||
|
filestream << srcstr[1];
|
||||||
|
filestream.close();
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
GLint infoLength = 0;
|
||||||
|
glGetShaderiv(glshader, GL_INFO_LOG_LENGTH, &infoLength);
|
||||||
|
|
||||||
|
char* temp = new char[infoLength];
|
||||||
|
glGetShaderInfoLog(glshader, infoLength, NULL, temp);
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
filestream.open("debugshader.glsl.info.txt");
|
||||||
|
if (filestream.is_open()) {
|
||||||
|
filestream << std::string(temp);
|
||||||
|
filestream.close();
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
qCWarning(glLogging) << "GLShader::compileShader - failed to compile the gl shader object:";
|
||||||
|
for (auto s : srcstr) {
|
||||||
|
qCWarning(glLogging) << s;
|
||||||
|
}
|
||||||
|
qCWarning(glLogging) << "GLShader::compileShader - errors:";
|
||||||
|
qCWarning(glLogging) << temp;
|
||||||
|
delete[] temp;
|
||||||
|
|
||||||
|
glDeleteShader(glshader);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
#ifdef SEPARATE_PROGRAM
|
||||||
|
GLuint glprogram = 0;
|
||||||
|
// so far so good, program is almost done, need to link:
|
||||||
|
GLuint glprogram = glCreateProgram();
|
||||||
|
if (!glprogram) {
|
||||||
|
qCDebug(glLogging) << "GLShader::compileShader - failed to create the gl shader & gl program object";
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
glProgramParameteri(glprogram, GL_PROGRAM_SEPARABLE, GL_TRUE);
|
||||||
|
glAttachShader(glprogram, glshader);
|
||||||
|
glLinkProgram(glprogram);
|
||||||
|
|
||||||
|
GLint linked = 0;
|
||||||
|
glGetProgramiv(glprogram, GL_LINK_STATUS, &linked);
|
||||||
|
|
||||||
|
if (!linked) {
|
||||||
|
/*
|
||||||
|
// save the source code to a temp file so we can debug easily
|
||||||
|
std::ofstream filestream;
|
||||||
|
filestream.open("debugshader.glsl");
|
||||||
|
if (filestream.is_open()) {
|
||||||
|
filestream << shaderSource->source;
|
||||||
|
filestream.close();
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
GLint infoLength = 0;
|
||||||
|
glGetProgramiv(glprogram, GL_INFO_LOG_LENGTH, &infoLength);
|
||||||
|
|
||||||
|
char* temp = new char[infoLength];
|
||||||
|
glGetProgramInfoLog(glprogram, infoLength, NULL, temp);
|
||||||
|
|
||||||
|
qCDebug(glLogging) << "GLShader::compileShader - failed to LINK the gl program object :";
|
||||||
|
qCDebug(glLogging) << temp;
|
||||||
|
|
||||||
|
/*
|
||||||
|
filestream.open("debugshader.glsl.info.txt");
|
||||||
|
if (filestream.is_open()) {
|
||||||
|
filestream << String(temp);
|
||||||
|
filestream.close();
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
delete[] temp;
|
||||||
|
|
||||||
|
glDeleteShader(glshader);
|
||||||
|
glDeleteProgram(glprogram);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
programObject = glprogram;
|
||||||
|
#endif
|
||||||
|
shaderObject = glshader;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
GLuint compileProgram(const std::vector<GLuint>& glshaders) {
|
||||||
|
// A brand new program:
|
||||||
|
GLuint glprogram = glCreateProgram();
|
||||||
|
if (!glprogram) {
|
||||||
|
qCDebug(glLogging) << "GLShader::compileProgram - failed to create the gl program object";
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// glProgramParameteri(glprogram, GL_PROGRAM_, GL_TRUE);
|
||||||
|
// Create the program from the sub shaders
|
||||||
|
for (auto so : glshaders) {
|
||||||
|
glAttachShader(glprogram, so);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Link!
|
||||||
|
glLinkProgram(glprogram);
|
||||||
|
|
||||||
|
GLint linked = 0;
|
||||||
|
glGetProgramiv(glprogram, GL_LINK_STATUS, &linked);
|
||||||
|
|
||||||
|
if (!linked) {
|
||||||
|
/*
|
||||||
|
// save the source code to a temp file so we can debug easily
|
||||||
|
std::ofstream filestream;
|
||||||
|
filestream.open("debugshader.glsl");
|
||||||
|
if (filestream.is_open()) {
|
||||||
|
filestream << shaderSource->source;
|
||||||
|
filestream.close();
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
GLint infoLength = 0;
|
||||||
|
glGetProgramiv(glprogram, GL_INFO_LOG_LENGTH, &infoLength);
|
||||||
|
|
||||||
|
char* temp = new char[infoLength];
|
||||||
|
glGetProgramInfoLog(glprogram, infoLength, NULL, temp);
|
||||||
|
|
||||||
|
qCDebug(glLogging) << "GLShader::compileProgram - failed to LINK the gl program object :";
|
||||||
|
qCDebug(glLogging) << temp;
|
||||||
|
|
||||||
|
/*
|
||||||
|
filestream.open("debugshader.glsl.info.txt");
|
||||||
|
if (filestream.is_open()) {
|
||||||
|
filestream << std::string(temp);
|
||||||
|
filestream.close();
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
delete[] temp;
|
||||||
|
|
||||||
|
glDeleteProgram(glprogram);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
return glprogram;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
29  libraries/gl/src/gl/GLShaders.h  (new file)

@@ -0,0 +1,29 @@
+//
+//  Created by Bradley Austin Davis 2016/09/27
+//  Copyright 2014 High Fidelity, Inc.
+//
+//  Distributed under the Apache License, Version 2.0.
+//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#pragma once
+#ifndef hifi_GLShaders_h
+#define hifi_GLShaders_h
+
+#include "Config.h"
+
+#include <vector>
+#include <string>
+
+namespace gl {
+#ifdef SEPARATE_PROGRAM
+    bool compileShader(GLenum shaderDomain, const std::string& shaderSource, const std::string& defines, GLuint &shaderObject, GLuint &programObject);
+#else
+    bool compileShader(GLenum shaderDomain, const std::string& shaderSource, const std::string& defines, GLuint &shaderObject);
+#endif
+
+    GLuint compileProgram(const std::vector<GLuint>& glshaders);
+
+}
+
+#endif
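
Together the two new files give callers a two-step path: compile each stage with gl::compileShader, which writes the GLSL info log to the log and deletes the shader object on failure, then link the resulting ids with gl::compileProgram, which returns 0 on a failed link. A hedged usage sketch based only on the signatures above; it assumes the non-SEPARATE_PROGRAM build, a current OpenGL context, GL types arriving through Config.h, and a purely illustrative #version line in the defines string:

    #include <string>
    #include <vector>
    #include "GLShaders.h"

    // Hypothetical caller; real sources would come from the shader library.
    GLuint buildSimpleProgram(const std::string& vsSource, const std::string& fsSource) {
        const std::string defines = "#version 410 core\n";   // prepended before the source

        GLuint vs = 0, fs = 0;
        bool ok = gl::compileShader(GL_VERTEX_SHADER, vsSource, defines, vs) &&
                  gl::compileShader(GL_FRAGMENT_SHADER, fsSource, defines, fs);

        // compileShader already logged any compile errors and cleaned up its object.
        GLuint program = ok ? gl::compileProgram({ vs, fs }) : 0;

        // Shader objects can be deleted once linking has been attempted; the
        // linked program keeps whatever it still needs.
        if (vs) { glDeleteShader(vs); }
        if (fs) { glDeleteShader(fs); }
        return program;
    }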
@ -32,24 +32,25 @@
|
||||||
#include <AbstractUriHandler.h>
|
#include <AbstractUriHandler.h>
|
||||||
#include <AccountManager.h>
|
#include <AccountManager.h>
|
||||||
#include <NetworkAccessManager.h>
|
#include <NetworkAccessManager.h>
|
||||||
|
#include <GLMHelpers.h>
|
||||||
|
|
||||||
#include "OffscreenGLCanvas.h"
|
#include "OffscreenGLCanvas.h"
|
||||||
#include "GLEscrow.h"
|
|
||||||
#include "GLHelpers.h"
|
#include "GLHelpers.h"
|
||||||
#include "GLLogging.h"
|
#include "GLLogging.h"
|
||||||
|
#include "TextureRecycler.h"
|
||||||
|
#include "Context.h"
|
||||||
|
|
||||||
QString fixupHifiUrl(const QString& urlString) {
|
QString fixupHifiUrl(const QString& urlString) {
|
||||||
static const QString ACCESS_TOKEN_PARAMETER = "access_token";
|
static const QString ACCESS_TOKEN_PARAMETER = "access_token";
|
||||||
static const QString ALLOWED_HOST = "metaverse.highfidelity.com";
|
static const QString ALLOWED_HOST = "metaverse.highfidelity.com";
|
||||||
QUrl url(urlString);
|
QUrl url(urlString);
|
||||||
QUrlQuery query(url);
|
QUrlQuery query(url);
|
||||||
if (url.host() == ALLOWED_HOST && query.allQueryItemValues(ACCESS_TOKEN_PARAMETER).empty()) {
|
if (url.host() == ALLOWED_HOST && query.allQueryItemValues(ACCESS_TOKEN_PARAMETER).empty()) {
|
||||||
auto accountManager = DependencyManager::get<AccountManager>();
|
auto accountManager = DependencyManager::get<AccountManager>();
|
||||||
query.addQueryItem(ACCESS_TOKEN_PARAMETER, accountManager->getAccountInfo().getAccessToken().token);
|
query.addQueryItem(ACCESS_TOKEN_PARAMETER, accountManager->getAccountInfo().getAccessToken().token);
|
||||||
url.setQuery(query.query());
|
url.setQuery(query.query());
|
||||||
return url.toString();
|
return url.toString();
|
||||||
}
|
}
|
||||||
return urlString;
|
return urlString;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
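
fixupHifiUrl above is untouched by this commit, but it is a compact example of the QUrl/QUrlQuery pattern used throughout the file: parse the query, append access_token only when the host matches and no token is already present, then write the query back into the URL. A self-contained version with the token passed in (the real code pulls it from AccountManager):

    #include <QUrl>
    #include <QUrlQuery>
    #include <QDebug>

    static QString addAccessToken(const QString& urlString, const QString& token) {
        static const QString ACCESS_TOKEN_PARAMETER = "access_token";
        static const QString ALLOWED_HOST = "metaverse.highfidelity.com";

        QUrl url(urlString);
        QUrlQuery query(url);
        // Only touch URLs for the allowed host, and never overwrite an existing token.
        if (url.host() == ALLOWED_HOST && query.allQueryItemValues(ACCESS_TOKEN_PARAMETER).empty()) {
            query.addQueryItem(ACCESS_TOKEN_PARAMETER, token);
            url.setQuery(query.query());
            return url.toString();
        }
        return urlString;
    }

    int main() {
        qDebug() << addAccessToken("https://metaverse.highfidelity.com/user/places", "abc123");
        qDebug() << addAccessToken("https://example.com/index.html", "abc123");
        return 0;
    }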
@ -115,250 +116,8 @@ QNetworkAccessManager* QmlNetworkAccessManagerFactory::create(QObject* parent) {
|
||||||
Q_DECLARE_LOGGING_CATEGORY(offscreenFocus)
|
Q_DECLARE_LOGGING_CATEGORY(offscreenFocus)
|
||||||
Q_LOGGING_CATEGORY(offscreenFocus, "hifi.offscreen.focus")
|
Q_LOGGING_CATEGORY(offscreenFocus, "hifi.offscreen.focus")
|
||||||
|
|
||||||
static const QEvent::Type INIT = QEvent::Type(QEvent::User + 1);
|
void OffscreenQmlSurface::setupFbo() {
|
||||||
static const QEvent::Type RENDER = QEvent::Type(QEvent::User + 2);
|
_canvas->makeCurrent();
|
||||||
static const QEvent::Type RESIZE = QEvent::Type(QEvent::User + 3);
|
|
||||||
static const QEvent::Type STOP = QEvent::Type(QEvent::User + 4);
|
|
||||||
|
|
||||||
class RawTextureRecycler {
|
|
||||||
public:
|
|
||||||
using TexturePtr = GLuint;
|
|
||||||
RawTextureRecycler(bool useMipmaps) : _useMipmaps(useMipmaps) {}
|
|
||||||
void setSize(const uvec2& size);
|
|
||||||
void clear();
|
|
||||||
TexturePtr getNextTexture();
|
|
||||||
void recycleTexture(GLuint texture);
|
|
||||||
|
|
||||||
private:
|
|
||||||
|
|
||||||
struct TexInfo {
|
|
||||||
TexturePtr _tex { 0 };
|
|
||||||
uvec2 _size;
|
|
||||||
bool _active { false };
|
|
||||||
|
|
||||||
TexInfo() {}
|
|
||||||
TexInfo(TexturePtr tex, const uvec2& size) : _tex(tex), _size(size) {}
|
|
||||||
};
|
|
||||||
|
|
||||||
using Map = std::map<GLuint, TexInfo>;
|
|
||||||
using Queue = std::queue<TexturePtr>;
|
|
||||||
|
|
||||||
Map _allTextures;
|
|
||||||
Queue _readyTextures;
|
|
||||||
uvec2 _size { 1920, 1080 };
|
|
||||||
bool _useMipmaps;
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
void RawTextureRecycler::setSize(const uvec2& size) {
|
|
||||||
if (size == _size) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
_size = size;
|
|
||||||
while (!_readyTextures.empty()) {
|
|
||||||
_readyTextures.pop();
|
|
||||||
}
|
|
||||||
std::set<Map::key_type> toDelete;
|
|
||||||
std::for_each(_allTextures.begin(), _allTextures.end(), [&](Map::const_reference item) {
|
|
||||||
if (!item.second._active && item.second._size != _size) {
|
|
||||||
toDelete.insert(item.first);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
std::for_each(toDelete.begin(), toDelete.end(), [&](Map::key_type key) {
|
|
||||||
_allTextures.erase(key);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
void RawTextureRecycler::clear() {
|
|
||||||
while (!_readyTextures.empty()) {
|
|
||||||
_readyTextures.pop();
|
|
||||||
}
|
|
||||||
_allTextures.clear();
|
|
||||||
}
|
|
||||||
|
|
||||||
RawTextureRecycler::TexturePtr RawTextureRecycler::getNextTexture() {
|
|
||||||
if (_readyTextures.empty()) {
|
|
||||||
TexturePtr newTexture;
|
|
||||||
glGenTextures(1, &newTexture);
|
|
||||||
|
|
||||||
glBindTexture(GL_TEXTURE_2D, newTexture);
|
|
||||||
if (_useMipmaps) {
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
|
|
||||||
} else {
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
|
|
||||||
}
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
|
|
||||||
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
|
|
||||||
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, 8.0f);
|
|
||||||
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -0.2f);
|
|
||||||
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, 8.0f);
|
|
||||||
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, _size.x, _size.y, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
|
|
||||||
_allTextures[newTexture] = TexInfo { newTexture, _size };
|
|
||||||
_readyTextures.push(newTexture);
|
|
||||||
}
|
|
||||||
|
|
||||||
TexturePtr result = _readyTextures.front();
|
|
||||||
_readyTextures.pop();
|
|
||||||
auto& item = _allTextures[result];
|
|
||||||
item._active = true;
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
void RawTextureRecycler::recycleTexture(GLuint texture) {
|
|
||||||
Q_ASSERT(_allTextures.count(texture));
|
|
||||||
auto& item = _allTextures[texture];
|
|
||||||
Q_ASSERT(item._active);
|
|
||||||
item._active = false;
|
|
||||||
if (item._size != _size) {
|
|
||||||
// Buh-bye
|
|
||||||
_allTextures.erase(texture);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
_readyTextures.push(item._tex);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class OffscreenQmlRenderThread : public QThread {
|
|
||||||
public:
|
|
||||||
OffscreenQmlRenderThread(OffscreenQmlSurface* surface, QOpenGLContext* shareContext);
|
|
||||||
virtual ~OffscreenQmlRenderThread() = default;
|
|
||||||
|
|
||||||
virtual void run() override;
|
|
||||||
virtual bool event(QEvent *e) override;
|
|
||||||
|
|
||||||
protected:
|
|
||||||
class Queue : private QQueue<QEvent*> {
|
|
||||||
public:
|
|
||||||
void add(QEvent::Type type);
|
|
||||||
QEvent* take();
|
|
||||||
|
|
||||||
private:
|
|
||||||
QMutex _mutex;
|
|
||||||
QWaitCondition _waitCondition;
|
|
||||||
bool _isWaiting{ false };
|
|
||||||
};
|
|
||||||
|
|
||||||
friend class OffscreenQmlSurface;
|
|
||||||
|
|
||||||
QJsonObject getGLContextData();
|
|
||||||
|
|
||||||
Queue _queue;
|
|
||||||
QMutex _mutex;
|
|
||||||
QWaitCondition _waitCondition;
|
|
||||||
std::atomic<bool> _rendering { false };
|
|
||||||
|
|
||||||
QJsonObject _glData;
|
|
||||||
QMutex _glMutex;
|
|
||||||
QWaitCondition _glWait;
|
|
||||||
|
|
||||||
private:
|
|
||||||
// Event-driven methods
|
|
||||||
void init();
|
|
||||||
void render();
|
|
||||||
void resize();
|
|
||||||
void cleanup();
|
|
||||||
|
|
||||||
// Helper methods
|
|
||||||
void setupFbo();
|
|
||||||
bool allowNewFrame(uint8_t fps);
|
|
||||||
|
|
||||||
// Rendering members
|
|
||||||
OffscreenGLCanvas _canvas;
|
|
||||||
OffscreenQmlSurface* _surface{ nullptr };
|
|
||||||
QQuickWindow* _quickWindow{ nullptr };
|
|
||||||
QMyQuickRenderControl* _renderControl{ nullptr };
|
|
||||||
GLuint _fbo { 0 };
|
|
||||||
GLuint _depthStencil { 0 };
|
|
||||||
RawTextureRecycler _textures { true };
|
|
||||||
GLTextureEscrow _escrow;
|
|
||||||
|
|
||||||
uint64_t _lastRenderTime{ 0 };
|
|
||||||
uvec2 _size{ 1920, 1080 };
|
|
||||||
QSize _newSize;
|
|
||||||
bool _quit{ false };
|
|
||||||
};
|
|
||||||
|
|
||||||
void OffscreenQmlRenderThread::Queue::add(QEvent::Type type) {
|
|
||||||
QMutexLocker locker(&_mutex);
|
|
||||||
enqueue(new QEvent(type));
|
|
||||||
if (_isWaiting) {
|
|
||||||
_waitCondition.wakeOne();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
QEvent* OffscreenQmlRenderThread::Queue::take() {
|
|
||||||
QMutexLocker locker(&_mutex);
|
|
||||||
while (isEmpty()) {
|
|
||||||
_isWaiting = true;
|
|
||||||
_waitCondition.wait(&_mutex);
|
|
||||||
_isWaiting = false;
|
|
||||||
}
|
|
||||||
QEvent* e = dequeue();
|
|
||||||
return e;
|
|
||||||
}
|
|
||||||
|
|
||||||
OffscreenQmlRenderThread::OffscreenQmlRenderThread(OffscreenQmlSurface* surface, QOpenGLContext* shareContext) : _surface(surface) {
|
|
||||||
_canvas.setObjectName("OffscreenQmlRenderCanvas");
|
|
||||||
qCDebug(glLogging) << "Building QML Renderer";
|
|
||||||
if (!_canvas.create(shareContext)) {
|
|
||||||
qWarning("Failed to create OffscreenGLCanvas");
|
|
||||||
_quit = true;
|
|
||||||
return;
|
|
||||||
};
|
|
||||||
|
|
||||||
_renderControl = new QMyQuickRenderControl();
|
|
||||||
QQuickWindow::setDefaultAlphaBuffer(true);
|
|
||||||
// Create a QQuickWindow that is associated with our render control.
|
|
||||||
// This window never gets created or shown, meaning that it will never get an underlying native (platform) window.
|
|
||||||
// NOTE: Must be created on the main thread so that OffscreenQmlSurface can send it events
|
|
||||||
// NOTE: Must be created on the rendering thread or it will refuse to render,
|
|
||||||
// so we wait until after its ctor to move object/context to this thread.
|
|
||||||
_quickWindow = new QQuickWindow(_renderControl);
|
|
||||||
_quickWindow->setColor(QColor(255, 255, 255, 0));
|
|
||||||
_quickWindow->setFlags(_quickWindow->flags() | static_cast<Qt::WindowFlags>(Qt::WA_TranslucentBackground));
|
|
||||||
|
|
||||||
// We can prepare, but we must wait to start() the thread until after the ctor
|
|
||||||
_renderControl->prepareThread(this);
|
|
||||||
_canvas.getContextObject()->moveToThread(this);
|
|
||||||
moveToThread(this);
|
|
||||||
|
|
||||||
_queue.add(INIT);
|
|
||||||
}
|
|
||||||
|
|
||||||
void OffscreenQmlRenderThread::run() {
|
|
||||||
qCDebug(glLogging) << "Starting QML Renderer thread";
|
|
||||||
|
|
||||||
while (!_quit) {
|
|
||||||
QEvent* e = _queue.take();
|
|
||||||
event(e);
|
|
||||||
delete e;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bool OffscreenQmlRenderThread::event(QEvent *e) {
|
|
||||||
switch (int(e->type())) {
|
|
||||||
case INIT:
|
|
||||||
init();
|
|
||||||
return true;
|
|
||||||
case RENDER:
|
|
||||||
render();
|
|
||||||
return true;
|
|
||||||
case RESIZE:
|
|
||||||
resize();
|
|
||||||
return true;
|
|
||||||
case STOP:
|
|
||||||
cleanup();
|
|
||||||
return true;
|
|
||||||
default:
|
|
||||||
return QObject::event(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void OffscreenQmlRenderThread::setupFbo() {
    _textures.setSize(_size);
    if (_depthStencil) {
        glDeleteRenderbuffers(1, &_depthStencil);

@@ -376,44 +135,12 @@ void OffscreenQmlRenderThread::setupFbo() {
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
    glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthStencil);
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
    _canvas->doneCurrent();
}

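For readers following the FBO handling above, here is a minimal, self-contained sketch of the GL objects that setupFbo() manages. The depth-stencil storage format, the width/height parameters and the helper name are assumptions for illustration, not the exact code from this change; the color attachment is bound per frame from the texture recycler rather than stored here.

// Sketch only: create an FBO plus a depth renderbuffer of the given size and
// attach the renderbuffer, mirroring the glFramebufferRenderbuffer call above.
// Assumes a current GL context and loaded GL entry points; error checking omitted.
static void createOffscreenTargets(GLuint& fbo, GLuint& depthStencil, int width, int height) {
    glGenFramebuffers(1, &fbo);
    glGenRenderbuffers(1, &depthStencil);

    glBindRenderbuffer(GL_RENDERBUFFER, depthStencil);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, width, height); // format is an assumption
    glBindRenderbuffer(GL_RENDERBUFFER, 0);

    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
    glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthStencil);
    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}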
QJsonObject OffscreenQmlRenderThread::getGLContextData() {
    _glMutex.lock();
    if (_glData.isEmpty()) {
        _glWait.wait(&_glMutex);
    }
    _glMutex.unlock();
    return _glData;
}

void OffscreenQmlRenderThread::init() {
    qCDebug(glLogging) << "Initializing QML Renderer";

    if (!_canvas.makeCurrent()) {
        qWarning("Failed to make context current on QML Renderer Thread");
        _quit = true;
        return;
    }

    _glMutex.lock();
    _glData = ::getGLContextData();
    _glMutex.unlock();
    _glWait.wakeAll();

    connect(_renderControl, &QQuickRenderControl::renderRequested, _surface, &OffscreenQmlSurface::requestRender);
    connect(_renderControl, &QQuickRenderControl::sceneChanged, _surface, &OffscreenQmlSurface::requestUpdate);

    _renderControl->initialize(_canvas.getContext());
    setupFbo();
    _escrow.setRecycler([this](GLuint texture){
        _textures.recycleTexture(texture);
    });
}

void OffscreenQmlSurface::cleanup() {
    _canvas->makeCurrent();

void OffscreenQmlRenderThread::cleanup() {
    _renderControl->invalidate();

    if (_depthStencil) {
        glDeleteRenderbuffers(1, &_depthStencil);
        _depthStencil = 0;

@@ -424,88 +151,97 @@ void OffscreenQmlRenderThread::cleanup() {
    }

    _textures.clear();
    _canvas->doneCurrent();
    _canvas.doneCurrent();
    _canvas.getContextObject()->moveToThread(QCoreApplication::instance()->thread());

    _quit = true;
}

void OffscreenQmlRenderThread::resize() {
|
void OffscreenQmlSurface::render() {
|
||||||
// Lock _newSize changes
|
if (_paused) {
|
||||||
{
|
|
||||||
QMutexLocker locker(&_mutex);
|
|
||||||
|
|
||||||
// Update our members
|
|
||||||
if (_quickWindow) {
|
|
||||||
_quickWindow->setGeometry(QRect(QPoint(), _newSize));
|
|
||||||
_quickWindow->contentItem()->setSize(_newSize);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Qt bug in 5.4 forces this check of pixel ratio,
|
|
||||||
// even though we're rendering offscreen.
|
|
||||||
qreal pixelRatio = 1.0;
|
|
||||||
if (_renderControl && _renderControl->_renderWindow) {
|
|
||||||
pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
|
|
||||||
}
|
|
||||||
|
|
||||||
uvec2 newOffscreenSize = toGlm(_newSize * pixelRatio);
|
|
||||||
if (newOffscreenSize == _size) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
qCDebug(glLogging) << "Offscreen UI resizing to " << _newSize.width() << "x" << _newSize.height() << " with pixel ratio " << pixelRatio;
|
|
||||||
_size = newOffscreenSize;
|
|
||||||
}
|
|
||||||
|
|
||||||
_textures.setSize(_size);
|
|
||||||
setupFbo();
|
|
||||||
}
|
|
||||||
|
|
||||||
void OffscreenQmlRenderThread::render() {
|
|
||||||
// Ensure we always release the main thread
|
|
||||||
Finally releaseMainThread([this] {
|
|
||||||
_waitCondition.wakeOne();
|
|
||||||
});
|
|
||||||
|
|
||||||
if (_surface->_paused) {
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
_rendering = true;
|
_canvas->makeCurrent();
|
||||||
Finally unmarkRenderingFlag([this] {
|
|
||||||
_rendering = false;
|
|
||||||
});
|
|
||||||
|
|
||||||
{
|
|
||||||
QMutexLocker locker(&_mutex);
|
|
||||||
_renderControl->sync();
|
|
||||||
releaseMainThread.trigger();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
_renderControl->sync();
|
||||||
_quickWindow->setRenderTarget(_fbo, QSize(_size.x, _size.y));
|
_quickWindow->setRenderTarget(_fbo, QSize(_size.x, _size.y));
|
||||||
|
|
||||||
try {
|
// Clear out any pending textures to be returned
|
||||||
GLuint texture = _textures.getNextTexture();
|
{
|
||||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
|
std::list<OffscreenQmlSurface::TextureAndFence> returnedTextures;
|
||||||
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, texture, 0);
|
{
|
||||||
PROFILE_RANGE("qml_render->rendercontrol")
|
std::unique_lock<std::mutex> lock(_textureMutex);
|
||||||
_renderControl->render();
|
returnedTextures.swap(_returnedTextures);
|
||||||
|
}
|
||||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
if (!returnedTextures.empty()) {
|
||||||
glBindTexture(GL_TEXTURE_2D, texture);
|
for (const auto& textureAndFence : returnedTextures) {
|
||||||
glGenerateMipmap(GL_TEXTURE_2D);
|
GLsync fence = static_cast<GLsync>(textureAndFence.second);
|
||||||
glBindTexture(GL_TEXTURE_2D, 0);
|
if (fence) {
|
||||||
|
glWaitSync(fence, 0, GL_TIMEOUT_IGNORED);
|
||||||
_quickWindow->resetOpenGLState();
|
glDeleteSync(fence);
|
||||||
_escrow.submit(texture);
|
}
|
||||||
_lastRenderTime = usecTimestampNow();
|
_textures.recycleTexture(textureAndFence.first);
|
||||||
} catch (std::runtime_error& error) {
|
}
|
||||||
qWarning() << "Failed to render QML: " << error.what();
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
GLuint texture = _textures.getNextTexture();
|
||||||
|
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _fbo);
|
||||||
|
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, texture, 0);
|
||||||
|
PROFILE_RANGE("qml_render->rendercontrol")
|
||||||
|
_renderControl->render();
|
||||||
|
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||||
|
glBindTexture(GL_TEXTURE_2D, texture);
|
||||||
|
glGenerateMipmap(GL_TEXTURE_2D);
|
||||||
|
glBindTexture(GL_TEXTURE_2D, 0);
|
||||||
|
|
||||||
|
{
|
||||||
|
std::unique_lock<std::mutex> lock(_textureMutex);
|
||||||
|
// If the most recent texture was unused, we can directly recycle it
|
||||||
|
if (_latestTextureAndFence.first) {
|
||||||
|
_textures.recycleTexture(_latestTextureAndFence.first);
|
||||||
|
glDeleteSync(static_cast<GLsync>(_latestTextureAndFence.second));
|
||||||
|
_latestTextureAndFence = { 0, 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
_latestTextureAndFence = { texture, glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0) };
|
||||||
|
// Fence will be used in another thread / context, so a flush is required
|
||||||
|
glFlush();
|
||||||
|
}
|
||||||
|
|
||||||
|
_quickWindow->resetOpenGLState();
|
||||||
|
_lastRenderTime = usecTimestampNow();
|
||||||
|
_canvas->doneCurrent();
|
||||||
}
|
}
|
||||||
|
|
||||||
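The new render() path in the hunk above ends by publishing the rendered texture together with a GLsync fence. A condensed sketch of that producer-side publish step follows; member names mirror the new code, the surrounding render and mipmap calls are omitted, and this is a readability aid rather than the verbatim change.

// Sketch of the publish step at the end of render(): if the previous frame was
// never fetched, recycle it and drop its fence; then store the fresh texture
// with a new fence and flush so the fence becomes visible to the consumer.
{
    std::unique_lock<std::mutex> lock(_textureMutex);
    if (_latestTextureAndFence.first) {
        _textures.recycleTexture(_latestTextureAndFence.first);
        glDeleteSync(static_cast<GLsync>(_latestTextureAndFence.second));
    }
    _latestTextureAndFence = { texture, glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0) };
    glFlush(); // the fence will be waited on in another thread / context
}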
bool OffscreenQmlRenderThread::allowNewFrame(uint8_t fps) {
bool OffscreenQmlSurface::fetchTexture(TextureAndFence& textureAndFence) {
    textureAndFence = { 0, 0 };

    std::unique_lock<std::mutex> lock(_textureMutex);
    if (0 == _latestTextureAndFence.first) {
        return false;
    }

    // Ensure writes to the latest texture are complete before returning it for reading
    textureAndFence = _latestTextureAndFence;
    _latestTextureAndFence = { 0, 0 };
    return true;
}

void OffscreenQmlSurface::releaseTexture(const TextureAndFence& textureAndFence) {
    std::unique_lock<std::mutex> lock(_textureMutex);
    _returnedTextures.push_back(textureAndFence);
}

bool OffscreenQmlSurface::allowNewFrame(uint8_t fps) {
    // If we already have a pending texture, don't render another one,
    // i.e. don't render faster than the consumer context, since it wastes
    // GPU cycles on producing output that will never be seen
    {
        std::unique_lock<std::mutex> lock(_textureMutex);
        if (0 != _latestTextureAndFence.first) {
            return false;
        }
    }

    auto minRenderInterval = USECS_PER_SECOND / fps;
    auto lastInterval = usecTimestampNow() - _lastRenderTime;
    return (lastInterval > minRenderInterval);
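A hypothetical consumer-side counterpart to fetchTexture()/releaseTexture() could look like the sketch below. It is not part of this commit; `surface` and `displayTexture` are placeholders for the caller's state, and GL headers plus a current context on the consumer side are assumed.

// Take the latest texture from the surface, wait on its fence, and hand back
// the texture we were previously displaying, fenced at the end of our reads.
static void updateDisplayTexture(OffscreenQmlSurface* surface, uint32_t& displayTexture) {
    OffscreenQmlSurface::TextureAndFence newTextureAndFence;
    if (!surface->fetchTexture(newTextureAndFence)) {
        return; // nothing new has been rendered since the last fetch
    }

    // Make this GL context wait until the producer's writes have completed.
    glWaitSync(static_cast<GLsync>(newTextureAndFence.second), 0, GL_TIMEOUT_IGNORED);
    glDeleteSync(static_cast<GLsync>(newTextureAndFence.second));

    if (displayTexture != 0) {
        GLsync readsComplete = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
        glFlush();
        surface->releaseTexture({ displayTexture, readsComplete });
    }
    displayTexture = newTextureAndFence.first;
}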
@@ -519,33 +255,46 @@ OffscreenQmlSurface::~OffscreenQmlSurface() {
    QObject::disconnect(&_updateTimer);
    QObject::disconnect(qApp);

    qCDebug(glLogging) << "Stopping QML Renderer Thread " << _renderer->currentThreadId();
    _renderer->_queue.add(STOP);
    if (!_renderer->wait(MAX_SHUTDOWN_WAIT_SECS * USECS_PER_SECOND)) {
        qWarning() << "Failed to shut down the QML Renderer Thread";
    }

    delete _rootItem;
    cleanup();
    delete _renderer;
    delete _qmlComponent;
    _canvas->deleteLater();
    delete _qmlEngine;
    _rootItem->deleteLater();
    _qmlComponent->deleteLater();
    _qmlEngine->deleteLater();
    _quickWindow->deleteLater();
}

void OffscreenQmlSurface::onAboutToQuit() {
    _paused = true;
    QObject::disconnect(&_updateTimer);
}

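The destructor now uses deleteLater() instead of delete for the Qt objects it owns. As a small, stand-alone illustration of the difference (not code from this change), deleteLater() only queues the destruction and lets the object's event loop perform it:

#include <QDebug>
#include <QObject>

// Illustration only: deleteLater() queues destruction instead of destroying
// immediately; the object stays valid until its thread's event loop runs,
// which is safer when signals or posted events may still be pending.
static void deferredDeleteExample() {
    QObject* object = new QObject();
    QObject::connect(object, &QObject::destroyed, [] { qDebug() << "deleted from the event loop"; });
    object->deleteLater(); // still valid here; actually deleted once the loop runs
}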
void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
|
void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
|
||||||
qCDebug(glLogging) << "Building QML surface";
|
qCDebug(glLogging) << "Building QML surface";
|
||||||
|
|
||||||
_renderer = new OffscreenQmlRenderThread(this, shareContext);
|
_renderControl = new QMyQuickRenderControl();
|
||||||
_renderer->moveToThread(_renderer);
|
|
||||||
_renderer->setObjectName("QML Renderer Thread");
|
|
||||||
_renderer->start();
|
|
||||||
|
|
||||||
_renderer->_renderControl->_renderWindow = _proxyWindow;
|
QQuickWindow::setDefaultAlphaBuffer(true);
|
||||||
|
|
||||||
connect(_renderer->_quickWindow, &QQuickWindow::focusObjectChanged, this, &OffscreenQmlSurface::onFocusObjectChanged);
|
// Create a QQuickWindow that is associated with our render control.
|
||||||
|
// This window never gets created or shown, meaning that it will never get an underlying native (platform) window.
|
||||||
|
// NOTE: Must be created on the main thread so that OffscreenQmlSurface can send it events
|
||||||
|
// NOTE: Must be created on the rendering thread or it will refuse to render,
|
||||||
|
// so we wait until after its ctor to move object/context to this thread.
|
||||||
|
_quickWindow = new QQuickWindow(_renderControl);
|
||||||
|
_quickWindow->setColor(QColor(255, 255, 255, 0));
|
||||||
|
_quickWindow->setFlags(_quickWindow->flags() | static_cast<Qt::WindowFlags>(Qt::WA_TranslucentBackground));
|
||||||
|
|
||||||
|
_renderControl->_renderWindow = _proxyWindow;
|
||||||
|
|
||||||
|
_canvas = new OffscreenGLCanvas();
|
||||||
|
if (!_canvas->create(shareContext)) {
|
||||||
|
qFatal("Failed to create OffscreenGLCanvas");
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
connect(_quickWindow, &QQuickWindow::focusObjectChanged, this, &OffscreenQmlSurface::onFocusObjectChanged);
|
||||||
|
|
||||||
// Create a QML engine.
|
// Create a QML engine.
|
||||||
_qmlEngine = new QQmlEngine;
|
_qmlEngine = new QQmlEngine;
|
||||||
|
@ -556,13 +305,26 @@ void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
|
||||||
importList.insert(importList.begin(), PathUtils::resourcesPath());
|
importList.insert(importList.begin(), PathUtils::resourcesPath());
|
||||||
_qmlEngine->setImportPathList(importList);
|
_qmlEngine->setImportPathList(importList);
|
||||||
if (!_qmlEngine->incubationController()) {
|
if (!_qmlEngine->incubationController()) {
|
||||||
_qmlEngine->setIncubationController(_renderer->_quickWindow->incubationController());
|
_qmlEngine->setIncubationController(_quickWindow->incubationController());
|
||||||
}
|
}
|
||||||
|
|
||||||
_qmlEngine->rootContext()->setContextProperty("GL", _renderer->getGLContextData());
|
// FIXME
|
||||||
|
_qmlEngine->rootContext()->setContextProperty("GL", _glData);
|
||||||
_qmlEngine->rootContext()->setContextProperty("offscreenWindow", QVariant::fromValue(getWindow()));
|
_qmlEngine->rootContext()->setContextProperty("offscreenWindow", QVariant::fromValue(getWindow()));
|
||||||
_qmlComponent = new QQmlComponent(_qmlEngine);
|
_qmlComponent = new QQmlComponent(_qmlEngine);
|
||||||
|
|
||||||
|
|
||||||
|
connect(_renderControl, &QQuickRenderControl::renderRequested, [this] { _render = true; });
|
||||||
|
connect(_renderControl, &QQuickRenderControl::sceneChanged, [this] { _render = _polish = true; });
|
||||||
|
|
||||||
|
if (!_canvas->makeCurrent()) {
|
||||||
|
qWarning("Failed to make context current for QML Renderer");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
_glData = ::getGLContextData();
|
||||||
|
_renderControl->initialize(_canvas->getContext());
|
||||||
|
setupFbo();
|
||||||
|
|
||||||
// When Quick says there is a need to render, we will not render immediately. Instead,
|
// When Quick says there is a need to render, we will not render immediately. Instead,
|
||||||
// a timer with a small interval is used to get better performance.
|
// a timer with a small interval is used to get better performance.
|
||||||
QObject::connect(&_updateTimer, &QTimer::timeout, this, &OffscreenQmlSurface::updateQuick);
|
QObject::connect(&_updateTimer, &QTimer::timeout, this, &OffscreenQmlSurface::updateQuick);
|
||||||
|
@ -577,7 +339,7 @@ void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
|
||||||
|
|
||||||
void OffscreenQmlSurface::resize(const QSize& newSize_, bool forceResize) {

    if (!_renderer || !_renderer->_quickWindow) {
    if (!_quickWindow) {
        return;
    }

@@ -593,7 +355,7 @@ void OffscreenQmlSurface::resize(const QSize& newSize_, bool forceResize) {
            std::max(static_cast<int>(scale * newSize.height()), 10));
    }

    QSize currentSize = _renderer->_quickWindow->geometry().size();
    QSize currentSize = _quickWindow->geometry().size();
    if (newSize == currentSize && !forceResize) {
        return;
    }

@@ -604,12 +366,26 @@ void OffscreenQmlSurface::resize(const QSize& newSize_, bool forceResize) {
        _rootItem->setSize(newSize);
    }

    {
    // Update our members
        QMutexLocker locker(&(_renderer->_mutex));
    _quickWindow->setGeometry(QRect(QPoint(), newSize));
        _renderer->_newSize = newSize;
    _quickWindow->contentItem()->setSize(newSize);

    // Qt bug in 5.4 forces this check of pixel ratio,
    // even though we're rendering offscreen.
    qreal pixelRatio = 1.0;
    if (_renderControl && _renderControl->_renderWindow) {
        pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
    }

    _renderer->_queue.add(RESIZE);
    uvec2 newOffscreenSize = toGlm(newSize * pixelRatio);
    if (newOffscreenSize == _size) {
        return;
    }

    qCDebug(glLogging) << "Offscreen UI resizing to " << newSize.width() << "x" << newSize.height() << " with pixel ratio " << pixelRatio;
    _size = newOffscreenSize;
    _textures.setSize(_size);
    setupFbo();
}

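To make the sizing rule above concrete, here is a tiny stand-alone example of the logical-size to offscreen-pixel-size computation. The helper name and the exact clamping order are assumptions for illustration; the real code also consults the proxy window for the device pixel ratio.

#include <algorithm>
#include <cstdint>

struct OffscreenSize { uint32_t x; uint32_t y; };

// Clamp the logical QML size to a small minimum, then scale by the device
// pixel ratio to get the size of the offscreen texture, as resize() does above.
static OffscreenSize offscreenSizeFor(int logicalWidth, int logicalHeight, double pixelRatio) {
    int w = std::max(logicalWidth, 10);
    int h = std::max(logicalHeight, 10);
    return { static_cast<uint32_t>(w * pixelRatio), static_cast<uint32_t>(h * pixelRatio) };
}

// Example: offscreenSizeFor(1280, 720, 2.0) yields a 2560 x 1440 texture.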
||||||
QQuickItem* OffscreenQmlSurface::getRootItem() {
|
QQuickItem* OffscreenQmlSurface::getRootItem() {
|
||||||
|
@ -627,13 +403,13 @@ QObject* OffscreenQmlSurface::load(const QUrl& qmlSource, std::function<void(QQm
|
||||||
_qmlComponent->loadUrl(qmlSource, QQmlComponent::PreferSynchronous);
|
_qmlComponent->loadUrl(qmlSource, QQmlComponent::PreferSynchronous);
|
||||||
|
|
||||||
if (_qmlComponent->isLoading()) {
|
if (_qmlComponent->isLoading()) {
|
||||||
connect(_qmlComponent, &QQmlComponent::statusChanged, this,
|
connect(_qmlComponent, &QQmlComponent::statusChanged, this,
|
||||||
[this, f](QQmlComponent::Status){
|
[this, f](QQmlComponent::Status){
|
||||||
finishQmlLoad(f);
|
finishQmlLoad(f);
|
||||||
});
|
});
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
return finishQmlLoad(f);
|
return finishQmlLoad(f);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -641,15 +417,6 @@ void OffscreenQmlSurface::clearCache() {
|
||||||
getRootContext()->engine()->clearComponentCache();
|
getRootContext()->engine()->clearComponentCache();
|
||||||
}
|
}
|
||||||
|
|
||||||
void OffscreenQmlSurface::requestUpdate() {
|
|
||||||
_polish = true;
|
|
||||||
_render = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
void OffscreenQmlSurface::requestRender() {
|
|
||||||
_render = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f) {
|
QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f) {
|
||||||
disconnect(_qmlComponent, &QQmlComponent::statusChanged, this, 0);
|
disconnect(_qmlComponent, &QQmlComponent::statusChanged, this, 0);
|
||||||
if (_qmlComponent->isError()) {
|
if (_qmlComponent->isError()) {
|
||||||
|
@ -660,6 +427,19 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// FIXME: Refactor with similar code in RenderableWebEntityItem
|
||||||
|
QString javaScriptToInject;
|
||||||
|
QFile webChannelFile(":qtwebchannel/qwebchannel.js");
|
||||||
|
QFile createGlobalEventBridgeFile(PathUtils::resourcesPath() + "/html/createGlobalEventBridge.js");
|
||||||
|
if (webChannelFile.open(QFile::ReadOnly | QFile::Text) &&
|
||||||
|
createGlobalEventBridgeFile.open(QFile::ReadOnly | QFile::Text)) {
|
||||||
|
QString webChannelStr = QTextStream(&webChannelFile).readAll();
|
||||||
|
QString createGlobalEventBridgeStr = QTextStream(&createGlobalEventBridgeFile).readAll();
|
||||||
|
javaScriptToInject = webChannelStr + createGlobalEventBridgeStr;
|
||||||
|
} else {
|
||||||
|
qWarning() << "Unable to find qwebchannel.js or createGlobalEventBridge.js";
|
||||||
|
}
|
||||||
|
|
||||||
QQmlContext* newContext = new QQmlContext(_qmlEngine, qApp);
|
QQmlContext* newContext = new QQmlContext(_qmlEngine, qApp);
|
||||||
QObject* newObject = _qmlComponent->beginCreate(newContext);
|
QObject* newObject = _qmlComponent->beginCreate(newContext);
|
||||||
if (_qmlComponent->isError()) {
|
if (_qmlComponent->isError()) {
|
||||||
|
@ -672,6 +452,9 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
newObject->setProperty("eventBridge", QVariant::fromValue(this));
|
||||||
|
newContext->setContextProperty("eventBridgeJavaScriptToInject", QVariant(javaScriptToInject));
|
||||||
|
|
||||||
f(newContext, newObject);
|
f(newContext, newObject);
|
||||||
_qmlComponent->completeCreate();
|
_qmlComponent->completeCreate();
|
||||||
|
|
||||||
|
@ -679,7 +462,7 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
|
||||||
// All quick items should be focusable
|
// All quick items should be focusable
|
||||||
QQuickItem* newItem = qobject_cast<QQuickItem*>(newObject);
|
QQuickItem* newItem = qobject_cast<QQuickItem*>(newObject);
|
||||||
if (newItem) {
|
if (newItem) {
|
||||||
// Make sure we make items focusable (critical for
|
// Make sure we make items focusable (critical for
|
||||||
// supporting keyboard shortcuts)
|
// supporting keyboard shortcuts)
|
||||||
newItem->setFlag(QQuickItem::ItemIsFocusScope, true);
|
newItem->setFlag(QQuickItem::ItemIsFocusScope, true);
|
||||||
}
|
}
|
||||||
|
@ -701,38 +484,31 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
|
||||||
}
|
}
|
||||||
// The root item is ready. Associate it with the window.
|
// The root item is ready. Associate it with the window.
|
||||||
_rootItem = newItem;
|
_rootItem = newItem;
|
||||||
_rootItem->setParentItem(_renderer->_quickWindow->contentItem());
|
_rootItem->setParentItem(_quickWindow->contentItem());
|
||||||
_rootItem->setSize(_renderer->_quickWindow->renderTargetSize());
|
_rootItem->setSize(_quickWindow->renderTargetSize());
|
||||||
return _rootItem;
|
return _rootItem;
|
||||||
}
|
}
|
||||||
|
|
||||||
void OffscreenQmlSurface::updateQuick() {
|
void OffscreenQmlSurface::updateQuick() {
|
||||||
// If we're
|
// If we're
|
||||||
// a) not set up
|
// a) not set up
|
||||||
// b) already rendering a frame
|
// b) already rendering a frame
|
||||||
// c) rendering too fast
|
// c) rendering too fast
|
||||||
// then skip this
|
// then skip this
|
||||||
if (!_renderer || _renderer->_rendering || !_renderer->allowNewFrame(_maxFps)) {
|
if (!allowNewFrame(_maxFps)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (_polish) {
|
if (_polish) {
|
||||||
_renderer->_renderControl->polishItems();
|
_renderControl->polishItems();
|
||||||
_polish = false;
|
_polish = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (_render) {
|
if (_render) {
|
||||||
PROFILE_RANGE(__FUNCTION__);
|
PROFILE_RANGE(__FUNCTION__);
|
||||||
// Lock the GUI size while syncing
|
render();
|
||||||
QMutexLocker locker(&(_renderer->_mutex));
|
|
||||||
_renderer->_queue.add(RENDER);
|
|
||||||
_renderer->_waitCondition.wait(&(_renderer->_mutex));
|
|
||||||
_render = false;
|
_render = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (_renderer->_escrow.fetchSignaledAndRelease(_currentTexture)) {
|
|
||||||
emit textureUpdated(_currentTexture);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
QPointF OffscreenQmlSurface::mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject) {

@@ -744,7 +520,7 @@ QPointF OffscreenQmlSurface::mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject) {
    }
    vec2 offscreenPosition = toGlm(sourcePosition);
    offscreenPosition /= sourceSize;
    offscreenPosition *= vec2(toGlm(_renderer->_quickWindow->size()));
    offscreenPosition *= vec2(toGlm(_quickWindow->size()));
    return QPointF(offscreenPosition.x, offscreenPosition.y);
}

@@ -752,14 +528,13 @@ QPointF OffscreenQmlSurface::mapToVirtualScreen(const QPointF& originalPoint, QObject* originalWidget) {
    return _mouseTranslator(originalPoint);
}

///////////////////////////////////////////////////////
//
// Event handling customization
//

bool OffscreenQmlSurface::filterEnabled(QObject* originalDestination, QEvent* event) const {
    if (_renderer->_quickWindow == originalDestination) {
    if (_quickWindow == originalDestination) {
        return false;
    }
    // Only intercept events while we're in an active state
|
@ -777,12 +552,11 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
|
||||||
// Don't intercept our own events, or we enter an infinite recursion
|
// Don't intercept our own events, or we enter an infinite recursion
|
||||||
QObject* recurseTest = originalDestination;
|
QObject* recurseTest = originalDestination;
|
||||||
while (recurseTest) {
|
while (recurseTest) {
|
||||||
Q_ASSERT(recurseTest != _rootItem && recurseTest != _renderer->_quickWindow);
|
Q_ASSERT(recurseTest != _rootItem && recurseTest != _quickWindow);
|
||||||
recurseTest = recurseTest->parent();
|
recurseTest = recurseTest->parent();
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
|
||||||
switch (event->type()) {
|
switch (event->type()) {
|
||||||
case QEvent::Resize: {
|
case QEvent::Resize: {
|
||||||
QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
|
QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
|
||||||
|
@ -796,7 +570,7 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
|
||||||
case QEvent::KeyPress:
|
case QEvent::KeyPress:
|
||||||
case QEvent::KeyRelease: {
|
case QEvent::KeyRelease: {
|
||||||
event->ignore();
|
event->ignore();
|
||||||
if (QCoreApplication::sendEvent(_renderer->_quickWindow, event)) {
|
if (QCoreApplication::sendEvent(_quickWindow, event)) {
|
||||||
return event->isAccepted();
|
return event->isAccepted();
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
@ -810,7 +584,7 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
|
||||||
wheelEvent->delta(), wheelEvent->buttons(),
|
wheelEvent->delta(), wheelEvent->buttons(),
|
||||||
wheelEvent->modifiers(), wheelEvent->orientation());
|
wheelEvent->modifiers(), wheelEvent->orientation());
|
||||||
mappedEvent.ignore();
|
mappedEvent.ignore();
|
||||||
if (QCoreApplication::sendEvent(_renderer->_quickWindow, &mappedEvent)) {
|
if (QCoreApplication::sendEvent(_quickWindow, &mappedEvent)) {
|
||||||
return mappedEvent.isAccepted();
|
return mappedEvent.isAccepted();
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
@ -831,7 +605,7 @@ bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* even
|
||||||
_qmlEngine->rootContext()->setContextProperty("lastMousePosition", transformedPos);
|
_qmlEngine->rootContext()->setContextProperty("lastMousePosition", transformedPos);
|
||||||
}
|
}
|
||||||
mappedEvent.ignore();
|
mappedEvent.ignore();
|
||||||
if (QCoreApplication::sendEvent(_renderer->_quickWindow, &mappedEvent)) {
|
if (QCoreApplication::sendEvent(_quickWindow, &mappedEvent)) {
|
||||||
return mappedEvent.isAccepted();
|
return mappedEvent.isAccepted();
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
@ -850,7 +624,10 @@ void OffscreenQmlSurface::pause() {
|
||||||
|
|
||||||
void OffscreenQmlSurface::resume() {
    _paused = false;
    requestRender();
    _render = true;

    getRootItem()->setProperty("eventBridge", QVariant::fromValue(this));
    getRootContext()->setContextProperty("webEntity", this);
}

bool OffscreenQmlSurface::isPaused() const {

@@ -859,8 +636,8 @@ bool OffscreenQmlSurface::isPaused() const {

void OffscreenQmlSurface::setProxyWindow(QWindow* window) {
    _proxyWindow = window;
    if (_renderer && _renderer->_renderControl) {
    if (_renderControl) {
        _renderer->_renderControl->_renderWindow = window;
        _renderControl->_renderWindow = window;
    }
}

@@ -869,11 +646,11 @@ QObject* OffscreenQmlSurface::getEventHandler() {
}

QQuickWindow* OffscreenQmlSurface::getWindow() {
    return _renderer->_quickWindow;
    return _quickWindow;
}

QSize OffscreenQmlSurface::size() const {
    return _renderer->_quickWindow->geometry().size();
    return _quickWindow->geometry().size();
}

QQmlContext* OffscreenQmlSurface::getRootContext() {

@@ -908,15 +685,37 @@ QVariant OffscreenQmlSurface::returnFromUiThread(std::function<QVariant()> function) {
    return function();
}

|
void OffscreenQmlSurface::focusDestroyed(QObject *obj) {
    _currentFocusItem = nullptr;
}

void OffscreenQmlSurface::onFocusObjectChanged(QObject* object) {
    if (!object) {
    QQuickItem* item = dynamic_cast<QQuickItem*>(object);
    if (!item) {
        setFocusText(false);
        _currentFocusItem = nullptr;
        return;
    }

    QInputMethodQueryEvent query(Qt::ImEnabled);
    qApp->sendEvent(object, &query);
    setFocusText(query.value(Qt::ImEnabled).toBool());

    if (_currentFocusItem) {
        disconnect(_currentFocusItem, &QObject::destroyed, this, 0);
    }

    // Raise and lower keyboard for QML text fields.
    // HTML text fields are handled in emitWebEvent() methods - testing READ_ONLY_PROPERTY prevents action for HTML files.
    const char* READ_ONLY_PROPERTY = "readOnly";
    bool raiseKeyboard = item->hasActiveFocus() && item->property(READ_ONLY_PROPERTY) == false;
    if (_currentFocusItem && !raiseKeyboard) {
        setKeyboardRaised(_currentFocusItem, false);
    }
    setKeyboardRaised(item, raiseKeyboard); // Always set focus so that alphabetic / numeric setting is updated.

    _currentFocusItem = item;
    connect(_currentFocusItem, &QObject::destroyed, this, &OffscreenQmlSurface::focusDestroyed);
}

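The handler above decides whether to report text focus by sending an input-method query to the newly focused object. A minimal stand-alone version of that query is shown below; the helper name is an assumption for illustration.

#include <QCoreApplication>
#include <QEvent>

// Ask an object whether it currently accepts text input, mirroring the
// Qt::ImEnabled query used in onFocusObjectChanged().
static bool acceptsTextInput(QObject* focusObject) {
    if (!focusObject) {
        return false;
    }
    QInputMethodQueryEvent query(Qt::ImEnabled);
    QCoreApplication::sendEvent(focusObject, &query);
    return query.value(Qt::ImEnabled).toBool();
}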
void OffscreenQmlSurface::setFocusText(bool newFocusText) {
|
void OffscreenQmlSurface::setFocusText(bool newFocusText) {
|
||||||
|
@ -926,4 +725,103 @@ void OffscreenQmlSurface::setFocusText(bool newFocusText) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// UTF-8 encoded symbols
|
||||||
|
static const uint8_t UPWARDS_WHITE_ARROW_FROM_BAR[] = { 0xE2, 0x87, 0xAA, 0x00 }; // shift
|
||||||
|
static const uint8_t LEFT_ARROW[] = { 0xE2, 0x86, 0x90, 0x00 }; // backspace
|
||||||
|
static const uint8_t LEFTWARD_WHITE_ARROW[] = { 0xE2, 0x87, 0xA6, 0x00 }; // left arrow
|
||||||
|
static const uint8_t RIGHTWARD_WHITE_ARROW[] = { 0xE2, 0x87, 0xA8, 0x00 }; // right arrow
|
||||||
|
static const uint8_t ASTERISIM[] = { 0xE2, 0x81, 0x82, 0x00 }; // symbols
|
||||||
|
static const uint8_t RETURN_SYMBOL[] = { 0xE2, 0x8F, 0x8E, 0x00 }; // return
|
||||||
|
static const char PUNCTUATION_STRING[] = "&123";
|
||||||
|
static const char ALPHABET_STRING[] = "abc";
|
||||||
|
|
||||||
|
static bool equals(const QByteArray& byteArray, const uint8_t* ptr) {
|
||||||
|
int i;
|
||||||
|
for (i = 0; i < byteArray.size(); i++) {
|
||||||
|
if ((char)ptr[i] != byteArray[i]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ptr[i] == 0x00;
|
||||||
|
}
|
||||||
|
|
||||||
|
void OffscreenQmlSurface::synthesizeKeyPress(QString key) {
|
||||||
|
auto eventHandler = getEventHandler();
|
||||||
|
if (eventHandler) {
|
||||||
|
auto utf8Key = key.toUtf8();
|
||||||
|
|
||||||
|
int scanCode = (int)utf8Key[0];
|
||||||
|
QString keyString = key;
|
||||||
|
if (equals(utf8Key, UPWARDS_WHITE_ARROW_FROM_BAR) || equals(utf8Key, ASTERISIM) ||
|
||||||
|
equals(utf8Key, (uint8_t*)PUNCTUATION_STRING) || equals(utf8Key, (uint8_t*)ALPHABET_STRING)) {
|
||||||
|
return; // ignore
|
||||||
|
} else if (equals(utf8Key, LEFT_ARROW)) {
|
||||||
|
scanCode = Qt::Key_Backspace;
|
||||||
|
keyString = "\x08";
|
||||||
|
} else if (equals(utf8Key, RETURN_SYMBOL)) {
|
||||||
|
scanCode = Qt::Key_Return;
|
||||||
|
keyString = "\x0d";
|
||||||
|
} else if (equals(utf8Key, LEFTWARD_WHITE_ARROW)) {
|
||||||
|
scanCode = Qt::Key_Left;
|
||||||
|
keyString = "";
|
||||||
|
} else if (equals(utf8Key, RIGHTWARD_WHITE_ARROW)) {
|
||||||
|
scanCode = Qt::Key_Right;
|
||||||
|
keyString = "";
|
||||||
|
}
|
||||||
|
|
||||||
|
QKeyEvent* pressEvent = new QKeyEvent(QEvent::KeyPress, scanCode, Qt::NoModifier, keyString);
|
||||||
|
QKeyEvent* releaseEvent = new QKeyEvent(QEvent::KeyRelease, scanCode, Qt::NoModifier, keyString);
|
||||||
|
QCoreApplication::postEvent(eventHandler, pressEvent);
|
||||||
|
QCoreApplication::postEvent(eventHandler, releaseEvent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void OffscreenQmlSurface::setKeyboardRaised(QObject* object, bool raised, bool numeric) {
|
||||||
|
if (!object) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
QQuickItem* item = dynamic_cast<QQuickItem*>(object);
|
||||||
|
while (item) {
|
||||||
|
// Numeric value may be set in parameter from HTML UI; for QML UI, detect numeric fields here.
|
||||||
|
numeric = numeric || QString(item->metaObject()->className()).left(7) == "SpinBox";
|
||||||
|
|
||||||
|
if (item->property("keyboardRaised").isValid()) {
|
||||||
|
if (item->property("punctuationMode").isValid()) {
|
||||||
|
item->setProperty("punctuationMode", QVariant(numeric));
|
||||||
|
}
|
||||||
|
item->setProperty("keyboardRaised", QVariant(raised));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
item = dynamic_cast<QQuickItem*>(item->parentItem());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void OffscreenQmlSurface::emitScriptEvent(const QVariant& message) {
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "emitScriptEvent", Qt::QueuedConnection, Q_ARG(QVariant, message));
    } else {
        emit scriptEventReceived(message);
    }
}

void OffscreenQmlSurface::emitWebEvent(const QVariant& message) {
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "emitWebEvent", Qt::QueuedConnection, Q_ARG(QVariant, message));
    } else {
        // Special case to handle raising and lowering the virtual keyboard.
        const QString RAISE_KEYBOARD = "_RAISE_KEYBOARD";
        const QString RAISE_KEYBOARD_NUMERIC = "_RAISE_KEYBOARD_NUMERIC";
        const QString LOWER_KEYBOARD = "_LOWER_KEYBOARD";
        QString messageString = message.type() == QVariant::String ? message.toString() : "";
        if (messageString.left(RAISE_KEYBOARD.length()) == RAISE_KEYBOARD) {
            setKeyboardRaised(_currentFocusItem, true, messageString == RAISE_KEYBOARD_NUMERIC);
        } else if (messageString == LOWER_KEYBOARD) {
            setKeyboardRaised(_currentFocusItem, false);
        } else {
            emit webEventReceived(message);
        }
    }
}

#include "OffscreenQmlSurface.moc"
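Because both emit methods bounce onto the surface's own thread with a queued invokeMethod, callers on any thread can use the event bridge directly. A hypothetical consumer wiring is sketched below; it is illustration only, and `surface` is a placeholder for an already-created OffscreenQmlSurface.

#include <QDebug>

// Listen for messages coming from QML / web content and push one back.
static void wireEventBridge(OffscreenQmlSurface* surface) {
    QObject::connect(surface, &OffscreenQmlSurface::webEventReceived, [](const QVariant& message) {
        qDebug() << "web event received:" << message; // delivered on the surface's thread
    });
    surface->emitScriptEvent(QStringLiteral("hello from C++"));
}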
|
|
|
@ -9,25 +9,27 @@
|
||||||
#ifndef hifi_OffscreenQmlSurface_h
|
#ifndef hifi_OffscreenQmlSurface_h
|
||||||
#define hifi_OffscreenQmlSurface_h
|
#define hifi_OffscreenQmlSurface_h
|
||||||
|
|
||||||
#include <QTimer>
|
|
||||||
#include <QUrl>
|
|
||||||
#include <atomic>
|
#include <atomic>
|
||||||
#include <functional>
|
#include <functional>
|
||||||
|
|
||||||
|
#include <QtCore/QJsonObject>
|
||||||
|
#include <QTimer>
|
||||||
|
#include <QUrl>
|
||||||
|
|
||||||
|
|
||||||
#include <GLMHelpers.h>
|
#include <GLMHelpers.h>
|
||||||
#include <ThreadHelpers.h>
|
#include <ThreadHelpers.h>
|
||||||
|
#include "TextureRecycler.h"
|
||||||
|
|
||||||
class QWindow;
|
class QWindow;
|
||||||
class QMyQuickRenderControl;
|
class QMyQuickRenderControl;
|
||||||
|
class OffscreenGLCanvas;
|
||||||
class QOpenGLContext;
|
class QOpenGLContext;
|
||||||
class QQmlEngine;
|
class QQmlEngine;
|
||||||
class QQmlContext;
|
class QQmlContext;
|
||||||
class QQmlComponent;
|
class QQmlComponent;
|
||||||
class QQuickWindow;
|
class QQuickWindow;
|
||||||
class QQuickItem;
|
class QQuickItem;
|
||||||
|
|
||||||
class OffscreenQmlRenderThread;
|
|
||||||
|
|
||||||
class OffscreenQmlSurface : public QObject {
|
class OffscreenQmlSurface : public QObject {
|
||||||
Q_OBJECT
|
Q_OBJECT
|
||||||
Q_PROPERTY(bool focusText READ isFocusText NOTIFY focusTextChanged)
|
Q_PROPERTY(bool focusText READ isFocusText NOTIFY focusTextChanged)
|
||||||
|
@ -71,15 +73,35 @@ public:
|
||||||
QPointF mapToVirtualScreen(const QPointF& originalPoint, QObject* originalWidget);
|
QPointF mapToVirtualScreen(const QPointF& originalPoint, QObject* originalWidget);
|
||||||
bool eventFilter(QObject* originalDestination, QEvent* event) override;
|
bool eventFilter(QObject* originalDestination, QEvent* event) override;
|
||||||
|
|
||||||
|
void setKeyboardRaised(QObject* object, bool raised, bool numeric = false);
|
||||||
|
Q_INVOKABLE void synthesizeKeyPress(QString key);
|
||||||
|
|
||||||
|
using TextureAndFence = std::pair<uint32_t, void*>;
|
||||||
|
// Checks to see if a new texture is available. If one is, the function returns true and
|
||||||
|
// textureAndFence will be populated with the texture ID and a fence which will be signalled
|
||||||
|
// when the texture is safe to read.
|
||||||
|
// Returns false if no new texture is available
|
||||||
|
bool fetchTexture(TextureAndFence& textureAndFence);
|
||||||
|
// Release a previously acquired texture, along with a fence which indicates when reads from the
|
||||||
|
// texture have completed.
|
||||||
|
void releaseTexture(const TextureAndFence& textureAndFence);
|
||||||
|
|
||||||
signals:
|
signals:
|
||||||
void textureUpdated(unsigned int texture);
|
|
||||||
void focusObjectChanged(QObject* newFocus);
|
void focusObjectChanged(QObject* newFocus);
|
||||||
void focusTextChanged(bool focusText);
|
void focusTextChanged(bool focusText);
|
||||||
|
|
||||||
public slots:
|
public slots:
|
||||||
void requestUpdate();
|
|
||||||
void requestRender();
|
|
||||||
void onAboutToQuit();
|
void onAboutToQuit();
|
||||||
|
void focusDestroyed(QObject *obj);
|
||||||
|
|
||||||
|
// event bridge
|
||||||
|
public slots:
|
||||||
|
void emitScriptEvent(const QVariant& scriptMessage);
|
||||||
|
void emitWebEvent(const QVariant& webMessage);
|
||||||
|
signals:
|
||||||
|
void scriptEventReceived(const QVariant& message);
|
||||||
|
void webEventReceived(const QVariant& message);
|
||||||
|
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
bool filterEnabled(QObject* originalDestination, QEvent* event) const;
|
bool filterEnabled(QObject* originalDestination, QEvent* event) const;
|
||||||
|
@ -88,26 +110,47 @@ protected:
|
||||||
private:
|
private:
|
||||||
QObject* finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f);
|
QObject* finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f);
|
||||||
QPointF mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject);
|
QPointF mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject);
|
||||||
|
void setupFbo();
|
||||||
|
bool allowNewFrame(uint8_t fps);
|
||||||
|
void render();
|
||||||
|
void cleanup();
|
||||||
|
QJsonObject getGLContextData();
|
||||||
|
|
||||||
private slots:
|
private slots:
|
||||||
void updateQuick();
|
void updateQuick();
|
||||||
void onFocusObjectChanged(QObject* newFocus);
|
void onFocusObjectChanged(QObject* newFocus);
|
||||||
|
|
||||||
private:
|
private:
|
||||||
friend class OffscreenQmlRenderThread;
|
QQuickWindow* _quickWindow { nullptr };
|
||||||
OffscreenQmlRenderThread* _renderer{ nullptr };
|
QMyQuickRenderControl* _renderControl{ nullptr };
|
||||||
QQmlEngine* _qmlEngine{ nullptr };
|
QQmlEngine* _qmlEngine { nullptr };
|
||||||
QQmlComponent* _qmlComponent{ nullptr };
|
QQmlComponent* _qmlComponent { nullptr };
|
||||||
QQuickItem* _rootItem{ nullptr };
|
QQuickItem* _rootItem { nullptr };
|
||||||
|
OffscreenGLCanvas* _canvas { nullptr };
|
||||||
|
QJsonObject _glData;
|
||||||
|
|
||||||
QTimer _updateTimer;
|
QTimer _updateTimer;
|
||||||
uint32_t _currentTexture{ 0 };
|
uint32_t _fbo { 0 };
|
||||||
bool _render{ false };
|
uint32_t _depthStencil { 0 };
|
||||||
bool _polish{ true };
|
uint64_t _lastRenderTime { 0 };
|
||||||
bool _paused{ true };
|
uvec2 _size { 1920, 1080 };
|
||||||
|
TextureRecycler _textures { true };
|
||||||
|
|
||||||
|
// Texture management
|
||||||
|
std::mutex _textureMutex;
|
||||||
|
TextureAndFence _latestTextureAndFence { 0, 0 };
|
||||||
|
std::list<TextureAndFence> _returnedTextures;
|
||||||
|
|
||||||
|
|
||||||
|
bool _render { false };
|
||||||
|
bool _polish { true };
|
||||||
|
bool _paused { true };
|
||||||
bool _focusText { false };
|
bool _focusText { false };
|
||||||
uint8_t _maxFps{ 60 };
|
uint8_t _maxFps { 60 };
|
||||||
MouseTranslator _mouseTranslator{ [](const QPointF& p) { return p.toPoint(); } };
|
MouseTranslator _mouseTranslator { [](const QPointF& p) { return p.toPoint(); } };
|
||||||
QWindow* _proxyWindow { nullptr };
|
QWindow* _proxyWindow { nullptr };
|
||||||
|
|
||||||
|
QQuickItem* _currentFocusItem { nullptr };
|
||||||
};
|
};
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
|
@ -1,563 +0,0 @@
|
||||||
//
|
|
||||||
// Created by Bradley Austin Davis on 2015/05/29
|
|
||||||
// Copyright 2015 High Fidelity, Inc.
|
|
||||||
//
|
|
||||||
// Distributed under the Apache License, Version 2.0.
|
|
||||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
|
||||||
//
|
|
||||||
#include "OglplusHelpers.h"
|
|
||||||
|
|
||||||
#include <set>
|
|
||||||
#include <oglplus/shapes/plane.hpp>
|
|
||||||
#include <oglplus/shapes/sky_box.hpp>
|
|
||||||
#include "GLLogging.h"
|
|
||||||
|
|
||||||
using namespace oglplus;
|
|
||||||
using namespace oglplus::shapes;
|
|
||||||
|
|
||||||
static const char * SIMPLE_TEXTURED_VS = R"VS(#version 410 core
|
|
||||||
#pragma line __LINE__
|
|
||||||
|
|
||||||
uniform mat4 mvp = mat4(1);
|
|
||||||
|
|
||||||
in vec3 Position;
|
|
||||||
in vec2 TexCoord;
|
|
||||||
|
|
||||||
out vec3 vPosition;
|
|
||||||
out vec2 vTexCoord;
|
|
||||||
|
|
||||||
void main() {
|
|
||||||
gl_Position = mvp * vec4(Position, 1);
|
|
||||||
vTexCoord = TexCoord;
|
|
||||||
vPosition = Position;
|
|
||||||
}
|
|
||||||
|
|
||||||
)VS";
|
|
||||||
|
|
||||||
static const char * SIMPLE_TEXTURED_FS = R"FS(#version 410 core
|
|
||||||
#pragma line __LINE__
|
|
||||||
|
|
||||||
uniform sampler2D sampler;
|
|
||||||
uniform float alpha = 1.0;
|
|
||||||
|
|
||||||
in vec3 vPosition;
|
|
||||||
in vec2 vTexCoord;
|
|
||||||
|
|
||||||
out vec4 FragColor;
|
|
||||||
|
|
||||||
void main() {
|
|
||||||
FragColor = texture(sampler, vTexCoord);
|
|
||||||
FragColor.a *= alpha;
|
|
||||||
if (FragColor.a <= 0.0) {
|
|
||||||
discard;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
)FS";
|
|
||||||
|
|
||||||
|
|
||||||
static const char * SIMPLE_TEXTURED_CUBEMAP_FS = R"FS(#version 410 core
|
|
||||||
#pragma line __LINE__
|
|
||||||
|
|
||||||
uniform samplerCube sampler;
|
|
||||||
uniform float alpha = 1.0;
|
|
||||||
|
|
||||||
in vec3 vPosition;
|
|
||||||
in vec3 vTexCoord;
|
|
||||||
|
|
||||||
out vec4 FragColor;
|
|
||||||
|
|
||||||
void main() {
|
|
||||||
|
|
||||||
FragColor = texture(sampler, vPosition);
|
|
||||||
FragColor.a *= alpha;
|
|
||||||
}
|
|
||||||
|
|
||||||
)FS";
|
|
||||||
|
|
||||||
|
|
||||||
ProgramPtr loadDefaultShader() {
|
|
||||||
ProgramPtr result;
|
|
||||||
compileProgram(result, SIMPLE_TEXTURED_VS, SIMPLE_TEXTURED_FS);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
ProgramPtr loadCubemapShader() {
|
|
||||||
ProgramPtr result;
|
|
||||||
compileProgram(result, SIMPLE_TEXTURED_VS, SIMPLE_TEXTURED_CUBEMAP_FS);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& gs, const std::string& fs) {
|
|
||||||
using namespace oglplus;
|
|
||||||
try {
|
|
||||||
result = std::make_shared<Program>();
|
|
||||||
// attach the shaders to the program
|
|
||||||
result->AttachShader(
|
|
||||||
VertexShader()
|
|
||||||
.Source(GLSLSource(vs))
|
|
||||||
.Compile()
|
|
||||||
);
|
|
||||||
result->AttachShader(
|
|
||||||
GeometryShader()
|
|
||||||
.Source(GLSLSource(gs))
|
|
||||||
.Compile()
|
|
||||||
);
|
|
||||||
result->AttachShader(
|
|
||||||
FragmentShader()
|
|
||||||
.Source(GLSLSource(fs))
|
|
||||||
.Compile()
|
|
||||||
);
|
|
||||||
result->Link();
|
|
||||||
} catch (ProgramBuildError& err) {
|
|
||||||
Q_UNUSED(err);
|
|
||||||
qWarning() << err.Log().c_str();
|
|
||||||
Q_ASSERT_X(false, "compileProgram", "Failed to build shader program");
|
|
||||||
qFatal("%s", (const char*)err.Message);
|
|
||||||
result.reset();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& fs) {
|
|
||||||
using namespace oglplus;
|
|
||||||
try {
|
|
||||||
result = std::make_shared<Program>();
|
|
||||||
// attach the shaders to the program
|
|
||||||
result->AttachShader(
|
|
||||||
VertexShader()
|
|
||||||
.Source(GLSLSource(vs))
|
|
||||||
.Compile()
|
|
||||||
);
|
|
||||||
result->AttachShader(
|
|
||||||
FragmentShader()
|
|
||||||
.Source(GLSLSource(fs))
|
|
||||||
.Compile()
|
|
||||||
);
|
|
||||||
result->Link();
|
|
||||||
} catch (ProgramBuildError& err) {
|
|
||||||
Q_UNUSED(err);
|
|
||||||
qWarning() << err.Log().c_str();
|
|
||||||
Q_ASSERT_X(false, "compileProgram", "Failed to build shader program");
|
|
||||||
qFatal("%s", (const char*) err.Message);
|
|
||||||
result.reset();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
ShapeWrapperPtr loadPlane(ProgramPtr program, float aspect) {
|
|
||||||
using namespace oglplus;
|
|
||||||
Vec3f a(1, 0, 0);
|
|
||||||
Vec3f b(0, 1, 0);
|
|
||||||
if (aspect > 1) {
|
|
||||||
b[1] /= aspect;
|
|
||||||
} else {
|
|
||||||
a[0] *= aspect;
|
|
||||||
}
|
|
||||||
return ShapeWrapperPtr(
|
|
||||||
new shapes::ShapeWrapper({ "Position", "TexCoord" }, shapes::Plane(a, b), *program)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
ShapeWrapperPtr loadSkybox(ProgramPtr program) {
|
|
||||||
return ShapeWrapperPtr(new shapes::ShapeWrapper(std::initializer_list<std::string>{ "Position" }, shapes::SkyBox(), *program));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return a point's cartesian coordinates on a sphere from pitch and yaw
|
|
||||||
static glm::vec3 getPoint(float yaw, float pitch) {
|
|
||||||
return glm::vec3(glm::cos(-pitch) * (-glm::sin(yaw)),
|
|
||||||
glm::sin(-pitch),
|
|
||||||
glm::cos(-pitch) * (-glm::cos(yaw)));
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class SphereSection : public DrawingInstructionWriter, public DrawMode {
public:
    using IndexArray = std::vector<GLuint>;
    using PosArray = std::vector<float>;
    using TexArray = std::vector<float>;
    /// The type of the index container returned by Indices()
    // vertex positions
    PosArray _pos_data;
    // vertex tex coords
    TexArray _tex_data;
    IndexArray _idx_data;
    unsigned int _prim_count{ 0 };

public:
    SphereSection(
        const float fov,
        const float aspectRatio,
        const int slices_,
        const int stacks_) {
        //UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
        if (fov >= PI) {
            qCDebug(glLogging) << "TexturedHemisphere::buildVBO(): FOV greater or equal than Pi will create issues";
        }

        int gridSize = std::max(slices_, stacks_);
        int gridSizeLog2 = 1;
        while (1 << gridSizeLog2 < gridSize) {
            ++gridSizeLog2;
        }
        gridSize = (1 << gridSizeLog2) + 1;
        // Compute number of vertices needed
        int vertices = gridSize * gridSize;
        _pos_data.resize(vertices * 3);
        _tex_data.resize(vertices * 2);

        // Compute vertices positions and texture UV coordinate
        for (int y = 0; y <= gridSize; ++y) {
            for (int x = 0; x <= gridSize; ++x) {

            }
        }
        for (int i = 0; i < gridSize; i++) {
            float stacksRatio = (float)i / (float)(gridSize - 1); // First stack is 0.0f, last stack is 1.0f
            // abs(theta) <= fov / 2.0f
            float pitch = -fov * (stacksRatio - 0.5f);
            for (int j = 0; j < gridSize; j++) {
                float slicesRatio = (float)j / (float)(gridSize - 1); // First slice is 0.0f, last slice is 1.0f
                // abs(phi) <= fov * aspectRatio / 2.0f
                float yaw = -fov * aspectRatio * (slicesRatio - 0.5f);
                int vertex = i * gridSize + j;
                int posOffset = vertex * 3;
                int texOffset = vertex * 2;
                vec3 pos = getPoint(yaw, pitch);
                _pos_data[posOffset] = pos.x;
                _pos_data[posOffset + 1] = pos.y;
                _pos_data[posOffset + 2] = pos.z;
                _tex_data[texOffset] = slicesRatio;
                _tex_data[texOffset + 1] = stacksRatio;
            }
        } // done with vertices

        int rowLen = gridSize;

        // gridsize now refers to the triangles, not the vertices, so reduce by one
        // or die by fencepost error http://en.wikipedia.org/wiki/Off-by-one_error
        --gridSize;
        int quads = gridSize * gridSize;
        for (int t = 0; t < quads; ++t) {
            int x =
                ((t & 0x0001) >> 0) |
                ((t & 0x0004) >> 1) |
                ((t & 0x0010) >> 2) |
                ((t & 0x0040) >> 3) |
                ((t & 0x0100) >> 4) |
                ((t & 0x0400) >> 5) |
                ((t & 0x1000) >> 6) |
                ((t & 0x4000) >> 7);
            int y =
                ((t & 0x0002) >> 1) |
                ((t & 0x0008) >> 2) |
                ((t & 0x0020) >> 3) |
                ((t & 0x0080) >> 4) |
                ((t & 0x0200) >> 5) |
                ((t & 0x0800) >> 6) |
                ((t & 0x2000) >> 7) |
                ((t & 0x8000) >> 8);
            int i = x * (rowLen) + y;

            _idx_data.push_back(i);
            _idx_data.push_back(i + 1);
            _idx_data.push_back(i + rowLen + 1);

            _idx_data.push_back(i + rowLen + 1);
            _idx_data.push_back(i + rowLen);
            _idx_data.push_back(i);
        }
        _prim_count = quads * 2;
    }

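    // The bit shuffling in the quad loop above de-interleaves the even and odd
    // bits of t into x and y, so the quads are visited in Morton (Z-order) rather
    // than row-major order. A standalone sketch of the same decode, for
    // illustration only (nothing in this file calls it):
    static void decodeMorton2(int t, int& x, int& y) {
        x = 0;
        y = 0;
        for (int bit = 0; bit < 8; ++bit) {
            x |= ((t >> (2 * bit)) & 1) << bit;      // even bits -> x
            y |= ((t >> (2 * bit + 1)) & 1) << bit;  // odd bits  -> y
        }
        // e.g. t = 3 (binary 11) decodes to x = 1, y = 1
    }
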
    /// Returns the winding direction of faces
    FaceOrientation FaceWinding(void) const {
        return FaceOrientation::CCW;
    }

    typedef GLuint(SphereSection::*VertexAttribFunc)(std::vector<GLfloat>&) const;

    /// Makes the vertex positions and returns the number of values per vertex
    template <typename T>
    GLuint Positions(std::vector<T>& dest) const {
        dest.clear();
        dest.insert(dest.begin(), _pos_data.begin(), _pos_data.end());
        return 3;
    }

    /// Makes the vertex normals and returns the number of values per vertex
    template <typename T>
    GLuint Normals(std::vector<T>& dest) const {
        dest.clear();
        return 3;
    }

    /// Makes the vertex tangents and returns the number of values per vertex
    template <typename T>
    GLuint Tangents(std::vector<T>& dest) const {
        dest.clear();
        return 3;
    }

    /// Makes the vertex bi-tangents and returns the number of values per vertex
    template <typename T>
    GLuint Bitangents(std::vector<T>& dest) const {
        dest.clear();
        return 3;
    }

    /// Makes the texture coordinates returns the number of values per vertex
    template <typename T>
    GLuint TexCoordinates(std::vector<T>& dest) const {
        dest.clear();
        dest.insert(dest.begin(), _tex_data.begin(), _tex_data.end());
        return 2;
    }

    typedef VertexAttribsInfo<
        SphereSection,
        std::tuple<
            VertexPositionsTag,
            VertexNormalsTag,
            VertexTangentsTag,
            VertexBitangentsTag,
            VertexTexCoordinatesTag
        >
    > VertexAttribs;

    Spheref MakeBoundingSphere(void) const {
        GLfloat min_x = _pos_data[3], max_x = _pos_data[3];
        GLfloat min_y = _pos_data[4], max_y = _pos_data[4];
        GLfloat min_z = _pos_data[5], max_z = _pos_data[5];
        for (std::size_t v = 0, vn = _pos_data.size() / 3; v != vn; ++v) {
            GLfloat x = _pos_data[v * 3 + 0];
            GLfloat y = _pos_data[v * 3 + 1];
            GLfloat z = _pos_data[v * 3 + 2];

            if (min_x > x) min_x = x;
            if (min_y > y) min_y = y;
            if (min_z > z) min_z = z;
            if (max_x < x) max_x = x;
            if (max_y < y) max_y = y;
            if (max_z < z) max_z = z;
        }

        Vec3f c(
            (min_x + max_x) * 0.5f,
            (min_y + max_y) * 0.5f,
            (min_z + max_z) * 0.5f
        );

        return Spheref(
            c.x(), c.y(), c.z(),
            Distance(c, Vec3f(min_x, min_y, min_z))
        );
    }

    /// Queries the bounding sphere coordinates and dimensions
    template <typename T>
    void BoundingSphere(oglplus::Sphere<T>& bounding_sphere) const {
        bounding_sphere = oglplus::Sphere<T>(MakeBoundingSphere());
    }

    /// Returns element indices that are used with the drawing instructions
    const IndexArray & Indices(Default = Default()) const {
        return _idx_data;
    }

    /// Returns the instructions for rendering of faces
    DrawingInstructions Instructions(PrimitiveType primitive) const {
        DrawingInstructions instr = MakeInstructions();
        DrawOperation operation;
        operation.method = DrawOperation::Method::DrawElements;
        operation.mode = primitive;
        operation.first = 0;
        operation.count = _prim_count * 3;
        operation.restart_index = DrawOperation::NoRestartIndex();
        operation.phase = 0;
        AddInstruction(instr, operation);
        return instr;
    }

    /// Returns the instructions for rendering of faces
    DrawingInstructions Instructions(Default = Default()) const {
        return Instructions(PrimitiveType::Triangles);
    }
};

ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov, float aspect, int slices, int stacks) {
    using namespace oglplus;
    return ShapeWrapperPtr(
        new shapes::ShapeWrapper({ "Position", "TexCoord" }, SphereSection(fov, aspect, slices, stacks), *program)
    );
}

namespace oglplus {
namespace shapes {

class Laser : public DrawingInstructionWriter, public DrawMode {
public:
    using IndexArray = std::vector<GLuint>;
    using PosArray = std::vector<float>;
    /// The type of the index container returned by Indices()
    // vertex positions
    PosArray _pos_data;
    IndexArray _idx_data;
    unsigned int _prim_count { 0 };

public:
    Laser() {
        int vertices = 2;
        _pos_data.resize(vertices * 3);
        _pos_data[0] = 0;
        _pos_data[1] = 0;
        _pos_data[2] = 0;

        _pos_data[3] = 0;
        _pos_data[4] = 0;
        _pos_data[5] = -1;

        _idx_data.push_back(0);
        _idx_data.push_back(1);
        _prim_count = 1;
    }

    /// Returns the winding direction of faces
    FaceOrientation FaceWinding(void) const {
        return FaceOrientation::CCW;
    }

    /// Queries the bounding sphere coordinates and dimensions
    template <typename T>
    void BoundingSphere(Sphere<T>& bounding_sphere) const {
        bounding_sphere = Sphere<T>(0, 0, -0.5, 0.5);
    }

    typedef GLuint(Laser::*VertexAttribFunc)(std::vector<GLfloat>&) const;

    /// Makes the vertex positions and returns the number of values per vertex
    template <typename T>
    GLuint Positions(std::vector<T>& dest) const {
        dest.clear();
        dest.insert(dest.begin(), _pos_data.begin(), _pos_data.end());
        return 3;
    }

    typedef VertexAttribsInfo<
        Laser,
        std::tuple<VertexPositionsTag>
    > VertexAttribs;

    /// Returns element indices that are used with the drawing instructions
    const IndexArray & Indices(Default = Default()) const {
        return _idx_data;
    }

    /// Returns the instructions for rendering of faces
    DrawingInstructions Instructions(PrimitiveType primitive) const {
        DrawingInstructions instr = MakeInstructions();
        DrawOperation operation;
        operation.method = DrawOperation::Method::DrawElements;
        operation.mode = primitive;
        operation.first = 0;
        operation.count = _prim_count * 3;
        operation.restart_index = DrawOperation::NoRestartIndex();
        operation.phase = 0;
        AddInstruction(instr, operation);
        return instr;
    }

    /// Returns the instructions for rendering of faces
    DrawingInstructions Instructions(Default = Default()) const {
        return Instructions(PrimitiveType::Lines);
    }
};
}
}

ShapeWrapperPtr loadLaser(const ProgramPtr& program) {
    return std::make_shared<shapes::ShapeWrapper>(shapes::ShapeWrapper("Position", shapes::Laser(), *program));
}

void TextureRecycler::setSize(const uvec2& size) {
    if (size == _size) {
        return;
    }
    _size = size;
    while (!_readyTextures.empty()) {
        _readyTextures.pop();
    }
    std::set<Map::key_type> toDelete;
    std::for_each(_allTextures.begin(), _allTextures.end(), [&](Map::const_reference item) {
        if (!item.second._active && item.second._size != _size) {
            toDelete.insert(item.first);
        }
    });
    std::for_each(toDelete.begin(), toDelete.end(), [&](Map::key_type key) {
        _allTextures.erase(key);
    });
}

void TextureRecycler::clear() {
    while (!_readyTextures.empty()) {
        _readyTextures.pop();
    }
    _allTextures.clear();
}

TexturePtr TextureRecycler::getNextTexture() {
    using namespace oglplus;
    if (_readyTextures.empty()) {
        TexturePtr newTexture(new Texture());

        if (_useMipmaps) {
            Context::Bound(oglplus::Texture::Target::_2D, *newTexture)
                .MinFilter(TextureMinFilter::LinearMipmapLinear)
                .MagFilter(TextureMagFilter::Linear)
                .WrapS(TextureWrap::ClampToEdge)
                .WrapT(TextureWrap::ClampToEdge)
                .Anisotropy(8.0f)
                .LODBias(-0.2f)
                .Image2D(0, PixelDataInternalFormat::RGBA8,
                    _size.x, _size.y,
                    0, PixelDataFormat::RGB, PixelDataType::UnsignedByte, nullptr);
        } else {
            Context::Bound(oglplus::Texture::Target::_2D, *newTexture)
                .MinFilter(TextureMinFilter::Linear)
                .MagFilter(TextureMagFilter::Linear)
                .WrapS(TextureWrap::ClampToEdge)
                .WrapT(TextureWrap::ClampToEdge)
                .Image2D(0, PixelDataInternalFormat::RGBA8,
                    _size.x, _size.y,
                    0, PixelDataFormat::RGB, PixelDataType::UnsignedByte, nullptr);
        }
        GLuint texId = GetName(*newTexture);
        _allTextures[texId] = TexInfo{ newTexture, _size };
        _readyTextures.push(newTexture);
    }

    TexturePtr result = _readyTextures.front();
    _readyTextures.pop();

    GLuint texId = GetName(*result);
    auto& item = _allTextures[texId];
    item._active = true;

    return result;
}

void TextureRecycler::recycleTexture(GLuint texture) {
    Q_ASSERT(_allTextures.count(texture));
    auto& item = _allTextures[texture];
    Q_ASSERT(item._active);
    item._active = false;
    if (item._size != _size) {
        // Buh-bye
        _allTextures.erase(texture);
        return;
    }

    _readyTextures.push(item._tex);
}

@ -1,217 +0,0 @@
//
//  Created by Bradley Austin Davis on 2015/05/26
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once

// FIXME support oglplus on all platforms
// For now it's a convenient helper for Windows

#include <queue>
#include <map>

#include <QtGlobal>

#include "GLMHelpers.h"

#define OGLPLUS_USE_GLCOREARB_H 0
#define OGLPLUS_USE_GLEW 1
#define OGLPLUS_USE_BOOST_CONFIG 1
#define OGLPLUS_NO_SITE_CONFIG 1
#define OGLPLUS_LOW_PROFILE 1

// NOTE: oglplus does some naked "#pragma GCC" without proper platform wrapping, so we need to disable this warning.
#ifdef _WIN32
#pragma warning(push)
#pragma warning( disable : 4068 )
#elif defined(Q_OS_MAC)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpessimizing-move"
#endif

#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic push
#if __GNUC__ >= 5 && __GNUC_MINOR__ >= 1
#pragma GCC diagnostic ignored "-Wsuggest-override"
#endif
#endif

#include <oglplus/gl.hpp>

#include <oglplus/all.hpp>
#include <oglplus/interop/glm.hpp>
#include <oglplus/bound/texture.hpp>
#include <oglplus/bound/framebuffer.hpp>
#include <oglplus/bound/renderbuffer.hpp>
#include <oglplus/shapes/wrapper.hpp>

#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic pop
#endif

#ifdef _WIN32
#pragma warning(pop)
#elif defined(Q_OS_MAC)
#pragma clang diagnostic pop
#endif

#include "NumericalConstants.h"

using FramebufferPtr = std::shared_ptr<oglplus::Framebuffer>;
using RenderbufferPtr = std::shared_ptr<oglplus::Renderbuffer>;
using TexturePtr = std::shared_ptr<oglplus::Texture>;
using ShapeWrapperPtr = std::shared_ptr<oglplus::shapes::ShapeWrapper>;
using BufferPtr = std::shared_ptr<oglplus::Buffer>;
using VertexArrayPtr = std::shared_ptr<oglplus::VertexArray>;
using ProgramPtr = std::shared_ptr<oglplus::Program>;
using Mat4Uniform = oglplus::Uniform<mat4>;

ProgramPtr loadDefaultShader();
ProgramPtr loadCubemapShader();
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& fs);
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& gs, const std::string& fs);

ShapeWrapperPtr loadSkybox(ProgramPtr program);
ShapeWrapperPtr loadPlane(ProgramPtr program, float aspect = 1.0f);
ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov = PI / 3.0f * 2.0f, float aspect = 16.0f / 9.0f, int slices = 128, int stacks = 128);
ShapeWrapperPtr loadLaser(const ProgramPtr& program);


// A basic wrapper for constructing a framebuffer with a renderbuffer
// for the depth attachment and an undefined type for the color attachement
// This allows us to reuse the basic framebuffer code for both the Mirror
// FBO as well as the Oculus swap textures we will use to render the scene
// Though we don't really need depth at all for the mirror FBO, or even an
// FBO, but using one means I can just use a glBlitFramebuffer to get it onto
// the screen.
template <
    typename C,
    typename D
>
struct FramebufferWrapper {
    uvec2 size;
    oglplus::Framebuffer fbo;
    C color;
    D depth;

    FramebufferWrapper() {}

    virtual ~FramebufferWrapper() {
    }

    virtual void Init(const uvec2 & size) {
        this->size = size;
        initColor();
        initDepth();
        initDone();
    }

    template <typename F>
    void Bound(F f) {
        Bound(oglplus::Framebuffer::Target::Draw, f);
    }

    template <typename F>
    void Bound(oglplus::Framebuffer::Target target , F f) {
        fbo.Bind(target);
        onBind(target);
        f();
        onUnbind(target);
        oglplus::DefaultFramebuffer().Bind(target);
    }

    void Viewport() {
        oglplus::Context::Viewport(size.x, size.y);
    }

protected:
    virtual void onBind(oglplus::Framebuffer::Target target) {}
    virtual void onUnbind(oglplus::Framebuffer::Target target) {}

    static GLenum toEnum(oglplus::Framebuffer::Target target) {
        switch (target) {
        case oglplus::Framebuffer::Target::Draw:
            return GL_DRAW_FRAMEBUFFER;
        case oglplus::Framebuffer::Target::Read:
            return GL_READ_FRAMEBUFFER;
        default:
            Q_ASSERT(false);
            return GL_FRAMEBUFFER;
        }
    }

    virtual void initDepth() {}

    virtual void initColor() {}

    virtual void initDone() = 0;
};

struct BasicFramebufferWrapper : public FramebufferWrapper <oglplus::Texture, oglplus::Renderbuffer> {
protected:
    virtual void initDepth() override {
        using namespace oglplus;
        Context::Bound(Renderbuffer::Target::Renderbuffer, depth)
            .Storage(
                PixelDataInternalFormat::DepthComponent,
                size.x, size.y);
    }

    virtual void initColor() override {
        using namespace oglplus;
        Context::Bound(oglplus::Texture::Target::_2D, color)
            .MinFilter(TextureMinFilter::Linear)
            .MagFilter(TextureMagFilter::Linear)
            .WrapS(TextureWrap::ClampToEdge)
            .WrapT(TextureWrap::ClampToEdge)
            .Image2D(
                0, PixelDataInternalFormat::RGBA8,
                size.x, size.y,
                0, PixelDataFormat::RGB, PixelDataType::UnsignedByte, nullptr
            );
    }

    virtual void initDone() override {
        using namespace oglplus;
        static const Framebuffer::Target target = Framebuffer::Target::Draw;
        Bound(target, [&] {
            fbo.AttachTexture(target, FramebufferAttachment::Color, color, 0);
            fbo.AttachRenderbuffer(target, FramebufferAttachment::Depth, depth);
            fbo.Complete(target);
        });
    }
};

using BasicFramebufferWrapperPtr = std::shared_ptr<BasicFramebufferWrapper>;

class TextureRecycler {
public:
    TextureRecycler(bool useMipmaps) : _useMipmaps(useMipmaps) {}
    void setSize(const uvec2& size);
    void clear();
    TexturePtr getNextTexture();
    void recycleTexture(GLuint texture);

private:

    struct TexInfo {
        TexturePtr _tex;
        uvec2 _size;
        bool _active{ false };

        TexInfo() {}
        TexInfo(TexturePtr tex, const uvec2& size) : _tex(tex), _size(size) {}
    };

    using Map = std::map<GLuint, TexInfo>;
    using Queue = std::queue<TexturePtr>;

    Map _allTextures;
    Queue _readyTextures;
    uvec2 _size{ 1920, 1080 };
    bool _useMipmaps;
};
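
For reference, driving the FramebufferWrapper API declared above looks roughly like the following; the size and the draw calls are illustrative placeholders, not taken from this commit:

    BasicFramebufferWrapperPtr fbo = std::make_shared<BasicFramebufferWrapper>();
    fbo->Init(uvec2(1920, 1080));
    fbo->Bound([&] {
        fbo->Viewport();
        // ... issue offscreen / mirror draw calls here ...
    });
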
83
libraries/gl/src/gl/TextureRecycler.cpp
Normal file
@ -0,0 +1,83 @@
//
//  Created by Bradley Austin Davis on 2016-10-05
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "TextureRecycler.h"
#include "Config.h"

#include <set>

void TextureRecycler::setSize(const uvec2& size) {
    if (size == _size) {
        return;
    }
    _size = size;
    while (!_readyTextures.empty()) {
        _readyTextures.pop();
    }
    std::set<Map::key_type> toDelete;
    std::for_each(_allTextures.begin(), _allTextures.end(), [&](Map::const_reference item) {
        if (!item.second._active && item.second._size != _size) {
            toDelete.insert(item.first);
        }
    });
    std::for_each(toDelete.begin(), toDelete.end(), [&](Map::key_type key) {
        _allTextures.erase(key);
    });
}

void TextureRecycler::clear() {
    while (!_readyTextures.empty()) {
        _readyTextures.pop();
    }
    _allTextures.clear();
}

uint32_t TextureRecycler::getNextTexture() {
    if (_readyTextures.empty()) {
        uint32_t newTexture;
        glGenTextures(1, &newTexture);
        glBindTexture(GL_TEXTURE_2D, newTexture);
        if (_useMipmaps) {
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        } else {
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        }
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, 8.0f);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -0.2f);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, 8.0f);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, _size.x, _size.y, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
        _allTextures.emplace(std::piecewise_construct, std::forward_as_tuple(newTexture), std::forward_as_tuple(newTexture, _size));
        _readyTextures.push(newTexture);
    }

    uint32_t result = _readyTextures.front();
    _readyTextures.pop();
    auto& item = _allTextures[result];
    item._active = true;
    return result;
}

void TextureRecycler::recycleTexture(GLuint texture) {
    Q_ASSERT(_allTextures.count(texture));
    auto& item = _allTextures[texture];
    Q_ASSERT(item._active);
    item._active = false;
    if (item._size != _size) {
        // Buh-bye
        _allTextures.erase(texture);
        return;
    }

    _readyTextures.push(item._tex);
}

47
libraries/gl/src/gl/TextureRecycler.h
Normal file
@ -0,0 +1,47 @@
//
//  Created by Bradley Austin Davis on 2015-04-04
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_TextureRecycler_h
#define hifi_TextureRecycler_h

#include <atomic>
#include <queue>
#include <map>

#include <GLMHelpers.h>

class TextureRecycler {
public:
    TextureRecycler(bool useMipmaps) : _useMipmaps(useMipmaps) {}
    void setSize(const uvec2& size);
    void clear();
    uint32_t getNextTexture();
    void recycleTexture(uint32_t texture);

private:

    struct TexInfo {
        const uint32_t _tex{ 0 };
        const uvec2 _size;
        bool _active { false };

        TexInfo() {}
        TexInfo(uint32_t tex, const uvec2& size) : _tex(tex), _size(size) {}
        TexInfo(const TexInfo& other) : _tex(other._tex), _size(other._size) {}
    };

    using Map = std::map<uint32_t, TexInfo>;
    using Queue = std::queue<uint32_t>;

    Map _allTextures;
    Queue _readyTextures;
    uvec2 _size{ 1920, 1080 };
    bool _useMipmaps;
};

#endif

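A minimal sketch of how a caller might cycle textures through the new TextureRecycler; the frame loop and variable names are assumptions for illustration, only the recycler calls come from the class above:

    TextureRecycler recycler(false);           // no mipmaps
    recycler.setSize(uvec2(1920, 1080));
    uint32_t tex = recycler.getNextTexture();  // creates or reuses a GL texture
    // ... render into tex and hand it to the consumer ...
    recycler.recycleTexture(tex);              // return it to the ready queue
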
@ -119,8 +119,6 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
    (&::gpu::gl::GLBackend::do_startNamedCall),
    (&::gpu::gl::GLBackend::do_stopNamedCall),

    (&::gpu::gl::GLBackend::do_glActiveBindTexture),

    (&::gpu::gl::GLBackend::do_glUniform1i),
    (&::gpu::gl::GLBackend::do_glUniform1f),
    (&::gpu::gl::GLBackend::do_glUniform2f),

@ -388,14 +386,6 @@ void GLBackend::do_popProfileRange(const Batch& batch, size_t paramOffset) {
// As long as we don;t use several versions of shaders we can avoid this more complex code path
// #define GET_UNIFORM_LOCATION(shaderUniformLoc) _pipeline._programShader->getUniformLocation(shaderUniformLoc, isStereo());
#define GET_UNIFORM_LOCATION(shaderUniformLoc) shaderUniformLoc
void GLBackend::do_glActiveBindTexture(const Batch& batch, size_t paramOffset) {
    glActiveTexture(batch._params[paramOffset + 2]._uint);
    glBindTexture(
        GET_UNIFORM_LOCATION(batch._params[paramOffset + 1]._uint),
        batch._params[paramOffset + 0]._uint);

    (void)CHECK_GL_ERROR();
}

void GLBackend::do_glUniform1i(const Batch& batch, size_t paramOffset) {
    if (_pipeline._program == 0) {

@ -568,6 +558,11 @@ void GLBackend::releaseBuffer(GLuint id, Size size) const {
    _buffersTrash.push_back({ id, size });
}

void GLBackend::releaseExternalTexture(GLuint id, const Texture::ExternalRecycler& recycler) const {
    Lock lock(_trashMutex);
    _externalTexturesTrash.push_back({ id, recycler });
}

void GLBackend::releaseTexture(GLuint id, Size size) const {
    Lock lock(_trashMutex);
    _texturesTrash.push_back({ id, size });

@ -662,6 +657,19 @@ void GLBackend::recycle() const {
        }
    }

    {
        std::list<std::pair<GLuint, Texture::ExternalRecycler>> externalTexturesTrash;
        {
            Lock lock(_trashMutex);
            std::swap(_externalTexturesTrash, externalTexturesTrash);
        }
        for (auto pair : externalTexturesTrash) {
            auto fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
            pair.second(pair.first, fence);
            decrementTextureGPUCount();
        }
    }

    {
        std::list<GLuint> programsTrash;
        {

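The externalTexturesTrash loop above hands each texture id plus a GPU fence to a caller-supplied recycler. A sketch of such a callback, assuming the recycler is callable with the id and the GLsync created by glFenceSync (the exact Texture::ExternalRecycler signature is not shown in this diff):

    auto recycler = [](GLuint textureId, GLsync fence) {
        if (fence) {
            glWaitSync(fence, 0, GL_TIMEOUT_IGNORED);  // let the GPU finish with the texture
            glDeleteSync(fence);
        }
        glDeleteTextures(1, &textureId);
    };
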
@ -130,8 +130,6 @@ public:
    // TODO: As long as we have gl calls explicitely issued from interface
    // code, we need to be able to record and batch these calls. THe long
    // term strategy is to get rid of any GL calls in favor of the HIFI GPU API
    virtual void do_glActiveBindTexture(const Batch& batch, size_t paramOffset) final;

    virtual void do_glUniform1i(const Batch& batch, size_t paramOffset) final;
    virtual void do_glUniform1f(const Batch& batch, size_t paramOffset) final;
    virtual void do_glUniform2f(const Batch& batch, size_t paramOffset) final;

@ -170,6 +168,7 @@ public:
    virtual bool isTextureReady(const TexturePointer& texture);

    virtual void releaseBuffer(GLuint id, Size size) const;
    virtual void releaseExternalTexture(GLuint id, const Texture::ExternalRecycler& recycler) const;
    virtual void releaseTexture(GLuint id, Size size) const;
    virtual void releaseFramebuffer(GLuint id) const;
    virtual void releaseShader(GLuint id) const;

@ -194,6 +193,7 @@ protected:
    mutable Mutex _trashMutex;
    mutable std::list<std::pair<GLuint, Size>> _buffersTrash;
    mutable std::list<std::pair<GLuint, Size>> _texturesTrash;
    mutable std::list<std::pair<GLuint, Texture::ExternalRecycler>> _externalTexturesTrash;
    mutable std::list<GLuint> _framebuffersTrash;
    mutable std::list<GLuint> _shadersTrash;
    mutable std::list<GLuint> _programsTrash;

@ -6,6 +6,8 @@
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GLShader.h"
#include <gl/GLShaders.h>

#include "GLBackend.h"

using namespace gpu;

@ -68,7 +70,11 @@ GLShader* compileBackendShader(GLBackend& backend, const Shader& shader) {

        std::string shaderDefines = glslVersion + "\n" + DOMAIN_DEFINES[shader.getType()] + "\n" + VERSION_DEFINES[version];

        bool result = compileShader(shaderDomain, shaderSource, shaderDefines, shaderObject.glshader, shaderObject.glprogram);
#ifdef SEPARATE_PROGRAM
        bool result = ::gl::compileShader(shaderDomain, shaderSource, shaderDefines, shaderObject.glshader, shaderObject.glprogram);
#else
        bool result = ::gl::compileShader(shaderDomain, shaderSource, shaderDefines, shaderObject.glshader);
#endif
        if (!result) {
            return nullptr;
        }

@ -103,7 +109,7 @@ GLShader* compileBackendProgram(GLBackend& backend, const Shader& program) {
        }
    }

    GLuint glprogram = compileProgram(shaderGLObjects);
    GLuint glprogram = ::gl::compileProgram(shaderGLObjects);
    if (glprogram == 0) {
        return nullptr;
    }

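Call pattern for the relocated ::gl helpers, assuming the non-SEPARATE_PROGRAM overload used above; vertexSource, fragmentSource and defines are hypothetical locals, not identifiers from this diff:

    GLuint vs = 0, fs = 0;
    if (::gl::compileShader(GL_VERTEX_SHADER, vertexSource, defines, vs) &&
        ::gl::compileShader(GL_FRAGMENT_SHADER, fragmentSource, defines, fs)) {
        GLuint program = ::gl::compileProgram({ vs, fs });
        // a zero return means the link failed; the helpers log the details
    }
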
@ -692,187 +692,6 @@ int makeOutputSlots(GLuint glprogram, const Shader::BindingSet& slotBindings, Sh
    return 0; //inputsCount;
}

bool compileShader(GLenum shaderDomain, const std::string& shaderSource, const std::string& defines, GLuint &shaderObject, GLuint &programObject) {
    if (shaderSource.empty()) {
        qCDebug(gpugllogging) << "GLShader::compileShader - no GLSL shader source code ? so failed to create";
        return false;
    }

    // Create the shader object
    GLuint glshader = glCreateShader(shaderDomain);
    if (!glshader) {
        qCDebug(gpugllogging) << "GLShader::compileShader - failed to create the gl shader object";
        return false;
    }

    // Assign the source
    const int NUM_SOURCE_STRINGS = 2;
    const GLchar* srcstr[] = { defines.c_str(), shaderSource.c_str() };
    glShaderSource(glshader, NUM_SOURCE_STRINGS, srcstr, NULL);

    // Compile !
    glCompileShader(glshader);

    // check if shader compiled
    GLint compiled = 0;
    glGetShaderiv(glshader, GL_COMPILE_STATUS, &compiled);

    // if compilation fails
    if (!compiled) {

        // save the source code to a temp file so we can debug easily
        /*
        std::ofstream filestream;
        filestream.open("debugshader.glsl");
        if (filestream.is_open()) {
            filestream << srcstr[0];
            filestream << srcstr[1];
            filestream.close();
        }
        */

        GLint infoLength = 0;
        glGetShaderiv(glshader, GL_INFO_LOG_LENGTH, &infoLength);

        char* temp = new char[infoLength];
        glGetShaderInfoLog(glshader, infoLength, NULL, temp);


        /*
        filestream.open("debugshader.glsl.info.txt");
        if (filestream.is_open()) {
            filestream << std::string(temp);
            filestream.close();
        }
        */

        qCWarning(gpugllogging) << "GLShader::compileShader - failed to compile the gl shader object:";
        for (auto s : srcstr) {
            qCWarning(gpugllogging) << s;
        }
        qCWarning(gpugllogging) << "GLShader::compileShader - errors:";
        qCWarning(gpugllogging) << temp;
        delete[] temp;

        glDeleteShader(glshader);
        return false;
    }

    GLuint glprogram = 0;
#ifdef SEPARATE_PROGRAM
    // so far so good, program is almost done, need to link:
    GLuint glprogram = glCreateProgram();
    if (!glprogram) {
        qCDebug(gpugllogging) << "GLShader::compileShader - failed to create the gl shader & gl program object";
        return false;
    }

    glProgramParameteri(glprogram, GL_PROGRAM_SEPARABLE, GL_TRUE);
    glAttachShader(glprogram, glshader);
    glLinkProgram(glprogram);

    GLint linked = 0;
    glGetProgramiv(glprogram, GL_LINK_STATUS, &linked);

    if (!linked) {
        /*
        // save the source code to a temp file so we can debug easily
        std::ofstream filestream;
        filestream.open("debugshader.glsl");
        if (filestream.is_open()) {
            filestream << shaderSource->source;
            filestream.close();
        }
        */

        GLint infoLength = 0;
        glGetProgramiv(glprogram, GL_INFO_LOG_LENGTH, &infoLength);

        char* temp = new char[infoLength];
        glGetProgramInfoLog(glprogram, infoLength, NULL, temp);

        qCDebug(gpugllogging) << "GLShader::compileShader - failed to LINK the gl program object :";
        qCDebug(gpugllogging) << temp;

        /*
        filestream.open("debugshader.glsl.info.txt");
        if (filestream.is_open()) {
            filestream << String(temp);
            filestream.close();
        }
        */
        delete[] temp;

        glDeleteShader(glshader);
        glDeleteProgram(glprogram);
        return false;
    }
#endif

    shaderObject = glshader;
    programObject = glprogram;

    return true;
}

GLuint compileProgram(const std::vector<GLuint>& glshaders) {
    // A brand new program:
    GLuint glprogram = glCreateProgram();
    if (!glprogram) {
        qCDebug(gpugllogging) << "GLShader::compileProgram - failed to create the gl program object";
        return 0;
    }

    // glProgramParameteri(glprogram, GL_PROGRAM_, GL_TRUE);
    // Create the program from the sub shaders
    for (auto so : glshaders) {
        glAttachShader(glprogram, so);
    }

    // Link!
    glLinkProgram(glprogram);

    GLint linked = 0;
    glGetProgramiv(glprogram, GL_LINK_STATUS, &linked);

    if (!linked) {
        /*
        // save the source code to a temp file so we can debug easily
        std::ofstream filestream;
        filestream.open("debugshader.glsl");
        if (filestream.is_open()) {
            filestream << shaderSource->source;
            filestream.close();
        }
        */

        GLint infoLength = 0;
        glGetProgramiv(glprogram, GL_INFO_LOG_LENGTH, &infoLength);

        char* temp = new char[infoLength];
        glGetProgramInfoLog(glprogram, infoLength, NULL, temp);

        qCDebug(gpugllogging) << "GLShader::compileProgram - failed to LINK the gl program object :";
        qCDebug(gpugllogging) << temp;

        /*
        filestream.open("debugshader.glsl.info.txt");
        if (filestream.is_open()) {
            filestream << std::string(temp);
            filestream.close();
        }
        */
        delete[] temp;

        glDeleteProgram(glprogram);
        return 0;
    }

    return glprogram;
}

void makeProgramBindings(ShaderObject& shaderObject) {
    if (!shaderObject.glprogram) {
        return;

@ -44,8 +44,6 @@ int makeUniformSlots(GLuint glprogram, const Shader::BindingSet& slotBindings,
int makeUniformBlockSlots(GLuint glprogram, const Shader::BindingSet& slotBindings, Shader::SlotSet& buffers);
int makeInputSlots(GLuint glprogram, const Shader::BindingSet& slotBindings, Shader::SlotSet& inputs);
int makeOutputSlots(GLuint glprogram, const Shader::BindingSet& slotBindings, Shader::SlotSet& outputs);
bool compileShader(GLenum shaderDomain, const std::string& shaderSource, const std::string& defines, GLuint &shaderObject, GLuint &programObject);
GLuint compileProgram(const std::vector<GLuint>& glshaders);
void makeProgramBindings(ShaderObject& shaderObject);

enum GLSyncState {

@ -136,6 +136,7 @@ float GLTexture::getMemoryPressure() {
// Create the texture and allocate storage
GLTexture::GLTexture(const std::weak_ptr<GLBackend>& backend, const Texture& texture, GLuint id, bool transferrable) :
    GLObject(backend, texture, id),
    _external(false),
    _source(texture.source()),
    _storageStamp(texture.getStamp()),
    _target(getGLTextureType(texture)),

@ -152,10 +153,41 @@ GLTexture::GLTexture(const std::weak_ptr<GLBackend>& backend, const Texture& tex
    Backend::setGPUObject(texture, this);
}

GLTexture::GLTexture(const std::weak_ptr<GLBackend>& backend, const Texture& texture, GLuint id) :
    GLObject(backend, texture, id),
    _external(true),
    _source(texture.source()),
    _storageStamp(0),
    _target(getGLTextureType(texture)),
    _internalFormat(GL_RGBA8),
    // FIXME force mips to 0?
    _maxMip(texture.maxMip()),
    _minMip(texture.minMip()),
    _virtualSize(0),
    _transferrable(false)
{
    Backend::setGPUObject(texture, this);

    // FIXME Is this necessary?
    //withPreservedTexture([this] {
    //    syncSampler();
    //    if (_gpuObject.isAutogenerateMips()) {
    //        generateMips();
    //    }
    //});
}

GLTexture::~GLTexture() {
    if (_id) {
    auto backend = _backend.lock();
    if (backend) {
        if (_external) {
            auto recycler = _gpuObject.getExternalRecycler();
            if (recycler) {
                backend->releaseExternalTexture(_id, recycler);
            } else {
                qWarning() << "No recycler available for texture " << _id << " possible leak";
            }
        } else if (_id) {
            backend->releaseTexture(_id, _size);
        }
    }
}

Some files were not shown because too many files have changed in this diff