Merge branch 'master' of https://github.com/highfidelity/hifi into location-cleanup
20
cmake/externals/GifCreator/CMakeLists.txt
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
set(EXTERNAL_NAME GifCreator)
|
||||
|
||||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://hifi-public.s3.amazonaws.com/dependencies/GifCreator.zip
|
||||
URL_MD5 8ac8ef5196f47c658dce784df5ecdb70
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
LOG_DOWNLOAD 1
|
||||
)
|
||||
|
||||
# Hide this external target (for ide users)
|
||||
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
|
||||
|
||||
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
|
||||
|
||||
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
||||
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${INSTALL_DIR}/src/${EXTERNAL_NAME} CACHE PATH "List of GifCreator include directories")
|
4
cmake/externals/wasapi/CMakeLists.txt
vendored
|
@ -6,8 +6,8 @@ if (WIN32)
|
|||
include(ExternalProject)
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi3.zip
|
||||
URL_MD5 1a2433f80a788a54c70f505ff4f43ac1
|
||||
URL http://hifi-public.s3.amazonaws.com/dependencies/qtaudio_wasapi4.zip
|
||||
URL_MD5 2abde5340a64d387848f12b9536a7e85
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
|
|
|
@ -139,7 +139,8 @@ macro(SET_PACKAGING_PARAMETERS)
|
|||
set(CLIENT_DESKTOP_SHORTCUT_REG_KEY "ClientDesktopShortcut")
|
||||
set(CONSOLE_DESKTOP_SHORTCUT_REG_KEY "ConsoleDesktopShortcut")
|
||||
set(CONSOLE_STARTUP_REG_KEY "ConsoleStartupShortcut")
|
||||
set(LAUNCH_NOW_REG_KEY "LaunchAfterInstall")
|
||||
set(CLIENT_LAUNCH_NOW_REG_KEY "ClientLaunchAfterInstall")
|
||||
set(SERVER_LAUNCH_NOW_REG_KEY "ServerLaunchAfterInstall")
|
||||
endif ()
|
||||
|
||||
# setup component categories for installer
|
||||
|
|
26
cmake/modules/FindGifCreator.cmake
Normal file
|
@ -0,0 +1,26 @@
|
|||
#
|
||||
# FindGifCreator.cmake
|
||||
#
|
||||
# Try to find GifCreator include path.
|
||||
# Once done this will define
|
||||
#
|
||||
# GIFCREATOR_INCLUDE_DIRS
|
||||
#
|
||||
# Created on 11/15/2016 by Zach Fox
|
||||
# Copyright 2016 High Fidelity, Inc.
|
||||
#
|
||||
# Distributed under the Apache License, Version 2.0.
|
||||
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
#
|
||||
|
||||
# setup hints for GifCreator search
|
||||
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
|
||||
hifi_library_search_hints("GIFCREATOR")
|
||||
|
||||
# locate header
|
||||
find_path(GIFCREATOR_INCLUDE_DIRS "GifCreator/GifCreator.h" HINTS ${GIFCREATOR_SEARCH_DIRS})
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(GIFCREATOR DEFAULT_MSG GIFCREATOR_INCLUDE_DIRS)
|
||||
|
||||
mark_as_advanced(GIFCREATOR_INCLUDE_DIRS GIFCREATOR_SEARCH_DIRS)
|
|
@ -38,7 +38,8 @@ set(POST_INSTALL_OPTIONS_REG_GROUP "@POST_INSTALL_OPTIONS_REG_GROUP@")
|
|||
set(CLIENT_DESKTOP_SHORTCUT_REG_KEY "@CLIENT_DESKTOP_SHORTCUT_REG_KEY@")
|
||||
set(CONSOLE_DESKTOP_SHORTCUT_REG_KEY "@CONSOLE_DESKTOP_SHORTCUT_REG_KEY@")
|
||||
set(CONSOLE_STARTUP_REG_KEY "@CONSOLE_STARTUP_REG_KEY@")
|
||||
set(LAUNCH_NOW_REG_KEY "@LAUNCH_NOW_REG_KEY@")
|
||||
set(SERVER_LAUNCH_NOW_REG_KEY "@SERVER_LAUNCH_NOW_REG_KEY@")
|
||||
set(CLIENT_LAUNCH_NOW_REG_KEY "@CLIENT_LAUNCH_NOW_REG_KEY@")
|
||||
set(INSTALLER_HEADER_IMAGE "@INSTALLER_HEADER_IMAGE@")
|
||||
set(UNINSTALLER_HEADER_IMAGE "@UNINSTALLER_HEADER_IMAGE@")
|
||||
set(ADD_REMOVE_ICON_PATH "@ADD_REMOVE_ICON_PATH@")
|
||||
|
|
|
@ -135,10 +135,6 @@ Var AR_RegFlags
|
|||
SectionSetFlags ${${SecName}} $AR_SecFlags
|
||||
|
||||
"default_${SecName}:"
|
||||
; The client is always selected by default
|
||||
${If} ${SecName} == @CLIENT_COMPONENT_NAME@
|
||||
SectionSetFlags ${${SecName}} 17
|
||||
${EndIf}
|
||||
|
||||
!insertmacro LoadSectionSelectedIntoVar ${SecName} ${SecName}_selected
|
||||
!macroend
|
||||
|
@ -368,7 +364,8 @@ Var PostInstallDialog
|
|||
Var DesktopClientCheckbox
|
||||
Var DesktopServerCheckbox
|
||||
Var ServerStartupCheckbox
|
||||
Var LaunchNowCheckbox
|
||||
Var LaunchServerNowCheckbox
|
||||
Var LaunchClientNowCheckbox
|
||||
Var CurrentOffset
|
||||
Var OffsetUnits
|
||||
Var CopyFromProductionCheckbox
|
||||
|
@ -431,17 +428,24 @@ Function PostInstallOptionsPage
|
|||
|
||||
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @CONSOLE_HF_SHORTCUT_NAME@ after install"
|
||||
${Else}
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @INTERFACE_HF_SHORTCUT_NAME@ after install"
|
||||
Pop $LaunchServerNowCheckbox
|
||||
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $LaunchServerNowCheckbox @SERVER_LAUNCH_NOW_REG_KEY@ ${BST_CHECKED}
|
||||
|
||||
IntOp $CurrentOffset $CurrentOffset + 15
|
||||
${EndIf}
|
||||
|
||||
Pop $LaunchNowCheckbox
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @INTERFACE_HF_SHORTCUT_NAME@ after install"
|
||||
Pop $LaunchClientNowCheckbox
|
||||
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $LaunchNowCheckbox @LAUNCH_NOW_REG_KEY@ ${BST_CHECKED}
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $LaunchClientNowCheckbox @CLIENT_LAUNCH_NOW_REG_KEY@ ${BST_CHECKED}
|
||||
${EndIf}
|
||||
|
||||
${If} @PR_BUILD@ == 1
|
||||
; a PR build defaults all install options expect LaunchNowCheckbox and the settings copy to unchecked
|
||||
; a PR build defaults all install options expect LaunchServerNowCheckbox, LaunchClientNowCheckbox and the settings copy to unchecked
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
${NSD_SetState} $DesktopClientCheckbox ${BST_UNCHECKED}
|
||||
${EndIf}
|
||||
|
@ -471,7 +475,8 @@ FunctionEnd
|
|||
Var DesktopClientState
|
||||
Var DesktopServerState
|
||||
Var ServerStartupState
|
||||
Var LaunchNowState
|
||||
Var LaunchServerNowState
|
||||
Var LaunchClientNowState
|
||||
Var CopyFromProductionState
|
||||
|
||||
Function ReadPostInstallOptions
|
||||
|
@ -493,8 +498,15 @@ Function ReadPostInstallOptions
|
|||
${NSD_GetState} $CopyFromProductionCheckbox $CopyFromProductionState
|
||||
${EndIf}
|
||||
|
||||
; check if we need to launch an application post-install
|
||||
${NSD_GetState} $LaunchNowCheckbox $LaunchNowState
|
||||
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
|
||||
; check if we need to launch the server post-install
|
||||
${NSD_GetState} $LaunchServerNowCheckbox $LaunchServerNowState
|
||||
${EndIf}
|
||||
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
; check if we need to launch the client post-install
|
||||
${NSD_GetState} $LaunchClientNowCheckbox $LaunchClientNowState
|
||||
${EndIf}
|
||||
FunctionEnd
|
||||
|
||||
Function HandlePostInstallOptions
|
||||
|
@ -565,20 +577,31 @@ Function HandlePostInstallOptions
|
|||
${EndIf}
|
||||
${EndIf}
|
||||
|
||||
${If} $LaunchNowState == ${BST_CHECKED}
|
||||
!insertmacro WritePostInstallOption @LAUNCH_NOW_REG_KEY@ YES
|
||||
${If} $LaunchServerNowState == ${BST_CHECKED}
|
||||
!insertmacro WritePostInstallOption @SERVER_LAUNCH_NOW_REG_KEY@ YES
|
||||
|
||||
; both launches use the explorer trick in case the user has elevated permissions for the installer
|
||||
; it won't be possible to use this approach if either application should be launched with a command line param
|
||||
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
|
||||
${If} $LaunchClientNowState == ${BST_CHECKED}
|
||||
!insertmacro WritePostInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ YES
|
||||
; create shortcut with ARGUMENTS
|
||||
CreateShortCut "$TEMP\SandboxShortcut.lnk" "$INSTDIR\@CONSOLE_INSTALL_SUBDIR@\@CONSOLE_WIN_EXEC_NAME@" "-- --launchInterface"
|
||||
Exec '"$WINDIR\explorer.exe" "$TEMP\SandboxShortcut.lnk"'
|
||||
${Else}
|
||||
Exec '"$WINDIR\explorer.exe" "$INSTDIR\@INTERFACE_WIN_EXEC_NAME@"'
|
||||
!insertmacro WritePostInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ NO
|
||||
Exec '"$WINDIR\explorer.exe" "$INSTDIR\@CONSOLE_INSTALL_SUBDIR@\@CONSOLE_WIN_EXEC_NAME@"'
|
||||
${EndIf}
|
||||
|
||||
${Else}
|
||||
!insertmacro WritePostInstallOption @LAUNCH_NOW_REG_KEY@ NO
|
||||
!insertmacro WritePostInstallOption @SERVER_LAUNCH_NOW_REG_KEY@ NO
|
||||
|
||||
; launch uses the explorer trick in case the user has elevated permissions for the installer
|
||||
${If} $LaunchClientNowState == ${BST_CHECKED}
|
||||
!insertmacro WritePostInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ YES
|
||||
Exec '"$WINDIR\explorer.exe" "$INSTDIR\@INTERFACE_WIN_EXEC_NAME@"'
|
||||
${Else}
|
||||
!insertmacro WritePostInstallOption @CLIENT_LAUNCH_NOW_REG_KEY@ NO
|
||||
${EndIf}
|
||||
|
||||
${EndIf}
|
||||
FunctionEnd
|
||||
|
||||
|
|
|
@ -351,3 +351,7 @@ if (ANDROID)
|
|||
|
||||
qt_create_apk()
|
||||
endif ()
|
||||
|
||||
add_dependency_external_projects(GifCreator)
|
||||
find_package(GifCreator REQUIRED)
|
||||
target_include_directories(${TARGET_NAME} PUBLIC ${GIFCREATOR_INCLUDE_DIRS})
|
||||
|
|
|
@ -2,17 +2,17 @@
|
|||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1"
|
||||
id="svg4136" inkscape:version="0.91 r13725" sodipodi:docname="address-bar.svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1440 200"
|
||||
style="enable-background:new 0 0 1440 200;" xml:space="preserve">
|
||||
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 856 100"
|
||||
style="enable-background:new 0 0 856 100;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#1E1E1E;}
|
||||
.st1{fill:#E6E7E8;}
|
||||
.st2{fill:#FFFFFF;}
|
||||
</style>
|
||||
<path class="st0" d="M1428.6,172H11.5c-6.3,0-11.4-5.1-11.4-11.4v-111c0-6.3,5.1-11.4,11.4-11.4h1417.2c6.3,0,11.4,5.1,11.4,11.4
|
||||
v111C1440,166.9,1434.9,172,1428.6,172z"/>
|
||||
<path class="st1" d="M1428.6,165.8H11.5c-6.3,0-11.4-5.1-11.4-11.4v-111C0.1,37.1,5.2,32,11.5,32h1417.2c6.3,0,11.4,5.1,11.4,11.4
|
||||
v111C1440,160.7,1434.9,165.8,1428.6,165.8z"/>
|
||||
<path class="st2" d="M1429.9,165.8H421.3c-6.3,0-11.5-3.6-11.5-8.1V40.1c0-4.5,5.1-8.1,11.5-8.1h1008.6c6.3,0,11.5,3.7,11.5,8.1
|
||||
v117.6C1441.4,162.1,1436.2,165.8,1429.9,165.8z"/>
|
||||
<path class="st0" d="M849.6,87.1H6.3c-3.2,0-5.7-2.6-5.7-5.7V25.8c0-3.2,2.6-5.7,5.7-5.7h843.3c3.2,0,5.7,2.6,5.7,5.7v55.6
|
||||
C855.3,84.5,852.7,87.1,849.6,87.1z"/>
|
||||
<path class="st1" d="M849.6,84H6.3c-3.2,0-5.7-2.6-5.7-5.7V22.7c0-3.2,2.6-5.7,5.7-5.7h843.3c3.2,0,5.7,2.6,5.7,5.7v55.6
|
||||
C855.3,81.4,852.7,84,849.6,84z"/>
|
||||
<path class="st2" d="M850.2,84H211.6c-3.2,0-5.8-1.8-5.8-4.1V21c0-2.3,2.6-4.1,5.8-4.1h638.7c3.2,0,5.8,1.9,5.8,4.1v58.9
|
||||
C856,82.1,853.4,84,850.2,84z"/>
|
||||
</svg>
|
Before Width: | Height: | Size: 1.3 KiB After Width: | Height: | Size: 1.3 KiB |
|
@ -1,53 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 32 64" style="enable-background:new 0 0 32 64;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#168DB7;}
|
||||
.st1{fill:#FFFFFF;}
|
||||
.st2{opacity:0.63;fill:#58595B;enable-background:new ;}
|
||||
</style>
|
||||
<circle cx="15.8" cy="48.2" r="14.7"/>
|
||||
<circle class="st0" cx="15.8" cy="47.6" r="14.7"/>
|
||||
<circle cx="16.1" cy="44.9" r="3"/>
|
||||
<path d="M18.2,50.2H14c-1.7,0-3.1,1.5-3.1,3.2V55c1.4,1.1,3.1,1.7,4.9,1.7c2.1,0,4-0.8,5.4-2.1v-1.2C21.3,51.7,19.9,50.2,18.2,50.2z
|
||||
"/>
|
||||
<circle cx="22.9" cy="44.9" r="1.6"/>
|
||||
<path d="M24,47.9h-1.8c-0.4,0-0.8,0.2-1,0.4c0.5,0.3,1,0.7,1.4,1.1c0.5,0.5,0.7,1.2,0.7,1.9c0.1,0.4,0.1,0.8,0.1,1.2v0.3
|
||||
c0.8-1,1.5-2.7,1.8-3.6v-0.1C25.2,48.8,24.6,47.9,24,47.9z"/>
|
||||
<circle cx="9.1" cy="45.2" r="1.6"/>
|
||||
<path d="M8.7,53.2c0-0.3,0-0.6,0-0.9v-0.1c0-0.1,0-0.2,0-0.3s0-0.1,0-0.2c0-0.1,0-0.1,0-0.2c0.1-0.9,0.7-2,1.4-2.5
|
||||
c0.2-0.2,0.4-0.3,0.6-0.4c-0.2-0.2-0.6-0.3-0.9-0.3H8c-0.6,0-1.2,0.8-1.1,1.2v0.1C7.2,50.6,7.9,52.3,8.7,53.2z"/>
|
||||
<circle class="st1" cx="16.1" cy="44.3" r="3"/>
|
||||
<path class="st1" d="M18.2,49.6H14c-1.7,0-3.1,1.5-3.1,3.2v1.6c1.4,1.1,3.1,1.7,4.9,1.7c2.1,0,4-0.8,5.4-2.1v-1.2
|
||||
C21.3,51.1,19.9,49.6,18.2,49.6z"/>
|
||||
<circle class="st1" cx="22.9" cy="44.4" r="1.6"/>
|
||||
<path class="st1" d="M24,47.4h-1.8c-0.4,0-0.8,0.2-1,0.4c0.5,0.3,1,0.7,1.4,1.1c0.5,0.5,0.7,1.2,0.7,1.9c0.1,0.4,0.1,0.8,0.1,1.2
|
||||
v0.3c0.8-1,1.5-2.7,1.8-3.6v-0.1C25.2,48.2,24.6,47.4,24,47.4z"/>
|
||||
<circle class="st1" cx="9.1" cy="44.6" r="1.6"/>
|
||||
<path class="st1" d="M8.7,52.7c0-0.3,0-0.6,0-0.9v-0.1c0-0.1,0-0.2,0-0.3s0-0.1,0-0.2c0-0.1,0-0.1,0-0.2c0.1-0.9,0.7-2,1.4-2.5
|
||||
c0.2-0.2,0.4-0.3,0.6-0.4c-0.2-0.2-0.6-0.3-0.9-0.3H8c-0.6,0-1.2,0.8-1.1,1.2v0.1C7.2,50,7.9,51.7,8.7,52.7z"/>
|
||||
<path d="M15.9,3.4c-7,0-12.7,5.7-12.7,12.7s5.7,12.7,12.7,12.7s12.7-5.7,12.7-12.7S22.9,3.4,15.9,3.4z M15.9,27.2
|
||||
c-6.1,0-11.1-5-11.1-11.1S9.8,5,15.9,5C22,4.9,27,9.9,27,16.1C27,22.2,22,27.2,15.9,27.2z"/>
|
||||
<circle class="st2" cx="15.9" cy="15.5" r="12.4"/>
|
||||
<path class="st1" d="M15.9,2.8c-7,0-12.7,5.7-12.7,12.7s5.7,12.7,12.7,12.7s12.7-5.7,12.7-12.7S22.9,2.8,15.9,2.8z M15.9,26.6
|
||||
c-6.1,0-11.1-5-11.1-11.1s5-11.1,11.1-11.1C22,4.4,27,9.4,27,15.5S22,26.6,15.9,26.6z"/>
|
||||
<circle cx="16.1" cy="12.9" r="3"/>
|
||||
<path d="M18.2,18.2H14c-1.7,0-3.1,1.5-3.1,3.2V23c1.4,1.1,3.1,1.7,4.9,1.7c2.1,0,4-0.8,5.4-2.1v-1.2C21.3,19.7,19.9,18.2,18.2,18.2z
|
||||
"/>
|
||||
<circle cx="22.9" cy="12.9" r="1.6"/>
|
||||
<path d="M24,15.9h-1.8c-0.4,0-0.8,0.2-1,0.4c0.5,0.3,1,0.7,1.4,1.1c0.5,0.5,0.7,1.2,0.7,1.9c0.1,0.4,0.1,0.8,0.1,1.2v0.3
|
||||
c0.8-1,1.5-2.7,1.8-3.6v-0.1C25.2,16.8,24.6,15.9,24,15.9z"/>
|
||||
<circle cx="9.1" cy="13.2" r="1.6"/>
|
||||
<path d="M8.7,21.2c0-0.3,0-0.6,0-0.9v-0.1c0-0.1,0-0.2,0-0.3c0-0.1,0-0.1,0-0.2s0-0.1,0-0.2c0.1-0.9,0.7-2,1.4-2.5
|
||||
c0.2-0.2,0.4-0.3,0.6-0.4c-0.2-0.2-0.6-0.3-0.9-0.3H8c-0.6,0-1.2,0.8-1.1,1.2v0.1C7.2,18.6,7.9,20.2,8.7,21.2z"/>
|
||||
<circle class="st1" cx="16.1" cy="12.3" r="3"/>
|
||||
<path class="st1" d="M18.2,17.6H14c-1.7,0-3.1,1.5-3.1,3.2v1.6c1.4,1.1,3.1,1.7,4.9,1.7c2.1,0,4-0.8,5.4-2.1v-1.2
|
||||
C21.3,19.1,19.9,17.6,18.2,17.6z"/>
|
||||
<circle class="st1" cx="22.9" cy="12.4" r="1.6"/>
|
||||
<path class="st1" d="M24,15.4h-1.8c-0.4,0-0.8,0.2-1,0.4c0.5,0.3,1,0.7,1.4,1.1c0.5,0.5,0.7,1.2,0.7,1.9c0.1,0.4,0.1,0.8,0.1,1.2
|
||||
v0.3c0.8-1,1.5-2.7,1.8-3.6v-0.1C25.2,16.2,24.6,15.4,24,15.4z"/>
|
||||
<circle class="st1" cx="9.1" cy="12.6" r="1.6"/>
|
||||
<path class="st1" d="M8.7,20.7c0-0.3,0-0.6,0-0.9v-0.1c0-0.1,0-0.2,0-0.3c0-0.1,0-0.1,0-0.2s0-0.1,0-0.2c0.1-0.9,0.7-2,1.4-2.5
|
||||
c0.2-0.2,0.4-0.3,0.6-0.4c-0.2-0.2-0.6-0.3-0.9-0.3H8c-0.6,0-1.2,0.8-1.1,1.2v0.1C7.2,18,7.9,19.7,8.7,20.7z"/>
|
||||
</svg>
|
Before Width: | Height: | Size: 3.7 KiB |
24
interface/resources/images/info-icon-2-state.svg
Normal file
|
@ -0,0 +1,24 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 32 64" style="enable-background:new 0 0 32 64;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#168DB7;}
|
||||
.st1{fill:#FFFFFF;}
|
||||
</style>
|
||||
<path class="st0" d="M15.8,3.4C9.1,3.4,3.7,8.8,3.7,15.5s5.4,12.1,12.1,12.1s12.1-5.4,12.1-12.1S22.5,3.4,15.8,3.4z M15.8,26.6
|
||||
c-6.1,0-11.1-4.9-11.1-11.1S9.7,4.4,15.8,4.4c6.1,0.1,11.1,5,11.1,11.2C26.9,21.6,21.9,26.6,15.8,26.6z"/>
|
||||
<g>
|
||||
<path class="st0" d="M16.3,19c-0.1,0.6,0.1,0.9,0.8,0.9h0.7L17.6,21c-0.6,0.3-1.1,0.3-1.5,0.3c-1,0-1.9-0.5-1.6-2l0.8-5.6
|
||||
c-0.4,0-0.7-0.1-1.1-0.1l0.1-1.3h2.9L16.3,19z M17.7,10c-0.1,0.5-0.5,0.8-1.1,0.8c-0.6,0-1-0.4-1-1c0.1-0.5,0.6-0.9,1.1-0.9
|
||||
C17.4,8.9,17.8,9.4,17.7,10z"/>
|
||||
</g>
|
||||
<path class="st0" d="M15.8,35.4c-6.7,0-12.1,5.4-12.1,12.1s5.4,12.1,12.1,12.1s12.1-5.4,12.1-12.1S22.5,35.4,15.8,35.4z"/>
|
||||
<path class="st0" d="M15.8,58.2c-5.9,0-10.7-4.8-10.7-10.7s4.8-10.7,10.7-10.7c5.9,0.1,10.7,4.9,10.7,10.8
|
||||
C26.5,53.4,21.7,58.2,15.8,58.2z"/>
|
||||
<g>
|
||||
<path class="st1" d="M16.3,51c-0.1,0.6,0.1,0.9,0.8,0.9h0.7L17.6,53c-0.6,0.3-1.1,0.3-1.5,0.3c-1,0-1.9-0.5-1.6-2l0.8-5.6
|
||||
c-0.4,0-0.7-0.1-1.1-0.1l0.1-1.3h2.9L16.3,51z M17.7,42c-0.1,0.5-0.5,0.8-1.1,0.8c-0.6,0-1-0.4-1-1c0.1-0.5,0.6-0.9,1.1-0.9
|
||||
C17.4,40.9,17.8,41.4,17.7,42z"/>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.5 KiB |
14
interface/resources/images/snap-icon.svg
Normal file
|
@ -0,0 +1,14 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 32 31.7" style="enable-background:new 0 0 32 31.7;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#168DB7;}
|
||||
</style>
|
||||
<g>
|
||||
<path class="st0" d="M24.4,9.8h-2.1V8.9c0-1.4-0.8-1.7-2.2-1.7h-7c-1.4,0-2.1,0.3-2.1,1.7v0.8H8.8c-1.4,0-2.7,0.6-2.7,2V21
|
||||
c0,1.4,1.1,2.7,2.7,2.7h15.4c1.4,0,2.4-1.5,2.4-2.9v-9C26.9,10.3,25.8,9.8,24.4,9.8z M16.8,21.5c-3.2,0-5.6-2.5-5.6-5.6
|
||||
c0-3.2,2.5-5.6,5.6-5.6c3.2,0,5.6,2.5,5.6,5.6S20.1,21.5,16.8,21.5z"/>
|
||||
<circle class="st0" cx="16.8" cy="15.9" r="3.4"/>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 780 B |
|
@ -1,33 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 32 64" style="enable-background:new 0 0 32 64;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{opacity:0.64;fill:#58595B;}
|
||||
.st1{fill:#FFFFFF;}
|
||||
.st2{fill:#168DB7;}
|
||||
</style>
|
||||
<circle class="st0" cx="15.8" cy="15.5" r="12.5"/>
|
||||
<path d="M15.8,3.4c-7,0-12.7,5.7-12.7,12.7s5.7,12.7,12.7,12.7c7,0,12.7-5.7,12.7-12.7S22.8,3.4,15.8,3.4z M15.8,27.3
|
||||
c-6.2,0-11.2-5-11.2-11.2C4.6,10,9.7,5,15.8,5C22,5,27,10,27,16.1C27,22.3,22,27.3,15.8,27.3z"/>
|
||||
<path class="st1" d="M15.8,2.8c-7,0-12.7,5.7-12.7,12.7s5.7,12.7,12.7,12.7c7,0,12.7-5.7,12.7-12.7S22.8,2.8,15.8,2.8z M15.8,26.7
|
||||
c-6.2,0-11.2-5-11.2-11.2c0-6.2,5-11.2,11.2-11.2C22,4.4,27,9.4,27,15.6C27,21.7,22,26.7,15.8,26.7z"/>
|
||||
<path d="M21.5,11.6H20V11c0-1-0.6-1.2-1.6-1.2h-5c-1,0-1.5,0.2-1.5,1.2v0.6h-1.5c-1,0-1.9,0.4-1.9,1.4v6.6c0,1,0.8,1.9,1.9,1.9h11
|
||||
c1,0,1.7-1.1,1.7-2.1V13C23.3,12,22.5,11.6,21.5,11.6z M16.1,20.1c-2.3,0-4-1.8-4-4c0-2.3,1.8-4,4-4c2.3,0,4,1.8,4,4
|
||||
S18.4,20.1,16.1,20.1z"/>
|
||||
<circle cx="16.1" cy="16" r="2.4"/>
|
||||
<path class="st1" d="M21.5,11H20v-0.6c0-1-0.6-1.2-1.6-1.2h-5c-1,0-1.5,0.2-1.5,1.2V11h-1.5c-1,0-1.9,0.4-1.9,1.4v6.6
|
||||
c0,1,0.8,1.9,1.9,1.9h11c1,0,1.7-1.1,1.7-2.1v-6.4C23.3,11.5,22.5,11,21.5,11z M16.1,19.5c-2.3,0-4-1.8-4-4c0-2.3,1.8-4,4-4
|
||||
c2.3,0,4,1.8,4,4S18.4,19.5,16.1,19.5z"/>
|
||||
<circle class="st1" cx="16.1" cy="15.5" r="2.4"/>
|
||||
<circle cx="15.8" cy="48.2" r="14.8"/>
|
||||
<circle class="st2" cx="15.8" cy="47.6" r="14.8"/>
|
||||
<path d="M21.5,43.6H20V43c0-1-0.6-1.2-1.6-1.2h-5c-1,0-1.5,0.2-1.5,1.2v0.6h-1.5c-1,0-1.9,0.4-1.9,1.4v6.6c0,1,0.8,1.9,1.9,1.9h11
|
||||
c1,0,1.7-1.1,1.7-2.1V45C23.3,44.1,22.5,43.6,21.5,43.6z M16.1,52.1c-2.3,0-4-1.8-4-4c0-2.3,1.8-4,4-4c2.3,0,4,1.8,4,4
|
||||
C20.1,50.2,18.4,52.1,16.1,52.1z"/>
|
||||
<circle cx="16.1" cy="48" r="2.4"/>
|
||||
<path class="st1" d="M21.5,43.1H20v-0.6c0-1-0.6-1.2-1.6-1.2h-5c-1,0-1.5,0.2-1.5,1.2v0.6h-1.5c-1,0-1.9,0.4-1.9,1.4v6.6
|
||||
c0,1,0.8,1.9,1.9,1.9h11c1,0,1.7-1.1,1.7-2.1v-6.4C23.3,43.5,22.5,43.1,21.5,43.1z M16.1,51.5c-2.3,0-4-1.8-4-4c0-2.3,1.8-4,4-4
|
||||
c2.3,0,4,1.8,4,4C20.1,49.7,18.4,51.5,16.1,51.5z"/>
|
||||
<circle class="st1" cx="16.1" cy="47.5" r="2.4"/>
|
||||
</svg>
|
Before Width: | Height: | Size: 2.3 KiB |
39
interface/resources/images/swipe-chevron.svg
Normal file
|
@ -0,0 +1,39 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 72 152" style="enable-background:new 0 0 72 152;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#1794C3;}
|
||||
.st1{fill:#FFFFFF;}
|
||||
</style>
|
||||
<polygon class="st0" points="39.8,152 2.1,152 2.1,0 39.8,0 71.2,76 "/>
|
||||
<rect x="0.8" class="st1" width="2.3" height="152"/>
|
||||
<g>
|
||||
<path d="M29.9,69.7c0.4,0.2,0.7,0.6,0.7,1.1c0,1,0.1,4.3,0.1,5.3c1.5,0.4,5.3,1.8,6.7,2.1c0.2,0,0.4,0,0.6,0.1
|
||||
c0.3,0.1,0.5,0.4,0.5,0.9c0,0.2,0,0.3,0,0.5c0,2.2,0,1.6-0.1,3.8c-0.1,2.3-1.1,4-3.3,4.9c-1.6,0.7-3,0.8-4.3,0.1
|
||||
c-0.4-0.2-0.7-0.4-1.1-0.8c-2.1-1.8-4.3-3.5-6.5-5.3c-0.5-0.4-0.7-1.1-0.4-1.7c0.3-0.6,0.9-0.9,1.5-0.7c0.1,0,0.2,0.1,0.3,0.1
|
||||
c0.1,0.1,0.3,0.2,0.4,0.3c0.9,0.8,1.5,1.5,2.4,2.3c0,0,0,0,0,0c0,0,0.1,0,0.1,0.1c0-0.2,0-0.3,0-0.5c0.1-3.7,0.2-7.4,0.3-11.2
|
||||
c0-0.5,0.1-1,0.5-1.4c0.4-0.3,0.9-0.4,1.3-0.2C29.9,69.7,29.9,69.7,29.9,69.7 M31,67.6c-0.1,0-0.2-0.1-0.3-0.1
|
||||
c-1.2-0.5-2.6-0.4-3.7,0.4c-1.5,1.1-1.5,2.7-1.5,3.2c-0.1,2.3-0.1,4.6-0.2,6.8c-0.1,0-0.2-0.1-0.3-0.1c-1.7-0.5-3.5,0.3-4.4,2
|
||||
c-0.4,0.8-0.5,1.7-0.3,2.5c0.2,0.8,0.7,1.5,1.3,2.1c0.7,0.6,1.4,1.2,2.1,1.7c1.4,1.1,2.9,2.3,4.3,3.5c0.5,0.4,1,0.8,1.5,1
|
||||
c2.5,1.3,4.8,0.6,6.3-0.1c2.9-1.3,4.5-3.8,4.7-7c0-1.3,0.1-1.7,0.1-2.2c0-0.3,0-0.8,0-1.7c0-0.3,0-0.5,0-0.8
|
||||
c-0.1-1.2-0.8-2.2-1.8-2.7c-0.4-0.2-0.8-0.3-1.2-0.4c-0.8-0.2-2.3-0.7-3.7-1.2c-0.4-0.1-0.8-0.3-1.1-0.4c0-0.5,0-1,0-1.6
|
||||
c0-0.8,0-1.6,0-2C33,69.4,32.3,68.3,31,67.6L31,67.6z"/>
|
||||
</g>
|
||||
<path d="M41,65.6l-4.2-3.9l-0.2,2.4c-2.1-0.9-4.6-1.4-7.3-1.4c-2.8,0-5.4,0.5-7.4,1.5l-0.3-2.6l-4,4.1l4.9,2.9l-0.3-2.4
|
||||
c1.9-1,4.4-1.6,7.2-1.6c2.6,0,5.2,0.5,7.1,1.5l-0.2,2.6L41,65.6z"/>
|
||||
<g>
|
||||
<path class="st1" d="M29.6,68.6c0.4,0.2,0.7,0.6,0.7,1.1c0,1,0.1,4.3,0.1,5.3c1.5,0.4,5.3,1.8,6.7,2.1c0.2,0.1,0.4,0.1,0.6,0.2
|
||||
c0.3,0.1,0.5,0.4,0.5,0.8c0,0.2,0,0.3,0,0.5c0,2.2,0,1.6-0.1,3.8c-0.1,2.3-1.1,4-3.3,4.9c-1.6,0.7-3,0.8-4.3,0.1
|
||||
c-0.4-0.2-0.7-0.4-1.1-0.8c-2.1-1.8-4.3-3.5-6.5-5.3c-0.5-0.4-0.7-1.1-0.4-1.7c0.3-0.6,0.9-0.9,1.5-0.7c0.1,0,0.2,0.1,0.3,0.1
|
||||
c0.1,0.1,0.3,0.2,0.4,0.3c0.9,0.8,1.5,1.5,2.4,2.3c0,0,0,0,0,0c0,0,0.1,0,0.1,0.1c0-0.2,0-0.3,0-0.5c0.1-3.7,0.2-7.4,0.3-11.2
|
||||
c0-0.5,0.1-1,0.5-1.4c0.4-0.3,0.9-0.4,1.3-0.2C29.5,68.6,29.5,68.6,29.6,68.6 M30.7,66.5c-0.1,0-0.2-0.1-0.3-0.1
|
||||
c-1.2-0.5-2.6-0.4-3.7,0.4c-1.5,1.1-1.5,2.7-1.5,3.2c-0.1,2.3-0.1,4.6-0.2,6.8c-0.1,0-0.2-0.1-0.3-0.1c-1.7-0.5-3.5,0.3-4.4,2
|
||||
c-0.4,0.8-0.5,1.7-0.3,2.5c0.2,0.8,0.7,1.5,1.3,2.1c0.7,0.6,1.4,1.2,2.1,1.7c1.4,1.1,2.9,2.3,4.3,3.5c0.5,0.4,1,0.8,1.5,1
|
||||
c2.5,1.3,4.8,0.6,6.3-0.1c2.9-1.3,4.5-3.8,4.7-7c0-1.3,0.1-1.7,0.1-2.2c0-0.3,0-0.8,0-1.7c0-0.3,0-0.5,0-0.8
|
||||
c-0.1-1.2-0.7-2.1-1.7-2.6c-0.4-0.2-0.9-0.4-1.3-0.5c-0.8-0.2-2.3-0.7-3.7-1.2c-0.4-0.1-0.8-0.3-1.1-0.4c0-0.5,0-1,0-1.6
|
||||
c0-0.8,0-1.6-0.1-2C32.6,68.3,31.9,67.2,30.7,66.5L30.7,66.5z"/>
|
||||
</g>
|
||||
<path class="st1" d="M40.6,64.5l-4.2-3.9L36.2,63c-2.1-0.9-4.6-1.4-7.3-1.4c-2.8,0-5.4,0.5-7.4,1.5l-0.3-2.6l-4,4.1l4.9,2.9
|
||||
l-0.3-2.4c1.9-1,4.4-1.6,7.2-1.6c2.6,0,5.2,0.5,7.1,1.5l-0.2,2.6L40.6,64.5z"/>
|
||||
</svg>
|
After Width: | Height: | Size: 3.1 KiB |
|
@ -24,7 +24,7 @@ Window {
|
|||
HifiStyles.HifiConstants { id: hifiStyleConstants }
|
||||
|
||||
objectName: "AddressBarDialog"
|
||||
title: "Go To"
|
||||
title: "Go To:"
|
||||
|
||||
shown: false
|
||||
destroyOnHidden: false
|
||||
|
@ -33,6 +33,7 @@ Window {
|
|||
|
||||
width: addressBarDialog.implicitWidth
|
||||
height: addressBarDialog.implicitHeight
|
||||
property int gap: 14
|
||||
|
||||
onShownChanged: {
|
||||
addressBarDialog.keyboardEnabled = HMD.active;
|
||||
|
@ -65,7 +66,7 @@ Window {
|
|||
clearAddressLineTimer.start();
|
||||
}
|
||||
property var allStories: [];
|
||||
property int cardWidth: 200;
|
||||
property int cardWidth: 212;
|
||||
property int cardHeight: 152;
|
||||
property string metaverseBase: addressBarDialog.metaverseServerUrl + "/api/v1/";
|
||||
property bool isCursorVisible: false // Override default cursor visibility.
|
||||
|
@ -78,7 +79,7 @@ Window {
|
|||
property bool punctuationMode: false
|
||||
|
||||
implicitWidth: backgroundImage.width
|
||||
implicitHeight: backgroundImage.height + (keyboardEnabled ? keyboard.height : 0) + cardHeight;
|
||||
implicitHeight: scroll.height + gap + backgroundImage.height + (keyboardEnabled ? keyboard.height : 0);
|
||||
|
||||
// The buttons have their button state changed on hover, so we have to manually fix them up here
|
||||
onBackEnabledChanged: backArrow.buttonState = addressBarDialog.backEnabled ? 1 : 0;
|
||||
|
@ -92,13 +93,14 @@ Window {
|
|||
|
||||
ListView {
|
||||
id: scroll
|
||||
width: backgroundImage.width;
|
||||
height: cardHeight;
|
||||
spacing: hifi.layout.spacing;
|
||||
height: cardHeight + scroll.stackedCardShadowHeight
|
||||
property int stackedCardShadowHeight: 10;
|
||||
spacing: gap;
|
||||
clip: true;
|
||||
anchors {
|
||||
left: backgroundImage.left
|
||||
right: swipe.left
|
||||
bottom: backgroundImage.top
|
||||
horizontalCenter: backgroundImage.horizontalCenter
|
||||
}
|
||||
model: suggestions;
|
||||
orientation: ListView.Horizontal;
|
||||
|
@ -114,29 +116,66 @@ Window {
|
|||
timestamp: model.created_at;
|
||||
onlineUsers: model.online_users;
|
||||
storyId: model.metaverseId;
|
||||
drillDownToPlace: model.drillDownToPlace;
|
||||
shadowHeight: scroll.stackedCardShadowHeight;
|
||||
hoverThunk: function () { ListView.view.currentIndex = index; }
|
||||
unhoverThunk: function () { ListView.view.currentIndex = -1; }
|
||||
}
|
||||
highlightMoveDuration: -1;
|
||||
highlightMoveVelocity: -1;
|
||||
highlight: Rectangle { color: "transparent"; border.width: 4; border.color: "#1DB5ED"; z: 1; }
|
||||
leftMargin: 50; // Start the first item over by about the same amount as the last item peeks through on the other side.
|
||||
rightMargin: 50;
|
||||
highlight: Rectangle { color: "transparent"; border.width: 4; border.color: hifiStyleConstants.colors.blueHighlight; z: 1; }
|
||||
}
|
||||
Image { // Just a visual indicator that the user can swipe the cards over to see more.
|
||||
source: "../images/Swipe-Icon-single.svg"
|
||||
width: 50;
|
||||
id: swipe;
|
||||
source: "../images/swipe-chevron.svg";
|
||||
width: 72;
|
||||
visible: suggestions.count > 3;
|
||||
anchors {
|
||||
right: scroll.right;
|
||||
verticalCenter: scroll.verticalCenter;
|
||||
right: backgroundImage.right;
|
||||
top: scroll.top;
|
||||
}
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
onClicked: scroll.currentIndex = (scroll.currentIndex < 0) ? 3 : (scroll.currentIndex + 3)
|
||||
}
|
||||
}
|
||||
|
||||
Row {
|
||||
spacing: 2 * hifi.layout.spacing;
|
||||
anchors {
|
||||
top: parent.top;
|
||||
left: parent.left;
|
||||
leftMargin: 150;
|
||||
topMargin: -30;
|
||||
}
|
||||
property var selected: allTab;
|
||||
TextButton {
|
||||
id: allTab;
|
||||
text: "ALL";
|
||||
property string includeActions: 'snapshot,concurrency';
|
||||
selected: allTab === selectedTab;
|
||||
action: tabSelect;
|
||||
}
|
||||
TextButton {
|
||||
id: placeTab;
|
||||
text: "PLACES";
|
||||
property string includeActions: 'concurrency';
|
||||
selected: placeTab === selectedTab;
|
||||
action: tabSelect;
|
||||
}
|
||||
TextButton {
|
||||
id: snapsTab;
|
||||
text: "SNAPS";
|
||||
property string includeActions: 'snapshot';
|
||||
selected: snapsTab === selectedTab;
|
||||
action: tabSelect;
|
||||
}
|
||||
}
|
||||
|
||||
Image {
|
||||
id: backgroundImage
|
||||
source: "../images/address-bar.svg"
|
||||
width: 720
|
||||
source: "../images/address-bar-856.svg"
|
||||
width: 856
|
||||
height: 100
|
||||
anchors {
|
||||
bottom: parent.keyboardEnabled ? keyboard.top : parent.bottom;
|
||||
|
@ -362,6 +401,7 @@ Window {
|
|||
tags: tags,
|
||||
description: description,
|
||||
online_users: data.details.concurrency || 0,
|
||||
drillDownToPlace: false,
|
||||
|
||||
searchText: [name].concat(tags, description || []).join(' ').toUpperCase()
|
||||
}
|
||||
|
@ -371,8 +411,30 @@ Window {
|
|||
return true;
|
||||
}
|
||||
return (place.place_name !== AddressManager.placename); // Not our entry, but do show other entry points to current domain.
|
||||
// could also require right protocolVersion
|
||||
}
|
||||
property var selectedTab: allTab;
|
||||
function tabSelect(textButton) {
|
||||
selectedTab = textButton;
|
||||
fillDestinations();
|
||||
}
|
||||
property var placeMap: ({});
|
||||
function addToSuggestions(place) {
|
||||
var collapse = allTab.selected && (place.action !== 'concurrency');
|
||||
if (collapse) {
|
||||
var existing = placeMap[place.place_name];
|
||||
if (existing) {
|
||||
existing.drillDownToPlace = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
suggestions.append(place);
|
||||
if (collapse) {
|
||||
placeMap[place.place_name] = suggestions.get(suggestions.count - 1);
|
||||
} else if (place.action === 'concurrency') {
|
||||
suggestions.get(suggestions.count - 1).drillDownToPlace = true; // Don't change raw place object (in allStories).
|
||||
}
|
||||
}
|
||||
property int requestId: 0;
|
||||
function getUserStoryPage(pageNumber, cb) { // cb(error) after all pages of domain data have been added to model
|
||||
var options = [
|
||||
'now=' + new Date().toISOString(),
|
||||
|
@ -383,29 +445,23 @@ Window {
|
|||
'page=' + pageNumber
|
||||
];
|
||||
var url = metaverseBase + 'user_stories?' + options.join('&');
|
||||
var thisRequestId = ++requestId;
|
||||
getRequest(url, function (error, data) {
|
||||
if (handleError(url, error, data, cb)) {
|
||||
if ((thisRequestId !== requestId) || handleError(url, error, data, cb)) {
|
||||
return;
|
||||
}
|
||||
var stories = data.user_stories.map(function (story) { // explicit single-argument function
|
||||
return makeModelData(story, url);
|
||||
});
|
||||
allStories = allStories.concat(stories);
|
||||
if (!addressLine.text) { // Don't add if the user is already filtering
|
||||
stories.forEach(function (story) {
|
||||
if (suggestable(story)) {
|
||||
suggestions.append(story);
|
||||
}
|
||||
});
|
||||
}
|
||||
stories.forEach(makeFilteredPlaceProcessor());
|
||||
if ((data.current_page < data.total_pages) && (data.current_page <= 10)) { // just 10 pages = 100 stories for now
|
||||
return getUserStoryPage(pageNumber + 1, cb);
|
||||
}
|
||||
cb();
|
||||
});
|
||||
}
|
||||
function filterChoicesByText() {
|
||||
suggestions.clear();
|
||||
function makeFilteredPlaceProcessor() { // answer a function(placeData) that adds it to suggestions if it matches
|
||||
var words = addressLine.text.toUpperCase().split(/\s+/).filter(identity),
|
||||
data = allStories;
|
||||
function matches(place) {
|
||||
|
@ -416,16 +472,22 @@ Window {
|
|||
return place.searchText.indexOf(word) >= 0;
|
||||
});
|
||||
}
|
||||
data.forEach(function (place) {
|
||||
return function (place) {
|
||||
if (matches(place)) {
|
||||
suggestions.append(place);
|
||||
addToSuggestions(place);
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
function filterChoicesByText() {
|
||||
suggestions.clear();
|
||||
placeMap = {};
|
||||
allStories.forEach(makeFilteredPlaceProcessor());
|
||||
}
|
||||
|
||||
function fillDestinations() {
|
||||
allStories = [];
|
||||
suggestions.clear();
|
||||
placeMap = {};
|
||||
getUserStoryPage(1, function (error) {
|
||||
console.log('user stories query', error || 'ok', allStories.length);
|
||||
});
|
||||
|
|
|
@ -18,6 +18,7 @@ import "toolbars"
|
|||
import "../styles-uit"
|
||||
|
||||
Rectangle {
|
||||
id: root;
|
||||
property string userName: "";
|
||||
property string placeName: "";
|
||||
property string action: "";
|
||||
|
@ -27,13 +28,22 @@ Rectangle {
|
|||
property var goFunction: null;
|
||||
property string storyId: "";
|
||||
|
||||
property bool drillDownToPlace: false;
|
||||
property bool showPlace: isConcurrency;
|
||||
property string messageColor: hifi.colors.blueAccent;
|
||||
property string timePhrase: pastTime(timestamp);
|
||||
property int onlineUsers: 0;
|
||||
property bool isConcurrency: action === 'concurrency';
|
||||
property bool isStacked: !isConcurrency && drillDownToPlace;
|
||||
|
||||
property int textPadding: 10;
|
||||
property int smallMargin: 4;
|
||||
property int messageHeight: 40;
|
||||
property int textSize: 24;
|
||||
property int textSizeSmall: 18;
|
||||
property int stackShadowNarrowing: 5;
|
||||
property string defaultThumbnail: Qt.resolvedUrl("../../images/default-domain.gif");
|
||||
property int shadowHeight: 20;
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
function pastTime(timestamp) { // Answer a descriptive string
|
||||
|
@ -59,13 +69,16 @@ Rectangle {
|
|||
|
||||
Image {
|
||||
id: lobby;
|
||||
width: parent.width;
|
||||
height: parent.height;
|
||||
width: parent.width - (isConcurrency ? 0 : (2 * smallMargin));
|
||||
height: parent.height - messageHeight - (isConcurrency ? 0 : smallMargin);
|
||||
source: thumbnail || defaultThumbnail;
|
||||
fillMode: Image.PreserveAspectCrop;
|
||||
// source gets filled in later
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
anchors.left: parent.left;
|
||||
anchors {
|
||||
horizontalCenter: parent.horizontalCenter;
|
||||
top: parent.top;
|
||||
topMargin: isConcurrency ? 0 : smallMargin;
|
||||
}
|
||||
onStatusChanged: {
|
||||
if (status == Image.Error) {
|
||||
console.log("source: " + source + ": failed to load " + hifiUrl);
|
||||
|
@ -73,13 +86,41 @@ Rectangle {
|
|||
}
|
||||
}
|
||||
}
|
||||
Rectangle {
|
||||
id: shadow1;
|
||||
visible: isStacked;
|
||||
width: parent.width - stackShadowNarrowing;
|
||||
height: shadowHeight / 2;
|
||||
anchors {
|
||||
top: parent.bottom;
|
||||
horizontalCenter: parent.horizontalCenter;
|
||||
}
|
||||
gradient: Gradient {
|
||||
GradientStop { position: 0.0; color: "gray" }
|
||||
GradientStop { position: 1.0; color: "white" }
|
||||
}
|
||||
}
|
||||
Rectangle {
|
||||
id: shadow2;
|
||||
visible: isStacked;
|
||||
width: shadow1.width - stackShadowNarrowing;
|
||||
height: shadowHeight / 2;
|
||||
anchors {
|
||||
top: shadow1.bottom;
|
||||
horizontalCenter: parent.horizontalCenter;
|
||||
}
|
||||
gradient: Gradient {
|
||||
GradientStop { position: 0.0; color: "gray" }
|
||||
GradientStop { position: 1.0; color: "white" }
|
||||
}
|
||||
}
|
||||
property int dropHorizontalOffset: 0;
|
||||
property int dropVerticalOffset: 1;
|
||||
property int dropRadius: 2;
|
||||
property int dropSamples: 9;
|
||||
property int dropSpread: 0;
|
||||
DropShadow {
|
||||
visible: desktop.gradientsSupported;
|
||||
visible: showPlace && desktop.gradientsSupported;
|
||||
source: place;
|
||||
anchors.fill: place;
|
||||
horizontalOffset: dropHorizontalOffset;
|
||||
|
@ -89,37 +130,57 @@ Rectangle {
|
|||
color: hifi.colors.black;
|
||||
spread: dropSpread;
|
||||
}
|
||||
DropShadow {
|
||||
visible: users.visible && desktop.gradientsSupported;
|
||||
source: users;
|
||||
anchors.fill: users;
|
||||
horizontalOffset: dropHorizontalOffset;
|
||||
verticalOffset: dropVerticalOffset;
|
||||
radius: dropRadius;
|
||||
samples: dropSamples;
|
||||
color: hifi.colors.black;
|
||||
spread: dropSpread;
|
||||
}
|
||||
RalewaySemiBold {
|
||||
id: place;
|
||||
visible: showPlace;
|
||||
text: placeName;
|
||||
color: hifi.colors.white;
|
||||
size: textSize;
|
||||
elide: Text.ElideRight; // requires constrained width
|
||||
anchors {
|
||||
top: parent.top;
|
||||
left: parent.left;
|
||||
right: parent.right;
|
||||
margins: textPadding;
|
||||
}
|
||||
}
|
||||
FiraSansRegular {
|
||||
id: users;
|
||||
text: (action === 'concurrency') ? onlineUsers : 'snapshot';
|
||||
size: (action === 'concurrency') ? textSize : textSizeSmall;
|
||||
color: hifi.colors.white;
|
||||
Row {
|
||||
FiraSansRegular {
|
||||
id: users;
|
||||
visible: isConcurrency;
|
||||
text: onlineUsers;
|
||||
size: textSize;
|
||||
color: messageColor;
|
||||
anchors.verticalCenter: message.verticalCenter;
|
||||
}
|
||||
Image {
|
||||
id: icon;
|
||||
source: "../../images/snap-icon.svg"
|
||||
width: 40;
|
||||
height: 40;
|
||||
visible: action === 'snapshot';
|
||||
}
|
||||
RalewayRegular {
|
||||
id: message;
|
||||
text: isConcurrency ? ((onlineUsers === 1) ? "person" : "people") : (drillDownToPlace ? "snapshots" : ("by " + userName));
|
||||
size: textSizeSmall;
|
||||
color: messageColor;
|
||||
elide: Text.ElideRight; // requires a width to be specified`
|
||||
width: root.width - textPadding
|
||||
- (users.visible ? users.width + parent.spacing : 0)
|
||||
- (icon.visible ? icon.width + parent.spacing : 0)
|
||||
- (actionIcon.width + (2 * smallMargin));
|
||||
anchors {
|
||||
bottom: parent.bottom;
|
||||
bottomMargin: parent.spacing;
|
||||
}
|
||||
}
|
||||
spacing: textPadding;
|
||||
height: messageHeight;
|
||||
anchors {
|
||||
verticalCenter: usersImage.verticalCenter;
|
||||
right: usersImage.left;
|
||||
margins: textPadding;
|
||||
bottom: parent.bottom;
|
||||
left: parent.left;
|
||||
leftMargin: textPadding;
|
||||
}
|
||||
}
|
||||
// These two can be supplied to provide hover behavior.
|
||||
|
@ -128,7 +189,6 @@ Rectangle {
|
|||
property var hoverThunk: function () { };
|
||||
property var unhoverThunk: function () { };
|
||||
MouseArea {
|
||||
id: zmouseArea;
|
||||
anchors.fill: parent;
|
||||
acceptedButtons: Qt.LeftButton;
|
||||
onClicked: goFunction("hifi://" + hifiUrl);
|
||||
|
@ -136,18 +196,26 @@ Rectangle {
|
|||
onEntered: hoverThunk();
|
||||
onExited: unhoverThunk();
|
||||
}
|
||||
ToolbarButton {
|
||||
id: usersImage;
|
||||
imageURL: "../../images/" + action + ".svg";
|
||||
StateImage {
|
||||
id: actionIcon;
|
||||
imageURL: "../../images/info-icon-2-state.svg";
|
||||
size: 32;
|
||||
onClicked: goFunction("/user_stories/" + storyId);
|
||||
buttonState: 0;
|
||||
defaultState: 0;
|
||||
hoverState: 1;
|
||||
buttonState: messageArea.containsMouse ? 1 : 0;
|
||||
anchors {
|
||||
bottom: parent.bottom;
|
||||
right: parent.right;
|
||||
margins: textPadding;
|
||||
margins: smallMargin;
|
||||
}
|
||||
}
|
||||
MouseArea {
|
||||
id: messageArea;
|
||||
width: parent.width;
|
||||
height: messageHeight;
|
||||
anchors {
|
||||
top: lobby.bottom;
|
||||
}
|
||||
acceptedButtons: Qt.LeftButton;
|
||||
onClicked: goFunction(drillDownToPlace ? ("/places/" + placeName) : ("/user_stories/" + storyId));
|
||||
hoverEnabled: true;
|
||||
}
|
||||
}
|
||||
|
|
56
interface/resources/qml/hifi/TextButton.qml
Normal file
|
@ -0,0 +1,56 @@
|
|||
//
|
||||
// TextButton.qml
|
||||
//
|
||||
// Created by Howard Stearns 11/12/16
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
import Hifi 1.0
|
||||
import QtQuick 2.4
|
||||
import "../styles-uit"
|
||||
|
||||
Rectangle {
|
||||
property alias text: label.text;
|
||||
property alias pixelSize: label.font.pixelSize;
|
||||
property bool selected: false;
|
||||
property int spacing: 2;
|
||||
property var action: function () { };
|
||||
property string highlightColor: hifi.colors.blueHighlight;
|
||||
width: label.width + 64;
|
||||
height: 32;
|
||||
color: "transparent";
|
||||
HifiConstants { id: hifi; }
|
||||
RalewaySemiBold {
|
||||
id: label;
|
||||
color: hifi.colors.white;
|
||||
font.pixelSize: 20;
|
||||
anchors {
|
||||
horizontalCenter: parent.horizontalCenter;
|
||||
verticalCenter: parent.verticalCenter;
|
||||
}
|
||||
}
|
||||
Rectangle {
|
||||
// This is crazy. All of this stuff (except the opacity) ought to be in the parent, with the label drawn on top.
|
||||
// But there's a bug in QT such that if you select this TextButton, AND THEN enter the area of
|
||||
// a TextButton created before this one, AND THEN enter a ListView with a highlight, then our label
|
||||
// will draw as though it on the bottom. (If the phase of the moon is right, it will do this for a
|
||||
// about half a second and then render normally. But if you're not lucky it just stays this way.)
|
||||
// So.... here we deliberately put the rectangle on TOP of the text so that you can't tell when the bug
|
||||
// is happening.
|
||||
anchors.fill: parent;
|
||||
radius: height / 2;
|
||||
border.width: 4;
|
||||
border.color: clickArea.containsMouse ? highlightColor : "transparent";
|
||||
color: clickArea.containsPress ? hifi.colors.darkGray : (selected ? hifi.colors.blueAccent : "transparent");
|
||||
opacity: (clickArea.containsMouse && !clickArea.containsPress) ? 0.8 : 0.5;
|
||||
}
|
||||
MouseArea {
|
||||
id: clickArea;
|
||||
anchors.fill: parent;
|
||||
acceptedButtons: Qt.LeftButton;
|
||||
onClicked: action(parent);
|
||||
hoverEnabled: true;
|
||||
}
|
||||
}
|
34
interface/resources/qml/hifi/toolbars/StateImage.qml
Normal file
|
@ -0,0 +1,34 @@
|
|||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
|
||||
Item {
|
||||
property alias imageURL: image.source
|
||||
property alias alpha: image.opacity
|
||||
property var subImage;
|
||||
property int yOffset: 0
|
||||
property int buttonState: 0
|
||||
property real size: 50
|
||||
width: size; height: size
|
||||
property bool pinned: false
|
||||
clip: true
|
||||
|
||||
function updateYOffset() { yOffset = size * buttonState; }
|
||||
onButtonStateChanged: updateYOffset();
|
||||
|
||||
Component.onCompleted: {
|
||||
if (subImage) {
|
||||
if (subImage.y) {
|
||||
yOffset = subImage.y;
|
||||
return;
|
||||
}
|
||||
}
|
||||
updateYOffset();
|
||||
}
|
||||
|
||||
Image {
|
||||
id: image
|
||||
y: -parent.yOffset;
|
||||
width: parent.width
|
||||
}
|
||||
}
|
||||
|
|
@ -1,41 +1,13 @@
|
|||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
|
||||
Item {
|
||||
StateImage {
|
||||
id: button
|
||||
property alias imageURL: image.source
|
||||
property alias alpha: image.opacity
|
||||
property var subImage;
|
||||
property int yOffset: 0
|
||||
property int buttonState: 0
|
||||
property int hoverState: -1
|
||||
property int defaultState: -1
|
||||
property var toolbar;
|
||||
property real size: 50 // toolbar ? toolbar.buttonSize : 50
|
||||
width: size; height: size
|
||||
property bool pinned: false
|
||||
clip: true
|
||||
|
||||
onButtonStateChanged: {
|
||||
yOffset = size * buttonState;
|
||||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
if (subImage) {
|
||||
if (subImage.y) {
|
||||
yOffset = subImage.y;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
signal clicked()
|
||||
|
||||
Image {
|
||||
id: image
|
||||
y: -button.yOffset;
|
||||
width: parent.width
|
||||
}
|
||||
|
||||
Timer {
|
||||
id: asyncClickSender
|
||||
interval: 10
|
||||
|
|
|
@ -152,6 +152,7 @@
|
|||
#include "ui/LoginDialog.h"
|
||||
#include "ui/overlays/Cube3DOverlay.h"
|
||||
#include "ui/Snapshot.h"
|
||||
#include "ui/SnapshotAnimated.h"
|
||||
#include "ui/StandAloneJSConsole.h"
|
||||
#include "ui/Stats.h"
|
||||
#include "ui/UpdateDialog.h"
|
||||
|
@ -5428,19 +5429,27 @@ void Application::toggleLogDialog() {
|
|||
}
|
||||
}
|
||||
|
||||
void Application::takeSnapshot(bool notify, float aspectRatio) {
|
||||
postLambdaEvent([notify, aspectRatio, this] {
|
||||
|
||||
void Application::takeSnapshot(bool notify, bool includeAnimated, float aspectRatio) {
|
||||
postLambdaEvent([notify, includeAnimated, aspectRatio, this] {
|
||||
QMediaPlayer* player = new QMediaPlayer();
|
||||
QFileInfo inf = QFileInfo(PathUtils::resourcesPath() + "sounds/snap.wav");
|
||||
player->setMedia(QUrl::fromLocalFile(inf.absoluteFilePath()));
|
||||
player->play();
|
||||
|
||||
// Get a screenshot and save it
|
||||
QString path = Snapshot::saveSnapshot(getActiveDisplayPlugin()->getScreenshot(aspectRatio));
|
||||
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotTaken(path, notify);
|
||||
// If we're not doing an animated snapshot as well...
|
||||
if (!includeAnimated || !(SnapshotAnimated::alsoTakeAnimatedSnapshot.get())) {
|
||||
// Tell the dependency manager that the capture of the still snapshot has taken place.
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotTaken(path, "", notify);
|
||||
} else {
|
||||
// Get an animated GIF snapshot and save it
|
||||
SnapshotAnimated::saveSnapshotAnimated(path, aspectRatio, qApp, DependencyManager::get<WindowScriptingInterface>());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
void Application::shareSnapshot(const QString& path) {
|
||||
postLambdaEvent([path] {
|
||||
// not much to do here, everything is done in snapshot code...
|
||||
|
|
|
@ -266,7 +266,7 @@ public:
|
|||
float getAvatarSimrate() const { return _avatarSimCounter.rate(); }
|
||||
float getAverageSimsPerSecond() const { return _simCounter.rate(); }
|
||||
|
||||
void takeSnapshot(bool notify, float aspectRatio = 0.0f);
|
||||
void takeSnapshot(bool notify, bool includeAnimated = false, float aspectRatio = 0.0f);
|
||||
void shareSnapshot(const QString& filename);
|
||||
|
||||
model::SkyboxPointer getDefaultSkybox() const { return _defaultSkybox; }
|
||||
|
|
|
@ -17,6 +17,30 @@
|
|||
|
||||
class MenuItemProperties;
|
||||
|
||||
/**jsdoc
|
||||
* The `Menu` provides access to the menu that is shown at the top of the window
|
||||
* shown on a user's desktop and the right click menu that is accessible
|
||||
* in both Desktop and HMD mode.
|
||||
*
|
||||
* <h3>Groupings</h3>
|
||||
* A `grouping` is a way to group a set of menus and/or menu items together
|
||||
* so that they can all be set visible or invisible as a group. There are
|
||||
* 2 available groups: "Advanced" and "Developer"
|
||||
* These groupings can be toggled in the "Settings" menu.
|
||||
*
|
||||
* @namespace Menu
|
||||
*/
|
||||
|
||||
/**
|
||||
* CURRENTLY NOT WORKING:
|
||||
*
|
||||
* <h3>Action groups</h3>
|
||||
* When 1+ menu items are checkable and in the same action group, only 1 can be
|
||||
* selected at any one time. If another item in the action group is selected, the
|
||||
* previous will be deselected. This feature provides the ability to create
|
||||
* "radio-button"-like menus.
|
||||
*/
|
||||
|
||||
class MenuScriptingInterface : public QObject {
|
||||
Q_OBJECT
|
||||
MenuScriptingInterface() { };
|
||||
|
@ -28,33 +52,142 @@ private slots:
|
|||
void menuItemTriggered();
|
||||
|
||||
public slots:
|
||||
/**jsdoc
|
||||
* Add a new top-level menu.
|
||||
* @function Menu.addMenu
|
||||
* @param {string} menuName Name that will be shown in the menu.
|
||||
* @param {string} grouping Name of the grouping to add this menu to.
|
||||
*/
|
||||
void addMenu(const QString& menuName, const QString& grouping = QString());
|
||||
|
||||
/**jsdoc
|
||||
* Remove a top-level menu.
|
||||
* @function Menu.removeMenu
|
||||
* @param {string} menuName Name of the menu to remove.
|
||||
*/
|
||||
void removeMenu(const QString& menuName);
|
||||
|
||||
/**jsdoc
|
||||
* Check whether a top-level menu exists.
|
||||
* @function Menu.menuExists
|
||||
* @param {string} menuName Name of the menu to check for existence.
|
||||
* @return {bool} `true` if the menu exists, otherwise `false`.
|
||||
*/
|
||||
bool menuExists(const QString& menuName);
|
||||
|
||||
/**jsdoc
|
||||
* Add a separator with an unclickable label below it.
|
||||
* The line will be placed at the bottom of the menu.
|
||||
* @function Menu.addSeparator
|
||||
* @param {string} menuName Name of the menu to add a separator to.
|
||||
* @param {string} separatorName Name of the separator that will be shown (but unclickable) below the separator line.
|
||||
*/
|
||||
void addSeparator(const QString& menuName, const QString& separatorName);
|
||||
|
||||
/**jsdoc
|
||||
* Remove a separator and its label from a menu.
|
||||
* @function Menu.removeSeparator
|
||||
* @param {string} menuName Name of the menu to remove a separator from.
|
||||
* @param {string} separatorName Name of the separator to remove.
|
||||
*/
|
||||
void removeSeparator(const QString& menuName, const QString& separatorName);
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* Add a new menu item to a menu.
|
||||
* @function Menu.addMenuItem
|
||||
* @param {Menu.MenuItemProperties} properties
|
||||
*/
|
||||
void addMenuItem(const MenuItemProperties& properties);
|
||||
|
||||
/**jsdoc
|
||||
* Add a new menu item to a menu.
|
||||
* @function Menu.addMenuItem
|
||||
* @param {string} menuName Name of the menu to add a menu item to.
|
||||
* @param {string} menuItem Name of the menu item. This is what will be displayed in the menu.
|
||||
* @param {string} shortcutKey A shortcut key that can be used to trigger the menu item.
|
||||
*/
|
||||
void addMenuItem(const QString& menuName, const QString& menuitem, const QString& shortcutKey);
|
||||
|
||||
/**jsdoc
|
||||
* Add a new menu item to a menu.
|
||||
* @function Menu.addMenuItem
|
||||
* @param {string} menuName Name of the menu to add a menu item to.
|
||||
* @param {string} menuItem Name of the menu item. This is what will be displayed in the menu.
|
||||
*/
|
||||
void addMenuItem(const QString& menuName, const QString& menuitem);
|
||||
|
||||
/**jsdoc
|
||||
* Remove a menu item from a menu.
|
||||
* @function Menu.removeMenuItem
|
||||
* @param {string} menuName Name of the menu to remove a menu item from.
|
||||
* @param {string} menuItem Name of the menu item to remove.
|
||||
*/
|
||||
void removeMenuItem(const QString& menuName, const QString& menuitem);
|
||||
|
||||
/**jsdoc
|
||||
* Check if a menu item exists.
|
||||
* @function Menu.menuItemExists
|
||||
* @param {string} menuName Name of the menu that the menu item is in.
|
||||
* @param {string} menuItem Name of the menu item to check for existence of.
|
||||
* @return {bool} `true` if the menu item exists, otherwise `false`.
|
||||
*/
|
||||
bool menuItemExists(const QString& menuName, const QString& menuitem);
|
||||
|
||||
/**
|
||||
* Not working, will not document until fixed
|
||||
*/
|
||||
void addActionGroup(const QString& groupName, const QStringList& actionList,
|
||||
const QString& selected = QString());
|
||||
void removeActionGroup(const QString& groupName);
|
||||
|
||||
|
||||
/**jsdoc
|
||||
* Check whether a checkable menu item is checked.
|
||||
* @function Menu.isOptionChecked
|
||||
* @param {string} menuOption The name of the menu item.
|
||||
* @return `true` if the option is checked, otherwise false.
|
||||
*/
|
||||
bool isOptionChecked(const QString& menuOption);
|
||||
|
||||
/**jsdoc
|
||||
* Set a checkable menu item as checked or unchecked.
|
||||
* @function Menu.setIsOptionChecked
|
||||
* @param {string} menuOption The name of the menu item to modify.
|
||||
* @param {bool} isChecked If `true`, the menu item will be checked, otherwise it will not be checked.
|
||||
*/
|
||||
void setIsOptionChecked(const QString& menuOption, bool isChecked);
|
||||
|
||||
/**jsdoc
|
||||
* Toggle the status of a checkable menu item. If it is checked, it will be unchecked.
|
||||
* If it is unchecked, it will be checked.
|
||||
* @function Menu.setIsOptionChecked
|
||||
* @param {string} menuOption The name of the menu item to toggle.
|
||||
*/
|
||||
void triggerOption(const QString& menuOption);
|
||||
|
||||
/**jsdoc
|
||||
* Check whether a menu is enabled. If a menu is disabled it will be greyed out
|
||||
* and unselectable.
|
||||
* Menus are enabled by default.
|
||||
* @function Menu.isMenuEnabled
|
||||
* @param {string} menuName The name of the menu to check.
|
||||
* @return {bool} `true` if the menu is enabled, otherwise false.
|
||||
*/
|
||||
bool isMenuEnabled(const QString& menuName);
|
||||
|
||||
/**jsdoc
|
||||
* Set a menu to be enabled or disabled.
|
||||
* @function Menu.setMenuEnabled
|
||||
* @param {string} menuName The name of the menu to modify.
|
||||
* @param {bool} isEnabled Whether the menu will be enabled or not.
|
||||
*/
|
||||
void setMenuEnabled(const QString& menuName, bool isEnabled);
|
||||
|
||||
|
||||
signals:
|
||||
/**jsdoc
|
||||
* This is a signal that is emitted when a menu item is clicked.
|
||||
* @function Menu.menuItemEvent
|
||||
* @param {string} menuItem Name of the menu item that was triggered.
|
||||
*/
|
||||
void menuItemEvent(const QString& menuItem);
|
||||
};
|
||||
|
||||
|
|
|
@ -199,8 +199,8 @@ void WindowScriptingInterface::copyToClipboard(const QString& text) {
|
|||
QApplication::clipboard()->setText(text);
|
||||
}
|
||||
|
||||
void WindowScriptingInterface::takeSnapshot(bool notify, float aspectRatio) {
|
||||
qApp->takeSnapshot(notify, aspectRatio);
|
||||
void WindowScriptingInterface::takeSnapshot(bool notify, bool includeAnimated, float aspectRatio) {
|
||||
qApp->takeSnapshot(notify, includeAnimated, aspectRatio);
|
||||
}
|
||||
|
||||
void WindowScriptingInterface::shareSnapshot(const QString& path) {
|
||||
|
|
|
@ -52,7 +52,7 @@ public slots:
|
|||
QScriptValue save(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
|
||||
void showAssetServer(const QString& upload = "");
|
||||
void copyToClipboard(const QString& text);
|
||||
void takeSnapshot(bool notify = true, float aspectRatio = 0.0f);
|
||||
void takeSnapshot(bool notify = true, bool includeAnimated = false, float aspectRatio = 0.0f);
|
||||
void shareSnapshot(const QString& path);
|
||||
bool isPhysicsEnabled();
|
||||
|
||||
|
@ -60,7 +60,7 @@ signals:
|
|||
void domainChanged(const QString& domainHostname);
|
||||
void svoImportRequested(const QString& url);
|
||||
void domainConnectionRefused(const QString& reasonMessage, int reasonCode, const QString& extraInfo);
|
||||
void snapshotTaken(const QString& path, bool notify);
|
||||
void snapshotTaken(const QString& pathStillSnapshot, const QString& pathAnimatedSnapshot, bool notify);
|
||||
void snapshotShared(const QString& error);
|
||||
|
||||
private:
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
#include "LODManager.h"
|
||||
#include "Menu.h"
|
||||
#include "Snapshot.h"
|
||||
#include "SnapshotAnimated.h"
|
||||
#include "UserActivityLogger.h"
|
||||
|
||||
#include "AmbientOcclusionEffect.h"
|
||||
|
@ -83,6 +84,20 @@ void setupPreferences() {
|
|||
auto preference = new BrowsePreference(SNAPSHOTS, "Put my snapshots here", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
{
|
||||
auto getter = []()->bool { return SnapshotAnimated::alsoTakeAnimatedSnapshot.get(); };
|
||||
auto setter = [](bool value) { SnapshotAnimated::alsoTakeAnimatedSnapshot.set(value); };
|
||||
preferences->addPreference(new CheckPreference(SNAPSHOTS, "Take Animated GIF Snapshot with HUD Button", getter, setter));
|
||||
}
|
||||
{
|
||||
auto getter = []()->float { return SnapshotAnimated::snapshotAnimatedDuration.get(); };
|
||||
auto setter = [](float value) { SnapshotAnimated::snapshotAnimatedDuration.set(value); };
|
||||
auto preference = new SpinnerPreference(SNAPSHOTS, "Animated Snapshot Duration", getter, setter);
|
||||
preference->setMin(3);
|
||||
preference->setMax(10);
|
||||
preference->setStep(1);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
|
||||
// Scripts
|
||||
{
|
||||
|
|
|
@@ -51,16 +51,24 @@ SnapshotMetaData* Snapshot::parseSnapshotData(QString snapshotPath) {
return NULL;
}

QImage shot(snapshotPath);
QUrl url;

// no location data stored
if (shot.text(URL).isEmpty()) {
if (snapshotPath.right(3) == "jpg") {
QImage shot(snapshotPath);

// no location data stored
if (shot.text(URL).isEmpty()) {
return NULL;
}

// parsing URL
url = QUrl(shot.text(URL), QUrl::ParsingMode::StrictMode);
} else if (snapshotPath.right(3) == "gif") {
url = QUrl(DependencyManager::get<AddressManager>()->currentShareableAddress());
} else {
return NULL;
}

// parsing URL
QUrl url = QUrl(shot.text(URL), QUrl::ParsingMode::StrictMode);

SnapshotMetaData* data = new SnapshotMetaData();
data->setURL(url);

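parseSnapshotData reads the share URL back out of the still image's text metadata; the corresponding write is plain QImage text-key usage. A sketch, assuming URL is the same text key Snapshot uses when saving (the save call site is not shown in this excerpt):

// Sketch only: embed a shareable address into a still snapshot so parseSnapshotData()
// can later recover it via shot.text(URL). The .gif branch above does not rely on
// embedded metadata and falls back to the current address instead.
QImage shot = qApp->getActiveDisplayPlugin()->getScreenshot(aspectRatio);
QUrl href = QUrl(DependencyManager::get<AddressManager>()->currentShareableAddress());
shot.setText(URL, href.toString());
shot.save(snapshotPath);
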
@@ -156,7 +164,11 @@ void Snapshot::uploadSnapshot(const QString& filename) {
file->open(QIODevice::ReadOnly);

QHttpPart imagePart;
imagePart.setHeader(QNetworkRequest::ContentTypeHeader, QVariant("image/jpeg"));
if (filename.right(3) == "gif") {
imagePart.setHeader(QNetworkRequest::ContentTypeHeader, QVariant("image/gif"));
} else {
imagePart.setHeader(QNetworkRequest::ContentTypeHeader, QVariant("image/jpeg"));
}
imagePart.setHeader(QNetworkRequest::ContentDispositionHeader,
QVariant("form-data; name=\"image\"; filename=\"" + file->fileName() + "\""));
imagePart.setBodyDevice(file);

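Matching on the last three characters of the filename works for the two formats this feature produces; the same decision could also be made suffix-aware. A sketch (not part of this change) using QFileInfo:

// Sketch: derive the MIME type from the file suffix instead of filename.right(3).
QString suffix = QFileInfo(filename).suffix().toLower();
QString mimeType = (suffix == "gif") ? "image/gif" : "image/jpeg";
imagePart.setHeader(QNetworkRequest::ContentTypeHeader, QVariant(mimeType));
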
139
interface/src/ui/SnapshotAnimated.cpp
Normal file

@@ -0,0 +1,139 @@
|
|||
//
|
||||
// SnapshotAnimated.cpp
|
||||
// interface/src/ui
|
||||
//
|
||||
// Created by Zach Fox on 11/14/16.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <QtCore/QDateTime>
|
||||
#include <QtCore/QObject>
|
||||
#include <QtCore/QString>
|
||||
#include <QtGui/QImage>
|
||||
|
||||
#include <QtConcurrent/qtconcurrentrun.h>
|
||||
#include "SnapshotAnimated.h"
|
||||
|
||||
QTimer* SnapshotAnimated::snapshotAnimatedTimer = NULL;
|
||||
qint64 SnapshotAnimated::snapshotAnimatedTimestamp = 0;
|
||||
qint64 SnapshotAnimated::snapshotAnimatedFirstFrameTimestamp = 0;
|
||||
bool SnapshotAnimated::snapshotAnimatedTimerRunning = false;
|
||||
QString SnapshotAnimated::snapshotAnimatedPath;
|
||||
QString SnapshotAnimated::snapshotStillPath;
|
||||
QVector<QImage> SnapshotAnimated::snapshotAnimatedFrameVector;
|
||||
QVector<qint64> SnapshotAnimated::snapshotAnimatedFrameDelayVector;
|
||||
Application* SnapshotAnimated::app;
|
||||
float SnapshotAnimated::aspectRatio;
|
||||
QSharedPointer<WindowScriptingInterface> SnapshotAnimated::snapshotAnimatedDM;
|
||||
GifWriter SnapshotAnimated::snapshotAnimatedGifWriter;
|
||||
|
||||
|
||||
Setting::Handle<bool> SnapshotAnimated::alsoTakeAnimatedSnapshot("alsoTakeAnimatedSnapshot", true);
|
||||
Setting::Handle<float> SnapshotAnimated::snapshotAnimatedDuration("snapshotAnimatedDuration", SNAPSNOT_ANIMATED_DURATION_SECS);
|
||||
|
||||
void SnapshotAnimated::saveSnapshotAnimated(QString pathStill, float aspectRatio, Application* app, QSharedPointer<WindowScriptingInterface> dm) {
|
||||
// If we're not in the middle of capturing an animated snapshot...
|
||||
if (SnapshotAnimated::snapshotAnimatedFirstFrameTimestamp == 0) {
|
||||
SnapshotAnimated::snapshotAnimatedTimer = new QTimer();
|
||||
SnapshotAnimated::aspectRatio = aspectRatio;
|
||||
SnapshotAnimated::app = app;
|
||||
SnapshotAnimated::snapshotAnimatedDM = dm;
|
||||
// Define the output location of the still and animated snapshots.
|
||||
SnapshotAnimated::snapshotStillPath = pathStill;
|
||||
SnapshotAnimated::snapshotAnimatedPath = pathStill;
|
||||
SnapshotAnimated::snapshotAnimatedPath.replace("jpg", "gif");
|
||||
|
||||
// Ensure the snapshot timer is Precise (attempted millisecond precision)
|
||||
SnapshotAnimated::snapshotAnimatedTimer->setTimerType(Qt::PreciseTimer);
|
||||
|
||||
// Connect the snapshotAnimatedTimer QTimer to the lambda slot function
|
||||
QObject::connect((SnapshotAnimated::snapshotAnimatedTimer), &QTimer::timeout, captureFrames);
|
||||
|
||||
// Start the snapshotAnimatedTimer QTimer - argument for this is in milliseconds
|
||||
SnapshotAnimated::snapshotAnimatedTimerRunning = true;
|
||||
SnapshotAnimated::snapshotAnimatedTimer->start(SNAPSNOT_ANIMATED_FRAME_DELAY_MSEC);
|
||||
// If we're already in the middle of capturing an animated snapshot...
|
||||
} else {
|
||||
// Just tell the dependency manager that the capture of the still snapshot has taken place.
|
||||
emit dm->snapshotTaken(pathStill, "", false);
|
||||
}
|
||||
}
|
||||
|
||||
void SnapshotAnimated::captureFrames() {
|
||||
if (SnapshotAnimated::snapshotAnimatedTimerRunning) {
|
||||
// Get a screenshot from the display, then scale the screenshot down,
|
||||
// then convert it to the image format the GIF library needs,
|
||||
// then save all that to the QImage named "frame"
|
||||
QImage frame(SnapshotAnimated::app->getActiveDisplayPlugin()->getScreenshot(SnapshotAnimated::aspectRatio));
|
||||
frame = frame.scaledToWidth(SNAPSNOT_ANIMATED_WIDTH);
|
||||
SnapshotAnimated::snapshotAnimatedFrameVector.append(frame);
|
||||
|
||||
// If that was the first frame...
|
||||
if (SnapshotAnimated::snapshotAnimatedFirstFrameTimestamp == 0) {
|
||||
// Record the current frame timestamp
|
||||
SnapshotAnimated::snapshotAnimatedTimestamp = QDateTime::currentMSecsSinceEpoch();
|
||||
// Record the first frame timestamp
|
||||
SnapshotAnimated::snapshotAnimatedFirstFrameTimestamp = SnapshotAnimated::snapshotAnimatedTimestamp;
|
||||
SnapshotAnimated::snapshotAnimatedFrameDelayVector.append(SNAPSNOT_ANIMATED_FRAME_DELAY_MSEC / 10);
|
||||
// If this is an intermediate or the final frame...
|
||||
} else {
|
||||
// Push the current frame delay onto the vector
|
||||
SnapshotAnimated::snapshotAnimatedFrameDelayVector.append(round(((float)(QDateTime::currentMSecsSinceEpoch() - SnapshotAnimated::snapshotAnimatedTimestamp)) / 10));
|
||||
// Record the current frame timestamp
|
||||
SnapshotAnimated::snapshotAnimatedTimestamp = QDateTime::currentMSecsSinceEpoch();
|
||||
|
||||
// If that was the last frame...
|
||||
if ((SnapshotAnimated::snapshotAnimatedTimestamp - SnapshotAnimated::snapshotAnimatedFirstFrameTimestamp) >= (SnapshotAnimated::snapshotAnimatedDuration.get() * MSECS_PER_SECOND)) {
|
||||
SnapshotAnimated::snapshotAnimatedTimerRunning = false;
|
||||
// Reset the current frame timestamp
|
||||
SnapshotAnimated::snapshotAnimatedTimestamp = 0;
|
||||
SnapshotAnimated::snapshotAnimatedFirstFrameTimestamp = 0;
|
||||
|
||||
// Kick off the thread that'll pack the frames into the GIF
|
||||
QtConcurrent::run(processFrames);
|
||||
// Stop the snapshot QTimer. This action by itself DOES NOT GUARANTEE
|
||||
// that the slot will not be called again in the future.
|
||||
// See: http://lists.qt-project.org/pipermail/qt-interest-old/2009-October/013926.html
|
||||
SnapshotAnimated::snapshotAnimatedTimer->stop();
|
||||
delete SnapshotAnimated::snapshotAnimatedTimer;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void SnapshotAnimated::processFrames() {
|
||||
uint32_t width = SnapshotAnimated::snapshotAnimatedFrameVector[0].width();
|
||||
uint32_t height = SnapshotAnimated::snapshotAnimatedFrameVector[0].height();
|
||||
|
||||
// Create the GIF from the temporary files
|
||||
// Write out the header and beginning of the GIF file
|
||||
GifBegin(
|
||||
&(SnapshotAnimated::snapshotAnimatedGifWriter),
|
||||
qPrintable(SnapshotAnimated::snapshotAnimatedPath),
|
||||
width,
|
||||
height,
|
||||
1); // "1" means "yes there is a delay" with this GifCreator library.
|
||||
for (int itr = 0; itr < SnapshotAnimated::snapshotAnimatedFrameVector.size(); itr++) {
|
||||
// Write each frame to the GIF
|
||||
GifWriteFrame(&(SnapshotAnimated::snapshotAnimatedGifWriter),
|
||||
(uint8_t*)SnapshotAnimated::snapshotAnimatedFrameVector[itr].convertToFormat(QImage::Format_RGBA8888).bits(),
|
||||
width,
|
||||
height,
|
||||
SnapshotAnimated::snapshotAnimatedFrameDelayVector[itr]);
|
||||
}
|
||||
// Write out the end of the GIF
|
||||
GifEnd(&(SnapshotAnimated::snapshotAnimatedGifWriter));
|
||||
|
||||
// Clear out the frame and frame delay vectors.
|
||||
// Also release the memory not required to store the items.
|
||||
SnapshotAnimated::snapshotAnimatedFrameVector.clear();
|
||||
SnapshotAnimated::snapshotAnimatedFrameVector.squeeze();
|
||||
SnapshotAnimated::snapshotAnimatedFrameDelayVector.clear();
|
||||
SnapshotAnimated::snapshotAnimatedFrameDelayVector.squeeze();
|
||||
|
||||
// Let the dependency manager know that the snapshots have been taken.
|
||||
emit SnapshotAnimated::snapshotAnimatedDM->snapshotTaken(SnapshotAnimated::snapshotStillPath, SnapshotAnimated::snapshotAnimatedPath, false);
|
||||
}
|
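GIF frame delays are expressed in hundredths of a second, which is why captureFrames() divides elapsed milliseconds by 10 before storing them. A small worked example (the millisecond values are illustrative):

// At the 25 fps target, the nominal frame interval is 1000 / 25 = 40 ms.
qint64 elapsedMsec = 52;                                   // e.g. this frame arrived 52 ms after the previous one
qint64 gifDelay = (qint64)round(elapsedMsec / 10.0f);      // 5 hundredths of a second is written for this frame
// Tagging each frame with its measured delay keeps the GIF's playback duration
// close to the wall-clock capture duration even when the framerate drops.
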
58
interface/src/ui/SnapshotAnimated.h
Normal file

@@ -0,0 +1,58 @@
//
// SnapshotAnimated.h
// interface/src/ui
//
// Created by Zach Fox on 11/14/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_SnapshotAnimated_h
#define hifi_SnapshotAnimated_h

#include <QtCore/QVector>
#include <Application.h>
#include <DependencyManager.h>
#include <GifCreator.h>
#include <qtimer.h>
#include <SettingHandle.h>
#include "scripting/WindowScriptingInterface.h"

// If the snapshot width or the framerate is too high for the
// application to handle, the framerate of the output GIF will drop.
#define SNAPSNOT_ANIMATED_WIDTH (480)
// This value should divide evenly into 100. Snapshot framerate is NOT guaranteed.
#define SNAPSNOT_ANIMATED_TARGET_FRAMERATE (25)
#define SNAPSNOT_ANIMATED_DURATION_SECS (3)
#define SNAPSNOT_ANIMATED_DURATION_MSEC (SNAPSNOT_ANIMATED_DURATION_SECS*1000)

#define SNAPSNOT_ANIMATED_FRAME_DELAY_MSEC (1000/SNAPSNOT_ANIMATED_TARGET_FRAMERATE)

class SnapshotAnimated {
private:
static QTimer* snapshotAnimatedTimer;
static qint64 snapshotAnimatedTimestamp;
static qint64 snapshotAnimatedFirstFrameTimestamp;
static bool snapshotAnimatedTimerRunning;
static QString snapshotStillPath;

static QString snapshotAnimatedPath;
static QVector<QImage> snapshotAnimatedFrameVector;
static QVector<qint64> snapshotAnimatedFrameDelayVector;
static QSharedPointer<WindowScriptingInterface> snapshotAnimatedDM;
static Application* app;
static float aspectRatio;

static GifWriter snapshotAnimatedGifWriter;

static void captureFrames();
static void processFrames();
public:
static void saveSnapshotAnimated(QString pathStill, float aspectRatio, Application* app, QSharedPointer<WindowScriptingInterface> dm);
static Setting::Handle<bool> alsoTakeAnimatedSnapshot;
static Setting::Handle<float> snapshotAnimatedDuration;
};

#endif // hifi_SnapshotAnimated_h

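The call site that drives this class is not part of this excerpt; a sketch of how a caller would plausibly use it, guarded by the new setting (the function and its parameters are assumptions made for illustration):

// Hypothetical caller; pathStill is the already-saved still snapshot.
void onStillSnapshotSaved(const QString& pathStill, bool notify, float aspectRatio, Application* app) {
    auto dm = DependencyManager::get<WindowScriptingInterface>();
    if (SnapshotAnimated::alsoTakeAnimatedSnapshot.get()) {
        // starts the capture timer; dm->snapshotTaken(still, gif, ...) fires once the GIF has been written
        SnapshotAnimated::saveSnapshotAnimated(pathStill, aspectRatio, app, dm);
    } else {
        emit dm->snapshotTaken(pathStill, "", notify);
    }
}
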
@ -39,6 +39,14 @@ Q_DECLARE_METATYPE(OverlayPropertyResult);
|
|||
QScriptValue OverlayPropertyResultToScriptValue(QScriptEngine* engine, const OverlayPropertyResult& value);
|
||||
void OverlayPropertyResultFromScriptValue(const QScriptValue& object, OverlayPropertyResult& value);
|
||||
|
||||
/**jsdoc
|
||||
* @typedef Overlays.RayToOverlayIntersectionResult
|
||||
* @property {bool} intersects True if the PickRay intersected with a 3D overlay.
|
||||
* @property {Overlays.OverlayID} overlayID The ID of the overlay that was intersected with.
|
||||
* @property {float} distance The distance from the PickRay origin to the intersection point.
|
||||
* @property {Vec3} surfaceNormal The normal of the surface that was intersected with.
|
||||
* @property {Vec3} intersection The point at which the PickRay intersected with the overlay.
|
||||
*/
|
||||
class RayToOverlayIntersectionResult {
|
||||
public:
|
||||
RayToOverlayIntersectionResult();
|
||||
|
@ -57,6 +65,16 @@ Q_DECLARE_METATYPE(RayToOverlayIntersectionResult);
|
|||
QScriptValue RayToOverlayIntersectionResultToScriptValue(QScriptEngine* engine, const RayToOverlayIntersectionResult& value);
|
||||
void RayToOverlayIntersectionResultFromScriptValue(const QScriptValue& object, RayToOverlayIntersectionResult& value);
|
||||
|
||||
/**jsdoc
|
||||
* @typedef {int} Overlays.OverlayID
|
||||
*/
|
||||
|
||||
/**jsdoc
|
||||
*
|
||||
* Overlays namespace...
|
||||
* @namespace Overlays
|
||||
*/
|
||||
|
||||
class Overlays : public QObject {
|
||||
Q_OBJECT
|
||||
|
||||
|
@ -72,57 +90,137 @@ public:
|
|||
Overlay::Pointer getOverlay(unsigned int id) const;
|
||||
OverlayPanel::Pointer getPanel(unsigned int id) const { return _panels[id]; }
|
||||
|
||||
void cleanupAllOverlays();
|
||||
|
||||
public slots:
|
||||
/// adds an overlay with the specific properties
|
||||
unsigned int addOverlay(const QString& type, const QVariant& properties);
|
||||
|
||||
/// adds an overlay that's already been created
|
||||
unsigned int addOverlay(Overlay* overlay) { return addOverlay(Overlay::Pointer(overlay)); }
|
||||
unsigned int addOverlay(Overlay::Pointer overlay);
|
||||
|
||||
/// clones an existing overlay
|
||||
void cleanupAllOverlays();
|
||||
|
||||
public slots:
|
||||
/**jsdoc
|
||||
* Add an overlay to the scene. The properties specified will depend
|
||||
* on the type of overlay that is being created.
|
||||
*
|
||||
* @function Overlays.addOverlay
|
||||
* @param {string} type The type of the overlay to add.
|
||||
* @param {Overlays.OverlayProperties} The properties of the overlay that you want to add.
|
||||
* @return {Overlays.OverlayID} The ID of the newly created overlay.
|
||||
*/
|
||||
unsigned int addOverlay(const QString& type, const QVariant& properties);
|
||||
|
||||
/**jsdoc
|
||||
* Create a clone of an existing overlay.
|
||||
*
|
||||
* @function Overlays.cloneOverlay
|
||||
* @param {Overlays.OverlayID} overlayID The ID of the overlay to clone.
|
||||
* @return {Overlays.OverlayID} The ID of the new overlay.
|
||||
*/
|
||||
unsigned int cloneOverlay(unsigned int id);
|
||||
|
||||
/// edits an overlay updating only the included properties, will return the identified OverlayID in case of
|
||||
/// successful edit, if the input id is for an unknown overlay this function will have no effect
|
||||
/**jsdoc
|
||||
* Edit an overlay's properties.
|
||||
*
|
||||
* @function Overlays.editOverlay
|
||||
* @param {Overlays.OverlayID} overlayID The ID of the overlay to edit.
|
||||
* @return {bool} `true` if the overlay was found and edited, otherwise false.
|
||||
*/
|
||||
bool editOverlay(unsigned int id, const QVariant& properties);
|
||||
|
||||
/// edits an overlay updating only the included properties, will return the identified OverlayID in case of
|
||||
/// successful edit, if the input id is for an unknown overlay this function will have no effect
|
||||
bool editOverlays(const QVariant& propertiesById);
|
||||
|
||||
/// deletes an overlay
|
||||
/**jsdoc
|
||||
* Delete an overlay.
|
||||
*
|
||||
* @function Overlays.deleteOverlay
|
||||
* @param {Overlays.OverlayID} overlayID The ID of the overlay to delete.
|
||||
*/
|
||||
void deleteOverlay(unsigned int id);
|
||||
|
||||
/// get the string type of the overlay used in addOverlay
|
||||
/**jsdoc
|
||||
* Get the type of an overlay.
|
||||
*
|
||||
* @function Overlays.getOverlayType
|
||||
* @param {Overlays.OverlayID} overlayID The ID of the overlay to get the type of.
|
||||
* @return {string} The type of the overlay if found, otherwise the empty string.
|
||||
*/
|
||||
QString getOverlayType(unsigned int overlayId) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the ID of the overlay at a particular point on the HUD/screen.
|
||||
*
|
||||
* @function Overlays.getOverlayAtPoint
|
||||
* @param {Vec2} point The point to check for an overlay.
|
||||
* @return {Overlays.OverlayID} The ID of the overlay at the point specified.
|
||||
* If no overlay is found, `0` will be returned.
|
||||
*/
|
||||
unsigned int getOverlayAtPoint(const glm::vec2& point);
|
||||
|
||||
/**jsdoc
|
||||
* Get the value of an overlay's property.
|
||||
*
|
||||
* @function Overlays.getProperty
|
||||
* @param {Overlays.OverlayID} The ID of the overlay to get the property of.
|
||||
* @param {string} The name of the property to get the value of.
|
||||
* @return {Object} The value of the property. If the overlay or the property could
|
||||
* not be found, `undefined` will be returned.
|
||||
*/
|
||||
OverlayPropertyResult getProperty(unsigned int id, const QString& property);
|
||||
|
||||
/**jsdoc
|
||||
* Find the closest 3D overlay hit by a pick ray.
|
||||
*
|
||||
* @function Overlays.findRayIntersection
|
||||
* @param {PickRay} The PickRay to use for finding overlays.
|
||||
* @return {Overlays.RayToOverlayIntersectionResult} The result of the ray cast.
|
||||
*/
|
||||
RayToOverlayIntersectionResult findRayIntersection(const PickRay& ray);
|
||||
|
||||
/**jsdoc
|
||||
* Check whether an overlay's assets have been loaded. For example, if the
|
||||
* overlay is an "image" overlay, this will indicate whether the its image
|
||||
* has loaded.
|
||||
* @function Overlays.isLoaded
|
||||
* @param {Overlays.OverlayID} The ID of the overlay to check.
|
||||
* @return {bool} `true` if the overlay's assets have been loaded, otherwise `false`.
|
||||
*/
|
||||
bool isLoaded(unsigned int id);
|
||||
|
||||
/**jsdoc
|
||||
* Calculates the size of the given text in the specified overlay if it is a text overlay.
|
||||
* If it is a 2D text overlay, the size will be in pixels.
|
||||
* If it is a 3D text overlay, the size will be in meters.
|
||||
*
|
||||
* @function Overlays.textSize
|
||||
* @param {Overlays.OverlayID} The ID of the overlay to measure.
|
||||
* @param {string} The string to measure.
|
||||
* @return {Vec2} The size of the text.
|
||||
*/
|
||||
QSizeF textSize(unsigned int id, const QString& text) const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the width of the virtual 2D HUD.
|
||||
*
|
||||
* @function Overlays.width
|
||||
* @return {float} The width of the 2D HUD.
|
||||
*/
|
||||
float width() const;
|
||||
|
||||
/**jsdoc
|
||||
* Get the height of the virtual 2D HUD.
|
||||
*
|
||||
* @function Overlays.height
|
||||
* @return {float} The height of the 2D HUD.
|
||||
*/
|
||||
float height() const;
|
||||
|
||||
/// return true if there is an overlay with that id else false
|
||||
bool isAddedOverlay(unsigned int id);
|
||||
|
||||
unsigned int getParentPanel(unsigned int childId) const;
|
||||
void setParentPanel(unsigned int childId, unsigned int panelId);
|
||||
|
||||
/// returns the top most 2D overlay at the screen point, or 0 if not overlay at that point
|
||||
unsigned int getOverlayAtPoint(const glm::vec2& point);
|
||||
|
||||
/// returns the value of specified property, or null if there is no such property
|
||||
OverlayPropertyResult getProperty(unsigned int id, const QString& property);
|
||||
|
||||
/// returns details about the closest 3D Overlay hit by the pick ray
|
||||
RayToOverlayIntersectionResult findRayIntersection(const PickRay& ray);
|
||||
|
||||
/// returns whether the overlay's assets are loaded or not
|
||||
bool isLoaded(unsigned int id);
|
||||
|
||||
/// returns the size of the given text in the specified overlay if it is a text overlay: in pixels if it is a 2D text
|
||||
/// overlay; in meters if it is a 3D text overlay
|
||||
QSizeF textSize(unsigned int id, const QString& text) const;
|
||||
|
||||
// Return the size of the virtual screen
|
||||
float width() const;
|
||||
float height() const;
|
||||
|
||||
|
||||
/// adds a panel that has already been created
|
||||
unsigned int addPanel(OverlayPanel::Pointer panel);
|
||||
|
||||
|
@ -138,13 +236,16 @@ public slots:
|
|||
/// deletes a panel and all child overlays
|
||||
void deletePanel(unsigned int panelId);
|
||||
|
||||
/// return true if there is an overlay with that id else false
|
||||
bool isAddedOverlay(unsigned int id);
|
||||
|
||||
/// return true if there is a panel with that id else false
|
||||
bool isAddedPanel(unsigned int id) { return _panels.contains(id); }
|
||||
|
||||
signals:
|
||||
/**jsdoc
|
||||
* Emitted when an overlay is deleted
|
||||
*
|
||||
* @function Overlays.overlayDeleted
|
||||
* @param {OverlayID} The ID of the overlay that was deleted.
|
||||
*/
|
||||
void overlayDeleted(unsigned int id);
|
||||
void panelDeleted(unsigned int id);
|
||||
|
||||
|
|
|
@ -61,6 +61,10 @@ static const auto DEFAULT_ORIENTATION_GETTER = [] { return Quaternions::IDENTITY
|
|||
|
||||
static const int DEFAULT_BUFFER_FRAMES = 1;
|
||||
|
||||
// OUTPUT_CHANNEL_COUNT is audio pipeline output format, which is always 2 channel.
|
||||
// _outputFormat.channelCount() is device output format, which may be 1 or multichannel.
|
||||
static const int OUTPUT_CHANNEL_COUNT = 2;
|
||||
|
||||
static const bool DEFAULT_STARVE_DETECTION_ENABLED = true;
|
||||
static const int STARVE_DETECTION_THRESHOLD = 3;
|
||||
static const int STARVE_DETECTION_PERIOD = 10 * 1000; // 10 Seconds
|
||||
|
@ -140,7 +144,7 @@ AudioClient::AudioClient() :
|
|||
_reverbOptions(&_scriptReverbOptions),
|
||||
_inputToNetworkResampler(NULL),
|
||||
_networkToOutputResampler(NULL),
|
||||
_audioLimiter(AudioConstants::SAMPLE_RATE, AudioConstants::STEREO),
|
||||
_audioLimiter(AudioConstants::SAMPLE_RATE, OUTPUT_CHANNEL_COUNT),
|
||||
_outgoingAvatarAudioSequenceNumber(0),
|
||||
_audioOutputIODevice(_receivedAudioStream, this),
|
||||
_stats(&_receivedAudioStream),
|
||||
|
@ -237,14 +241,6 @@ QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& de
|
|||
return result;
|
||||
}
|
||||
|
||||
int numDestinationSamplesRequired(const QAudioFormat& sourceFormat, const QAudioFormat& destinationFormat,
|
||||
int numSourceSamples) {
|
||||
float ratio = (float) destinationFormat.channelCount() / sourceFormat.channelCount();
|
||||
ratio *= (float) destinationFormat.sampleRate() / sourceFormat.sampleRate();
|
||||
|
||||
return (numSourceSamples * ratio) + 0.5f;
|
||||
}
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
QString friendlyNameForAudioDevice(IMMDevice* pEndpoint) {
|
||||
QString deviceName;
|
||||
|
@ -387,14 +383,36 @@ bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
|
|||
|
||||
adjustedAudioFormat = desiredAudioFormat;
|
||||
|
||||
#ifdef Q_OS_ANDROID
|
||||
#if defined(Q_OS_WIN)
|
||||
|
||||
// On Windows, using WASAPI shared mode, the sample rate and channel count must
|
||||
// exactly match the internal mix format. Any other format will fail to open.
|
||||
|
||||
adjustedAudioFormat = audioDevice.preferredFormat(); // returns mixFormat
|
||||
|
||||
adjustedAudioFormat.setCodec("audio/pcm");
|
||||
adjustedAudioFormat.setSampleSize(16);
|
||||
adjustedAudioFormat.setSampleType(QAudioFormat::SignedInt);
|
||||
adjustedAudioFormat.setByteOrder(QAudioFormat::LittleEndian);
|
||||
|
||||
if (!audioDevice.isFormatSupported(adjustedAudioFormat)) {
|
||||
qCDebug(audioclient) << "WARNING: The mix format is" << adjustedAudioFormat << "but isFormatSupported() failed.";
|
||||
return false;
|
||||
}
|
||||
// converting to/from this rate must produce an integral number of samples
|
||||
if (adjustedAudioFormat.sampleRate() * AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL % AudioConstants::SAMPLE_RATE != 0) {
|
||||
qCDebug(audioclient) << "WARNING: The current sample rate [" << adjustedAudioFormat.sampleRate() << "] is not supported.";
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
|
||||
#elif defined(Q_OS_ANDROID)
|
||||
// FIXME: query the native sample rate of the device?
|
||||
adjustedAudioFormat.setSampleRate(48000);
|
||||
#else
|
||||
|
||||
//
|
||||
// Attempt the device sample rate in decreasing order of preference.
|
||||
// On Windows, using WASAPI shared mode, only a match with the hardware sample rate will succeed.
|
||||
//
|
||||
if (audioDevice.supportedSampleRates().contains(48000)) {
|
||||
adjustedAudioFormat.setSampleRate(48000);
|
||||
|
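The divisibility check above is what decides whether a device rate can be converted to the network rate without fractional frame sizes. A worked example, assuming (for illustration only; the real values live in AudioConstants) a 24 kHz network SAMPLE_RATE and 240-sample network frames:

// Illustrative numbers only.
// 44100 Hz device rate: 44100 * 240 % 24000 == 0  -> 441 device samples per network frame, accepted.
// 22050 Hz device rate: 22050 * 240 % 24000 != 0  -> 220.5 samples per frame, rejected.
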
@ -427,15 +445,15 @@ bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
|
|||
}
|
||||
|
||||
bool sampleChannelConversion(const int16_t* sourceSamples, int16_t* destinationSamples, unsigned int numSourceSamples,
|
||||
const QAudioFormat& sourceAudioFormat, const QAudioFormat& destinationAudioFormat) {
|
||||
if (sourceAudioFormat.channelCount() == 2 && destinationAudioFormat.channelCount() == 1) {
|
||||
const int sourceChannelCount, const int destinationChannelCount) {
|
||||
if (sourceChannelCount == 2 && destinationChannelCount == 1) {
|
||||
// loop through the stereo input audio samples and average every two samples
|
||||
for (uint i = 0; i < numSourceSamples; i += 2) {
|
||||
destinationSamples[i / 2] = (sourceSamples[i] / 2) + (sourceSamples[i + 1] / 2);
|
||||
}
|
||||
|
||||
return true;
|
||||
} else if (sourceAudioFormat.channelCount() == 1 && destinationAudioFormat.channelCount() == 2) {
|
||||
} else if (sourceChannelCount == 1 && destinationChannelCount == 2) {
|
||||
|
||||
// loop through the mono input audio and repeat each sample twice
|
||||
for (uint i = 0; i < numSourceSamples; ++i) {
|
||||
|
@ -451,26 +469,24 @@ bool sampleChannelConversion(const int16_t* sourceSamples, int16_t* destinationS
|
|||
void possibleResampling(AudioSRC* resampler,
|
||||
const int16_t* sourceSamples, int16_t* destinationSamples,
|
||||
unsigned int numSourceSamples, unsigned int numDestinationSamples,
|
||||
const QAudioFormat& sourceAudioFormat, const QAudioFormat& destinationAudioFormat) {
|
||||
const int sourceChannelCount, const int destinationChannelCount) {
|
||||
|
||||
if (numSourceSamples > 0) {
|
||||
if (!resampler) {
|
||||
if (!sampleChannelConversion(sourceSamples, destinationSamples, numSourceSamples,
|
||||
sourceAudioFormat, destinationAudioFormat)) {
|
||||
sourceChannelCount, destinationChannelCount)) {
|
||||
// no conversion, we can copy the samples directly across
|
||||
memcpy(destinationSamples, sourceSamples, numSourceSamples * AudioConstants::SAMPLE_SIZE);
|
||||
}
|
||||
} else {
|
||||
|
||||
if (sourceAudioFormat.channelCount() != destinationAudioFormat.channelCount()) {
|
||||
float channelCountRatio = (float)destinationAudioFormat.channelCount() / sourceAudioFormat.channelCount();
|
||||
if (sourceChannelCount != destinationChannelCount) {
|
||||
|
||||
int numChannelCoversionSamples = (int)(numSourceSamples * channelCountRatio);
|
||||
int numChannelCoversionSamples = (numSourceSamples * destinationChannelCount) / sourceChannelCount;
|
||||
int16_t* channelConversionSamples = new int16_t[numChannelCoversionSamples];
|
||||
|
||||
sampleChannelConversion(sourceSamples, channelConversionSamples,
|
||||
numSourceSamples,
|
||||
sourceAudioFormat, destinationAudioFormat);
|
||||
sampleChannelConversion(sourceSamples, channelConversionSamples, numSourceSamples,
|
||||
sourceChannelCount, destinationChannelCount);
|
||||
|
||||
resampler->render(channelConversionSamples, destinationSamples, numChannelCoversionSamples);
|
||||
|
||||
|
@ -480,7 +496,7 @@ void possibleResampling(AudioSRC* resampler,
|
|||
unsigned int numAdjustedSourceSamples = numSourceSamples;
|
||||
unsigned int numAdjustedDestinationSamples = numDestinationSamples;
|
||||
|
||||
if (sourceAudioFormat.channelCount() == 2 && destinationAudioFormat.channelCount() == 2) {
|
||||
if (sourceChannelCount == 2 && destinationChannelCount == 2) {
|
||||
numAdjustedSourceSamples /= 2;
|
||||
numAdjustedDestinationSamples /= 2;
|
||||
}
|
||||
|
@ -502,7 +518,7 @@ void AudioClient::start() {
|
|||
_desiredInputFormat.setChannelCount(1);
|
||||
|
||||
_desiredOutputFormat = _desiredInputFormat;
|
||||
_desiredOutputFormat.setChannelCount(2);
|
||||
_desiredOutputFormat.setChannelCount(OUTPUT_CHANNEL_COUNT);
|
||||
|
||||
QAudioDeviceInfo inputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioInput);
|
||||
qCDebug(audioclient) << "The default audio input device is" << inputDeviceInfo.deviceName();
|
||||
|
@ -824,6 +840,36 @@ void AudioClient::setReverbOptions(const AudioEffectOptions* options) {
|
|||
}
|
||||
}
|
||||
|
||||
static void channelUpmix(int16_t* source, int16_t* dest, int numSamples, int numExtraChannels) {
|
||||
|
||||
for (int i = 0; i < numSamples/2; i++) {
|
||||
|
||||
// read 2 samples
|
||||
int16_t left = *source++;
|
||||
int16_t right = *source++;
|
||||
|
||||
// write 2 + N samples
|
||||
*dest++ = left;
|
||||
*dest++ = right;
|
||||
for (int n = 0; n < numExtraChannels; n++) {
|
||||
*dest++ = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static void channelDownmix(int16_t* source, int16_t* dest, int numSamples) {
|
||||
|
||||
for (int i = 0; i < numSamples/2; i++) {
|
||||
|
||||
// read 2 samples
|
||||
int16_t left = *source++;
|
||||
int16_t right = *source++;
|
||||
|
||||
// write 1 sample
|
||||
*dest++ = (int16_t)((left + right) / 2);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
||||
// If there is server echo, reverb will be applied to the received audio stream, so there is no need to apply it here.
|
||||
bool hasReverb = _reverb || _receivedAudioStream.hasReverb();
|
||||
|
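The channelUpmix/channelDownmix helpers added above assume interleaved 16-bit stereo input. A minimal illustration with hypothetical buffers (not part of the change):

// Two stereo frames, interleaved: L0, R0, L1, R1.
int16_t stereo[4] = { 100, 200, 300, 400 };

int16_t quad[8];
channelUpmix(stereo, quad, 4, 2);    // -> 100, 200, 0, 0, 300, 400, 0, 0 (extra channels zero-filled)

int16_t mono[2];
channelDownmix(stereo, mono, 4);     // -> 150, 350 (each left/right pair averaged)
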
@ -857,7 +903,7 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
|||
static QByteArray loopBackByteArray;
|
||||
|
||||
int numInputSamples = inputByteArray.size() / AudioConstants::SAMPLE_SIZE;
|
||||
int numLoopbackSamples = numDestinationSamplesRequired(_inputFormat, _outputFormat, numInputSamples);
|
||||
int numLoopbackSamples = (numInputSamples * OUTPUT_CHANNEL_COUNT) / _inputFormat.channelCount();
|
||||
|
||||
loopBackByteArray.resize(numLoopbackSamples * AudioConstants::SAMPLE_SIZE);
|
||||
|
||||
|
@ -865,7 +911,7 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
|||
int16_t* loopbackSamples = reinterpret_cast<int16_t*>(loopBackByteArray.data());
|
||||
|
||||
// upmix mono to stereo
|
||||
if (!sampleChannelConversion(inputSamples, loopbackSamples, numInputSamples, _inputFormat, _outputFormat)) {
|
||||
if (!sampleChannelConversion(inputSamples, loopbackSamples, numInputSamples, _inputFormat.channelCount(), OUTPUT_CHANNEL_COUNT)) {
|
||||
// no conversion, just copy the samples
|
||||
memcpy(loopbackSamples, inputSamples, numInputSamples * AudioConstants::SAMPLE_SIZE);
|
||||
}
|
||||
|
@ -876,7 +922,29 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
|||
_sourceReverb.render(loopbackSamples, loopbackSamples, numLoopbackSamples/2);
|
||||
}
|
||||
|
||||
_loopbackOutputDevice->write(loopBackByteArray);
|
||||
// if required, upmix or downmix to deviceChannelCount
|
||||
int deviceChannelCount = _outputFormat.channelCount();
|
||||
if (deviceChannelCount == OUTPUT_CHANNEL_COUNT) {
|
||||
|
||||
_loopbackOutputDevice->write(loopBackByteArray);
|
||||
|
||||
} else {
|
||||
|
||||
static QByteArray deviceByteArray;
|
||||
|
||||
int numDeviceSamples = (numLoopbackSamples * deviceChannelCount) / OUTPUT_CHANNEL_COUNT;
|
||||
|
||||
deviceByteArray.resize(numDeviceSamples * AudioConstants::SAMPLE_SIZE);
|
||||
|
||||
int16_t* deviceSamples = reinterpret_cast<int16_t*>(deviceByteArray.data());
|
||||
|
||||
if (deviceChannelCount > OUTPUT_CHANNEL_COUNT) {
|
||||
channelUpmix(loopbackSamples, deviceSamples, numLoopbackSamples, deviceChannelCount - OUTPUT_CHANNEL_COUNT);
|
||||
} else {
|
||||
channelDownmix(loopbackSamples, deviceSamples, numLoopbackSamples);
|
||||
}
|
||||
_loopbackOutputDevice->write(deviceByteArray);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioClient::handleAudioInput() {
|
||||
|
@ -923,7 +991,7 @@ void AudioClient::handleAudioInput() {
|
|||
possibleResampling(_inputToNetworkResampler,
|
||||
inputAudioSamples.get(), networkAudioSamples,
|
||||
inputSamplesRequired, numNetworkSamples,
|
||||
_inputFormat, _desiredInputFormat);
|
||||
_inputFormat.channelCount(), _desiredInputFormat.channelCount());
|
||||
|
||||
// Remove DC offset
|
||||
if (!_isStereoInput) {
|
||||
|
@ -1170,9 +1238,9 @@ bool AudioClient::outputLocalInjector(bool isStereo, AudioInjector* injector) {
|
|||
}
|
||||
|
||||
void AudioClient::outputFormatChanged() {
|
||||
_outputFrameSize = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * _outputFormat.channelCount() * _outputFormat.sampleRate()) /
|
||||
_outputFrameSize = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * OUTPUT_CHANNEL_COUNT * _outputFormat.sampleRate()) /
|
||||
_desiredOutputFormat.sampleRate();
|
||||
_receivedAudioStream.outputFormatChanged(_outputFormat.sampleRate(), _outputFormat.channelCount());
|
||||
_receivedAudioStream.outputFormatChanged(_outputFormat.sampleRate(), OUTPUT_CHANNEL_COUNT);
|
||||
}
|
||||
|
||||
bool AudioClient::switchInputToAudioDevice(const QAudioDeviceInfo& inputDeviceInfo) {
|
||||
|
@ -1316,9 +1384,8 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
|
|||
|
||||
assert(_desiredOutputFormat.sampleSize() == 16);
|
||||
assert(_outputFormat.sampleSize() == 16);
|
||||
int channelCount = (_desiredOutputFormat.channelCount() == 2 && _outputFormat.channelCount() == 2) ? 2 : 1;
|
||||
|
||||
_networkToOutputResampler = new AudioSRC(_desiredOutputFormat.sampleRate(), _outputFormat.sampleRate(), channelCount);
|
||||
_networkToOutputResampler = new AudioSRC(_desiredOutputFormat.sampleRate(), _outputFormat.sampleRate(), OUTPUT_CHANNEL_COUNT);
|
||||
|
||||
} else {
|
||||
qCDebug(audioclient) << "No resampling required for network output to match actual output format.";
|
||||
|
@ -1328,8 +1395,11 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
|
|||
|
||||
// setup our general output device for audio-mixer audio
|
||||
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
|
||||
|
||||
int osDefaultBufferSize = _audioOutput->bufferSize();
|
||||
int requestedSize = _sessionOutputBufferSizeFrames *_outputFrameSize * AudioConstants::SAMPLE_SIZE;
|
||||
int deviceChannelCount = _outputFormat.channelCount();
|
||||
int deviceFrameSize = (AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * deviceChannelCount * _outputFormat.sampleRate()) / _desiredOutputFormat.sampleRate();
|
||||
int requestedSize = _sessionOutputBufferSizeFrames * deviceFrameSize * AudioConstants::SAMPLE_SIZE;
|
||||
_audioOutput->setBufferSize(requestedSize);
|
||||
|
||||
connect(_audioOutput, &QAudioOutput::notify, this, &AudioClient::outputNotify);
|
||||
|
@ -1341,14 +1411,13 @@ bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDevice
|
|||
_audioOutput->start(&_audioOutputIODevice);
|
||||
lock.unlock();
|
||||
|
||||
qCDebug(audioclient) << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / AudioConstants::SAMPLE_SIZE / (float)_outputFrameSize <<
|
||||
qCDebug(audioclient) << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / AudioConstants::SAMPLE_SIZE / (float)deviceFrameSize <<
|
||||
"requested bytes:" << requestedSize << "actual bytes:" << _audioOutput->bufferSize() <<
|
||||
"os default:" << osDefaultBufferSize << "period size:" << _audioOutput->periodSize();
|
||||
|
||||
// setup a loopback audio output device
|
||||
_loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
|
||||
|
||||
|
||||
_timeSinceLastReceived.start();
|
||||
|
||||
supportedFormat = true;
|
||||
|
@ -1447,15 +1516,27 @@ float AudioClient::gainForSource(float distance, float volume) {
|
|||
}
|
||||
|
||||
qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
|
||||
auto samplesRequested = maxSize / AudioConstants::SAMPLE_SIZE;
|
||||
|
||||
// samples requested from OUTPUT_CHANNEL_COUNT
|
||||
int deviceChannelCount = _audio->_outputFormat.channelCount();
|
||||
int samplesRequested = (int)(maxSize / AudioConstants::SAMPLE_SIZE) * OUTPUT_CHANNEL_COUNT / deviceChannelCount;
|
||||
|
||||
int samplesPopped;
|
||||
int bytesWritten;
|
||||
|
||||
if ((samplesPopped = _receivedAudioStream.popSamples((int)samplesRequested, false)) > 0) {
|
||||
if ((samplesPopped = _receivedAudioStream.popSamples(samplesRequested, false)) > 0) {
|
||||
qCDebug(audiostream, "Read %d samples from buffer (%d available)", samplesPopped, _receivedAudioStream.getSamplesAvailable());
|
||||
AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput();
|
||||
lastPopOutput.readSamples((int16_t*)data, samplesPopped);
|
||||
bytesWritten = samplesPopped * AudioConstants::SAMPLE_SIZE;
|
||||
|
||||
// if required, upmix or downmix to deviceChannelCount
|
||||
if (deviceChannelCount == OUTPUT_CHANNEL_COUNT) {
|
||||
lastPopOutput.readSamples((int16_t*)data, samplesPopped);
|
||||
} else if (deviceChannelCount > OUTPUT_CHANNEL_COUNT) {
|
||||
lastPopOutput.readSamplesWithUpmix((int16_t*)data, samplesPopped, deviceChannelCount - OUTPUT_CHANNEL_COUNT);
|
||||
} else {
|
||||
lastPopOutput.readSamplesWithDownmix((int16_t*)data, samplesPopped);
|
||||
}
|
||||
bytesWritten = (samplesPopped * AudioConstants::SAMPLE_SIZE) * deviceChannelCount / OUTPUT_CHANNEL_COUNT;
|
||||
} else {
|
||||
// nothing on network, don't grab anything from injectors, and just return 0s
|
||||
// this will flood the log: qCDebug(audioclient, "empty/partial network buffer");
|
||||
|
|
|
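readData now translates the device's byte budget into pipeline-domain (stereo) samples and back. A worked example, assuming for illustration 16-bit samples (SAMPLE_SIZE == 2) and a 6-channel output device:

// maxSize = 960 bytes requested by a 6-channel device, OUTPUT_CHANNEL_COUNT = 2:
//   samplesRequested = (960 / 2) * 2 / 6 = 160 stereo-domain samples to pop.
// If all 160 are popped and upmixed with 4 extra (zeroed) channels:
//   bytesWritten = (160 * 2) * 6 / 2 = 960 bytes, exactly filling the request.
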
@ -105,6 +105,8 @@ public:
|
|||
|
||||
void readSamples(int16_t* dest, int numSamples);
|
||||
void readSamplesWithFade(int16_t* dest, int numSamples, float fade);
|
||||
void readSamplesWithUpmix(int16_t* dest, int numSamples, int numExtraChannels);
|
||||
void readSamplesWithDownmix(int16_t* dest, int numSamples);
|
||||
|
||||
private:
|
||||
int16_t* atShiftedBy(int i);
|
||||
|
@ -225,6 +227,40 @@ inline void AudioRingBuffer::ConstIterator::readSamplesWithFade(int16_t* dest, i
|
|||
}
|
||||
}
|
||||
|
||||
inline void AudioRingBuffer::ConstIterator::readSamplesWithUpmix(int16_t* dest, int numSamples, int numExtraChannels) {
|
||||
int16_t* at = _at;
|
||||
for (int i = 0; i < numSamples/2; i++) {
|
||||
|
||||
// read 2 samples
|
||||
int16_t left = *at;
|
||||
at = (at == _bufferLast) ? _bufferFirst : at + 1;
|
||||
int16_t right = *at;
|
||||
at = (at == _bufferLast) ? _bufferFirst : at + 1;
|
||||
|
||||
// write 2 + N samples
|
||||
*dest++ = left;
|
||||
*dest++ = right;
|
||||
for (int n = 0; n < numExtraChannels; n++) {
|
||||
*dest++ = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
inline void AudioRingBuffer::ConstIterator::readSamplesWithDownmix(int16_t* dest, int numSamples) {
|
||||
int16_t* at = _at;
|
||||
for (int i = 0; i < numSamples/2; i++) {
|
||||
|
||||
// read 2 samples
|
||||
int16_t left = *at;
|
||||
at = (at == _bufferLast) ? _bufferFirst : at + 1;
|
||||
int16_t right = *at;
|
||||
at = (at == _bufferLast) ? _bufferFirst : at + 1;
|
||||
|
||||
// write 1 sample
|
||||
*dest++ = (int16_t)((left + right) / 2);
|
||||
}
|
||||
}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator AudioRingBuffer::nextOutput() const {
|
||||
return ConstIterator(_buffer, _bufferLength, _nextOutput);
|
||||
}
|
||||
|
|
|
@ -24,6 +24,13 @@ EntityItemPointer RenderableLineEntityItem::factory(const EntityItemID& entityID
|
|||
return entity;
|
||||
}
|
||||
|
||||
RenderableLineEntityItem::~RenderableLineEntityItem() {
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
if (geometryCache) {
|
||||
geometryCache->releaseID(_lineVerticesID);
|
||||
}
|
||||
}
|
||||
|
||||
void RenderableLineEntityItem::updateGeometry() {
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
if (_lineVerticesID == GeometryCache::UNKNOWN_ID) {
|
||||
|
|
|
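The new destructor pairs a release with the lazy allocation that updateGeometry() performs on first use. The pattern in isolation, as a sketch (allocateID() is assumed here to be the GeometryCache counterpart used when _lineVerticesID is still UNKNOWN_ID):

// Sketch of the acquire/release pairing this destructor completes; not verbatim from the diff.
if (_lineVerticesID == GeometryCache::UNKNOWN_ID) {
    _lineVerticesID = geometryCache->allocateID();   // acquired once, on first geometry update
}
// ... later, in ~RenderableLineEntityItem():
geometryCache->releaseID(_lineVerticesID);           // returned to the cache when the entity goes away
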
@ -23,6 +23,7 @@ public:
|
|||
LineEntityItem(entityItemID),
|
||||
_lineVerticesID(GeometryCache::UNKNOWN_ID)
|
||||
{ }
|
||||
~RenderableLineEntityItem();
|
||||
|
||||
virtual void render(RenderArgs* args) override;
|
||||
|
||||
|
|
|
@ -1139,8 +1139,8 @@ void RenderablePolyVoxEntityItem::getMesh() {
|
|||
auto indexBuffer = std::make_shared<gpu::Buffer>(vecIndices.size() * sizeof(uint32_t),
|
||||
(gpu::Byte*)vecIndices.data());
|
||||
auto indexBufferPtr = gpu::BufferPointer(indexBuffer);
|
||||
auto indexBufferView = new gpu::BufferView(indexBufferPtr, gpu::Element(gpu::SCALAR, gpu::UINT32, gpu::RAW));
|
||||
mesh->setIndexBuffer(*indexBufferView);
|
||||
gpu::BufferView indexBufferView(indexBufferPtr, gpu::Element(gpu::SCALAR, gpu::UINT32, gpu::RAW));
|
||||
mesh->setIndexBuffer(indexBufferView);
|
||||
|
||||
const std::vector<PolyVox::PositionMaterialNormal>& vecVertices = polyVoxMesh.getVertices();
|
||||
auto vertexBuffer = std::make_shared<gpu::Buffer>(vecVertices.size() * sizeof(PolyVox::PositionMaterialNormal),
|
||||
|
@ -1150,10 +1150,10 @@ void RenderablePolyVoxEntityItem::getMesh() {
|
|||
if (vertexBufferPtr->getSize() > sizeof(float) * 3) {
|
||||
vertexBufferSize = vertexBufferPtr->getSize() - sizeof(float) * 3;
|
||||
}
|
||||
auto vertexBufferView = new gpu::BufferView(vertexBufferPtr, 0, vertexBufferSize,
|
||||
sizeof(PolyVox::PositionMaterialNormal),
|
||||
gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::RAW));
|
||||
mesh->setVertexBuffer(*vertexBufferView);
|
||||
gpu::BufferView vertexBufferView(vertexBufferPtr, 0, vertexBufferSize,
|
||||
sizeof(PolyVox::PositionMaterialNormal),
|
||||
gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::RAW));
|
||||
mesh->setVertexBuffer(vertexBufferView);
|
||||
mesh->addAttribute(gpu::Stream::NORMAL,
|
||||
gpu::BufferView(vertexBufferPtr,
|
||||
sizeof(float) * 3,
|
||||
|
@ -1323,14 +1323,14 @@ void RenderablePolyVoxEntityItem::setCollisionPoints(ShapeInfo::PointCollection
|
|||
// include the registrationPoint in the shape key, because the offset is already
|
||||
// included in the points and the shapeManager won't know that the shape has changed.
|
||||
withWriteLock([&] {
|
||||
QString shapeKey = QString(_voxelData.toBase64()) + "," +
|
||||
QString::number(_registrationPoint.x) + "," +
|
||||
QString::number(_registrationPoint.y) + "," +
|
||||
QString::number(_registrationPoint.z);
|
||||
_shapeInfo.setParams(SHAPE_TYPE_COMPOUND, collisionModelDimensions, shapeKey);
|
||||
_shapeInfo.setPointCollection(pointCollection);
|
||||
_meshDirty = false;
|
||||
});
|
||||
QString shapeKey = QString(_voxelData.toBase64()) + "," +
|
||||
QString::number(_registrationPoint.x) + "," +
|
||||
QString::number(_registrationPoint.y) + "," +
|
||||
QString::number(_registrationPoint.z);
|
||||
_shapeInfo.setParams(SHAPE_TYPE_COMPOUND, collisionModelDimensions, shapeKey);
|
||||
_shapeInfo.setPointCollection(pointCollection);
|
||||
_meshDirty = false;
|
||||
});
|
||||
}
|
||||
|
||||
void RenderablePolyVoxEntityItem::setXNNeighborID(const EntityItemID& xNNeighborID) {
|
||||
|
@ -1439,3 +1439,16 @@ void RenderablePolyVoxEntityItem::bonkNeighbors() {
|
|||
currentZNNeighbor->setVolDataDirty();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void RenderablePolyVoxEntityItem::locationChanged(bool tellPhysics) {
|
||||
EntityItem::locationChanged(tellPhysics);
|
||||
if (!_pipeline || !render::Item::isValidID(_myItem)) {
|
||||
return;
|
||||
}
|
||||
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
pendingChanges.updateItem<PolyVoxPayload>(_myItem, [](PolyVoxPayload& payload) {});
|
||||
|
||||
scene->enqueuePendingChanges(pendingChanges);
|
||||
}
|
||||
|
|
|
@ -141,6 +141,9 @@ public:
|
|||
// Transparent polyvox didn't seem to be working so disable for now
|
||||
bool isTransparent() override { return false; }
|
||||
|
||||
protected:
|
||||
virtual void locationChanged(bool tellPhysics = true) override;
|
||||
|
||||
private:
|
||||
// The PolyVoxEntityItem class has _voxelData which contains dimensions and compressed voxel data. The dimensions
|
||||
// may not match _voxelVolumeSize.
|
||||
|
|
|
@ -31,7 +31,7 @@
|
|||
const float METERS_TO_INCHES = 39.3701f;
|
||||
static uint32_t _currentWebCount { 0 };
|
||||
// Don't allow more than 100 concurrent web views
|
||||
static const uint32_t MAX_CONCURRENT_WEB_VIEWS = 100;
|
||||
static const uint32_t MAX_CONCURRENT_WEB_VIEWS = 20;
|
||||
// If a web-view hasn't been rendered for 30 seconds, de-allocate the framebuffer
|
||||
static uint64_t MAX_NO_RENDER_INTERVAL = 30 * USECS_PER_SECOND;
|
||||
|
||||
|
@ -69,8 +69,6 @@ bool RenderableWebEntityItem::buildWebSurface(QSharedPointer<EntityTreeRenderer>
|
|||
qWarning() << "Too many concurrent web views to create new view";
|
||||
return false;
|
||||
}
|
||||
qDebug() << "Building web surface";
|
||||
|
||||
QString javaScriptToInject;
|
||||
QFile webChannelFile(":qtwebchannel/qwebchannel.js");
|
||||
QFile createGlobalEventBridgeFile(PathUtils::resourcesPath() + "/html/createGlobalEventBridge.js");
|
||||
|
@ -85,12 +83,15 @@ bool RenderableWebEntityItem::buildWebSurface(QSharedPointer<EntityTreeRenderer>
|
|||
qCWarning(entitiesrenderer) << "unable to find qwebchannel.js or createGlobalEventBridge.js";
|
||||
}
|
||||
|
||||
++_currentWebCount;
|
||||
// Save the original GL context, because creating a QML surface will create a new context
|
||||
QOpenGLContext * currentContext = QOpenGLContext::currentContext();
|
||||
if (!currentContext) {
|
||||
return false;
|
||||
}
|
||||
|
||||
++_currentWebCount;
|
||||
qDebug() << "Building web surface: " << getID() << ", #" << _currentWebCount << ", url = " << _sourceUrl;
|
||||
|
||||
QSurface * currentSurface = currentContext->surface();
|
||||
|
||||
auto deleter = [](OffscreenQmlSurface* webSurface) {
|
||||
|
@ -356,6 +357,8 @@ void RenderableWebEntityItem::destroyWebSurface() {
|
|||
QObject::disconnect(_hoverLeaveConnection);
|
||||
_hoverLeaveConnection = QMetaObject::Connection();
|
||||
_webSurface.reset();
|
||||
|
||||
qDebug() << "Delete web surface: " << getID() << ", #" << _currentWebCount << ", url = " << _sourceUrl;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -438,6 +438,7 @@ void EntityTree::deleteEntity(const EntityItemID& entityID, bool force, bool ign
|
|||
return;
|
||||
}
|
||||
|
||||
unhookChildAvatar(entityID);
|
||||
emit deletingEntity(entityID);
|
||||
|
||||
// NOTE: callers must lock the tree before using this method
|
||||
|
@ -447,6 +448,17 @@ void EntityTree::deleteEntity(const EntityItemID& entityID, bool force, bool ign
|
|||
_isDirty = true;
|
||||
}
|
||||
|
||||
void EntityTree::unhookChildAvatar(const EntityItemID entityID) {
|
||||
|
||||
EntityItemPointer entity = findEntityByEntityItemID(entityID);
|
||||
|
||||
entity->forEachDescendant([&](SpatiallyNestablePointer child) {
|
||||
if (child->getNestableType() == NestableType::Avatar) {
|
||||
child->setParentID(nullptr);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
void EntityTree::deleteEntities(QSet<EntityItemID> entityIDs, bool force, bool ignoreWarnings) {
|
||||
// NOTE: callers must lock the tree before using this method
|
||||
DeleteEntityOperator theOperator(getThisPointer());
|
||||
|
@ -476,6 +488,7 @@ void EntityTree::deleteEntities(QSet<EntityItemID> entityIDs, bool force, bool i
|
|||
}
|
||||
|
||||
// tell our delete operator about this entityID
|
||||
unhookChildAvatar(entityID);
|
||||
theOperator.addEntityIDToDeleteList(entityID);
|
||||
emit deletingEntity(entityID);
|
||||
}
|
||||
|
|
|
@ -121,6 +121,8 @@ public:
|
|||
// use this method if you have a pointer to the entity (avoid an extra entity lookup)
|
||||
bool updateEntity(EntityItemPointer entity, const EntityItemProperties& properties, const SharedNodePointer& senderNode = SharedNodePointer(nullptr));
|
||||
|
||||
// check if an avatar is a child of this entity; if so, set the avatar's parentID to null
|
||||
void unhookChildAvatar(const EntityItemID entityID);
|
||||
void deleteEntity(const EntityItemID& entityID, bool force = false, bool ignoreWarnings = true);
|
||||
void deleteEntities(QSet<EntityItemID> entityIDs, bool force = false, bool ignoreWarnings = true);
|
||||
|
||||
|
|
|
@ -83,9 +83,7 @@ public:
|
|||
const Vec4i& region, QImage& destImage) final override;
|
||||
|
||||
|
||||
static const int MAX_NUM_ATTRIBUTES = Stream::NUM_INPUT_SLOTS;
|
||||
static const int MAX_NUM_INPUT_BUFFERS = 16;
|
||||
|
||||
// this is the maximum number of available input buffers
|
||||
size_t getNumInputBuffers() const { return _input._invalidBuffers.size(); }
|
||||
|
||||
// this is the maximum per shader stage on the low end apple
|
||||
|
@ -147,6 +145,10 @@ public:
|
|||
virtual void do_startNamedCall(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_stopNamedCall(const Batch& batch, size_t paramOffset) final;
|
||||
|
||||
static const int MAX_NUM_ATTRIBUTES = Stream::NUM_INPUT_SLOTS;
|
||||
// The draw-call-info attribute channel is reserved and is the upper bound for the number of available input buffers
|
||||
static const int MAX_NUM_INPUT_BUFFERS = Stream::DRAW_CALL_INFO;
|
||||
|
||||
virtual void do_pushProfileRange(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_popProfileRange(const Batch& batch, size_t paramOffset) final;
|
||||
|
||||
|
@ -235,18 +237,21 @@ protected:
|
|||
virtual void initInput() final;
|
||||
virtual void killInput() final;
|
||||
virtual void syncInputStateCache() final;
|
||||
virtual void resetInputStage() final;
|
||||
virtual void updateInput();
|
||||
virtual void resetInputStage();
|
||||
virtual void updateInput() = 0;
|
||||
|
||||
struct InputStageState {
|
||||
bool _invalidFormat { true };
|
||||
Stream::FormatPointer _format;
|
||||
std::string _formatKey;
|
||||
|
||||
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
|
||||
ActivationCache _attributeActivation { 0 };
|
||||
|
||||
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> BuffersState;
|
||||
BuffersState _invalidBuffers { 0 };
|
||||
|
||||
BuffersState _invalidBuffers{ 0 };
|
||||
BuffersState _attribBindingBuffers{ 0 };
|
||||
|
||||
Buffers _buffers;
|
||||
Offsets _bufferOffsets;
|
||||
|
@ -266,7 +271,11 @@ protected:
|
|||
GLuint _defaultVAO { 0 };
|
||||
|
||||
InputStageState() :
|
||||
_buffers(_invalidBuffers.size()),
|
||||
_invalidFormat(true),
|
||||
_format(0),
|
||||
_formatKey(),
|
||||
_attributeActivation(0),
|
||||
_buffers(_invalidBuffers.size(), BufferPointer(0)),
|
||||
_bufferOffsets(_invalidBuffers.size(), 0),
|
||||
_bufferStrides(_invalidBuffers.size(), 0),
|
||||
_bufferVBOs(_invalidBuffers.size(), 0) {}
|
||||
|
@ -276,8 +285,8 @@ protected:
|
|||
void killTransform();
|
||||
// Synchronize the state cache of this Backend with the actual real state of the GL Context
|
||||
void syncTransformStateCache();
|
||||
void updateTransform(const Batch& batch);
|
||||
void resetTransformStage();
|
||||
virtual void updateTransform(const Batch& batch) = 0;
|
||||
virtual void resetTransformStage();
|
||||
|
||||
// Allows for correction of the camera pose to account for changes
|
||||
// between the time when a was recorded and the time(s) when it is
|
||||
|
@ -325,6 +334,8 @@ protected:
|
|||
bool _invalidProj { false };
|
||||
bool _invalidViewport { false };
|
||||
|
||||
bool _enabledDrawcallInfoBuffer{ false };
|
||||
|
||||
using Pair = std::pair<size_t, size_t>;
|
||||
using List = std::list<Pair>;
|
||||
List _cameraOffsets;
|
||||
|
@ -399,8 +410,8 @@ protected:
|
|||
|
||||
void resetQueryStage();
|
||||
struct QueryStageState {
|
||||
|
||||
};
|
||||
uint32_t _rangeQueryDepth { 0 };
|
||||
} _queryStage;
|
||||
|
||||
void resetStages();
|
||||
|
||||
|
|
|
@ -10,16 +10,26 @@
|
|||
//
|
||||
#include "GLBackend.h"
|
||||
#include "GLShared.h"
|
||||
#include "GLInputFormat.h"
|
||||
|
||||
using namespace gpu;
|
||||
using namespace gpu::gl;
|
||||
|
||||
void GLBackend::do_setInputFormat(const Batch& batch, size_t paramOffset) {
|
||||
Stream::FormatPointer format = batch._streamFormats.get(batch._params[paramOffset]._uint);
|
||||
|
||||
if (format != _input._format) {
|
||||
_input._format = format;
|
||||
_input._invalidFormat = true;
|
||||
if (format) {
|
||||
auto inputFormat = GLInputFormat::sync((*format));
|
||||
assert(inputFormat);
|
||||
if (_input._formatKey != inputFormat->key) {
|
||||
_input._formatKey = inputFormat->key;
|
||||
_input._invalidFormat = true;
|
||||
}
|
||||
} else {
|
||||
_input._formatKey.clear();
|
||||
_input._invalidFormat = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -93,16 +103,9 @@ void GLBackend::resetInputStage() {
|
|||
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
|
||||
(void) CHECK_GL_ERROR();
|
||||
|
||||
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
|
||||
|
||||
for (uint32_t i = 0; i < _input._attributeActivation.size(); i++) {
|
||||
glDisableVertexAttribArray(i);
|
||||
glVertexAttribPointer(i, 4, GL_FLOAT, GL_FALSE, 0, 0);
|
||||
}
|
||||
|
||||
// Reset vertex buffer and format
|
||||
_input._format.reset();
|
||||
_input._formatKey.clear();
|
||||
_input._invalidFormat = false;
|
||||
_input._attributeActivation.reset();
|
||||
|
||||
|
@ -114,6 +117,7 @@ void GLBackend::resetInputStage() {
|
|||
}
|
||||
_input._invalidBuffers.reset();
|
||||
|
||||
// The vertex array binding MUST be reset in the specific backend versions, as they use different techniques
|
||||
}
|
||||
|
||||
void GLBackend::do_setIndexBuffer(const Batch& batch, size_t paramOffset) {
|
||||
|
@ -151,183 +155,3 @@ void GLBackend::do_setIndirectBuffer(const Batch& batch, size_t paramOffset) {
|
|||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
|
||||
// Core 41 doesn't expose the features to really separate the vertex format from the vertex buffers binding
|
||||
// Core 43 does :)
|
||||
// FIXME crashing problem with glVertexBindingDivisor / glVertexAttribFormat
|
||||
// Once resolved, break this up into the GL 4.1 and 4.5 backends
|
||||
#if 1 || (GPU_INPUT_PROFILE == GPU_CORE_41)
|
||||
#define NO_SUPPORT_VERTEX_ATTRIB_FORMAT
|
||||
#else
|
||||
#define SUPPORT_VERTEX_ATTRIB_FORMAT
|
||||
#endif
|
||||
|
||||
void GLBackend::updateInput() {
|
||||
#if defined(SUPPORT_VERTEX_ATTRIB_FORMAT)
|
||||
if (_input._invalidFormat) {
|
||||
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
// Assign the vertex format required
|
||||
if (_input._format) {
|
||||
for (auto& it : _input._format->getAttributes()) {
|
||||
const Stream::Attribute& attrib = (it).second;
|
||||
|
||||
GLuint slot = attrib._slot;
|
||||
GLuint count = attrib._element.getLocationScalarCount();
|
||||
uint8_t locationCount = attrib._element.getLocationCount();
|
||||
GLenum type = _elementTypeToGL41Type[attrib._element.getType()];
|
||||
GLuint offset = attrib._offset;;
|
||||
GLboolean isNormalized = attrib._element.isNormalized();
|
||||
|
||||
GLenum perLocationSize = attrib._element.getLocationSize();
|
||||
|
||||
for (size_t locNum = 0; locNum < locationCount; ++locNum) {
|
||||
newActivation.set(slot + locNum);
|
||||
glVertexAttribFormat(slot + locNum, count, type, isNormalized, offset + locNum * perLocationSize);
|
||||
glVertexAttribBinding(slot + locNum, attrib._channel);
|
||||
}
|
||||
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
|
||||
glVertexBindingDivisor(attrib._channel, attrib._frequency * (isStereo() ? 2 : 1));
|
||||
#else
|
||||
glVertexBindingDivisor(attrib._channel, attrib._frequency);
|
||||
#endif
|
||||
}
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
// Manage Activation what was and what is expected now
|
||||
for (size_t i = 0; i < newActivation.size(); i++) {
|
||||
bool newState = newActivation[i];
|
||||
if (newState != _input._attributeActivation[i]) {
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
_input._attributeActivation.flip(i);
|
||||
}
|
||||
}
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
_input._invalidFormat = false;
|
||||
_stats._ISNumFormatChanges++;
|
||||
}
|
||||
|
||||
if (_input._invalidBuffers.any()) {
|
||||
int numBuffers = _input._buffers.size();
|
||||
auto buffer = _input._buffers.data();
|
||||
auto vbo = _input._bufferVBOs.data();
|
||||
auto offset = _input._bufferOffsets.data();
|
||||
auto stride = _input._bufferStrides.data();
|
||||
|
||||
for (int bufferNum = 0; bufferNum < numBuffers; bufferNum++) {
|
||||
if (_input._invalidBuffers.test(bufferNum)) {
|
||||
glBindVertexBuffer(bufferNum, (*vbo), (*offset), (*stride));
|
||||
}
|
||||
buffer++;
|
||||
vbo++;
|
||||
offset++;
|
||||
stride++;
|
||||
}
|
||||
_input._invalidBuffers.reset();
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
#else
|
||||
if (_input._invalidFormat || _input._invalidBuffers.any()) {
|
||||
|
||||
if (_input._invalidFormat) {
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
_stats._ISNumFormatChanges++;
|
||||
|
||||
// Check expected activation
|
||||
if (_input._format) {
|
||||
for (auto& it : _input._format->getAttributes()) {
|
||||
const Stream::Attribute& attrib = (it).second;
|
||||
uint8_t locationCount = attrib._element.getLocationCount();
|
||||
for (int i = 0; i < locationCount; ++i) {
|
||||
newActivation.set(attrib._slot + i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Manage Activation what was and what is expected now
|
||||
for (unsigned int i = 0; i < newActivation.size(); i++) {
|
||||
bool newState = newActivation[i];
|
||||
if (newState != _input._attributeActivation[i]) {
|
||||
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
_input._attributeActivation.flip(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// now we need to bind the buffers and assign the attrib pointers
|
||||
if (_input._format) {
|
||||
const Buffers& buffers = _input._buffers;
|
||||
const Offsets& offsets = _input._bufferOffsets;
|
||||
const Offsets& strides = _input._bufferStrides;
|
||||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
_stats._ISNumInputBufferChanges++;
|
||||
|
||||
GLuint boundVBO = 0;
|
||||
for (auto& channelIt : inputChannels) {
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
|
||||
if ((channelIt).first < buffers.size()) {
|
||||
int bufferNum = (channelIt).first;
|
||||
|
||||
if (_input._invalidBuffers.test(bufferNum) || _input._invalidFormat) {
|
||||
// GLuint vbo = gpu::GL41Backend::getBufferID((*buffers[bufferNum]));
|
||||
GLuint vbo = _input._bufferVBOs[bufferNum];
|
||||
if (boundVBO != vbo) {
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo);
|
||||
(void)CHECK_GL_ERROR();
|
||||
boundVBO = vbo;
|
||||
}
|
||||
_input._invalidBuffers[bufferNum] = false;
|
||||
|
||||
for (unsigned int i = 0; i < channel._slots.size(); i++) {
|
||||
const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
|
||||
GLuint slot = attrib._slot;
|
||||
GLuint count = attrib._element.getLocationScalarCount();
|
||||
uint8_t locationCount = attrib._element.getLocationCount();
|
||||
GLenum type = gl::ELEMENT_TYPE_TO_GL[attrib._element.getType()];
|
||||
// GLenum perLocationStride = strides[bufferNum];
|
||||
GLenum perLocationStride = attrib._element.getLocationSize();
|
||||
GLuint stride = (GLuint)strides[bufferNum];
|
||||
GLuint pointer = (GLuint)(attrib._offset + offsets[bufferNum]);
|
||||
GLboolean isNormalized = attrib._element.isNormalized();
|
||||
|
||||
for (size_t locNum = 0; locNum < locationCount; ++locNum) {
|
||||
glVertexAttribPointer(slot + (GLuint)locNum, count, type, isNormalized, stride,
|
||||
reinterpret_cast<GLvoid*>(pointer + perLocationStride * (GLuint)locNum));
|
||||
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
|
||||
glVertexAttribDivisor(slot + (GLuint)locNum, attrib._frequency * (isStereo() ? 2 : 1));
|
||||
#else
|
||||
glVertexAttribDivisor(slot + (GLuint)locNum, attrib._frequency);
|
||||
#endif
|
||||
}
|
||||
|
||||
// TODO: Support properly the IAttrib version
|
||||
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// everything format related should be in sync now
|
||||
_input._invalidFormat = false;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
|
|
|
@ -16,8 +16,10 @@ using namespace gpu::gl;

// Eventually, we want to test with TIME_ELAPSED instead of TIMESTAMP
#ifdef Q_OS_MAC
const uint32_t MAX_RANGE_QUERY_DEPTH = 1;
static bool timeElapsed = true;
#else
const uint32_t MAX_RANGE_QUERY_DEPTH = 10000;
static bool timeElapsed = false;
#endif

@ -25,12 +27,16 @@ void GLBackend::do_beginQuery(const Batch& batch, size_t paramOffset) {
auto query = batch._queries.get(batch._params[paramOffset]._uint);
GLQuery* glquery = syncGPUObject(*query);
if (glquery) {
++_queryStage._rangeQueryDepth;
glGetInteger64v(GL_TIMESTAMP, (GLint64*)&glquery->_batchElapsedTime);
if (timeElapsed) {
glBeginQuery(GL_TIME_ELAPSED, glquery->_endqo);
if (_queryStage._rangeQueryDepth <= MAX_RANGE_QUERY_DEPTH) {
glBeginQuery(GL_TIME_ELAPSED, glquery->_endqo);
}
} else {
glQueryCounter(glquery->_beginqo, GL_TIMESTAMP);
}
glquery->_rangeQueryDepth = _queryStage._rangeQueryDepth;
(void)CHECK_GL_ERROR();
}
}

@ -40,10 +46,13 @@ void GLBackend::do_endQuery(const Batch& batch, size_t paramOffset) {
GLQuery* glquery = syncGPUObject(*query);
if (glquery) {
if (timeElapsed) {
glEndQuery(GL_TIME_ELAPSED);
if (_queryStage._rangeQueryDepth <= MAX_RANGE_QUERY_DEPTH) {
glEndQuery(GL_TIME_ELAPSED);
}
} else {
glQueryCounter(glquery->_endqo, GL_TIMESTAMP);
}
--_queryStage._rangeQueryDepth;
GLint64 now;
glGetInteger64v(GL_TIMESTAMP, &now);
glquery->_batchElapsedTime = now - glquery->_batchElapsedTime;

@ -55,20 +64,24 @@ void GLBackend::do_endQuery(const Batch& batch, size_t paramOffset) {
void GLBackend::do_getQuery(const Batch& batch, size_t paramOffset) {
auto query = batch._queries.get(batch._params[paramOffset]._uint);
GLQuery* glquery = syncGPUObject(*query);
if (glquery) {
glGetQueryObjectui64v(glquery->_endqo, GL_QUERY_RESULT_AVAILABLE, &glquery->_result);
if (glquery->_result == GL_TRUE) {
if (timeElapsed) {
glGetQueryObjectui64v(glquery->_endqo, GL_QUERY_RESULT, &glquery->_result);
} else {
GLuint64 start, end;
glGetQueryObjectui64v(glquery->_beginqo, GL_QUERY_RESULT, &start);
glGetQueryObjectui64v(glquery->_endqo, GL_QUERY_RESULT, &end);
glquery->_result = end - start;
}
if (glquery) {
if (glquery->_rangeQueryDepth > MAX_RANGE_QUERY_DEPTH) {
query->triggerReturnHandler(glquery->_result, glquery->_batchElapsedTime);
} else {
glGetQueryObjectui64v(glquery->_endqo, GL_QUERY_RESULT_AVAILABLE, &glquery->_result);
if (glquery->_result == GL_TRUE) {
if (timeElapsed) {
glGetQueryObjectui64v(glquery->_endqo, GL_QUERY_RESULT, &glquery->_result);
} else {
GLuint64 start, end;
glGetQueryObjectui64v(glquery->_beginqo, GL_QUERY_RESULT, &start);
glGetQueryObjectui64v(glquery->_endqo, GL_QUERY_RESULT, &end);
glquery->_result = end - start;
}
query->triggerReturnHandler(glquery->_result, glquery->_batchElapsedTime);
}
(void)CHECK_GL_ERROR();
}
(void)CHECK_GL_ERROR();
}
}
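// --- Illustrative aside (not part of the merge diff): the two timing strategies used by the
// query code above, shown standalone. A GL_TIME_ELAPSED range query cannot be nested inside
// another one on the same target, which is why the backend caps nesting with
// MAX_RANGE_QUERY_DEPTH; a pair of GL_TIMESTAMP counters has no such restriction. The query
// ids passed in are assumed to have been created with glGenQueries elsewhere.
static GLuint64 exampleGpuElapsedNanos(GLuint beginQueryId, GLuint endQueryId, bool useElapsed) {
    GLuint64 begin = 0, end = 0, elapsed = 0;
    if (useElapsed) {
        glBeginQuery(GL_TIME_ELAPSED, endQueryId);
        // ... record GL work here ...
        glEndQuery(GL_TIME_ELAPSED);
        glGetQueryObjectui64v(endQueryId, GL_QUERY_RESULT, &elapsed);
    } else {
        glQueryCounter(beginQueryId, GL_TIMESTAMP);
        // ... record GL work here ...
        glQueryCounter(endQueryId, GL_TIMESTAMP);
        glGetQueryObjectui64v(beginQueryId, GL_QUERY_RESULT, &begin);
        glGetQueryObjectui64v(endQueryId, GL_QUERY_RESULT, &end);
        elapsed = end - begin;
    }
    return elapsed; // nanoseconds
}
// --- End aside ---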
@ -85,6 +85,9 @@ void GLBackend::syncTransformStateCache() {
Mat4 modelView;
auto modelViewInv = glm::inverse(modelView);
_transform._view.evalFromRawMatrix(modelViewInv);

glDisableVertexAttribArray(gpu::Stream::DRAW_CALL_INFO);
_transform._enabledDrawcallInfoBuffer = false;
}

void GLBackend::TransformStageState::preUpdate(size_t commandIndex, const StereoState& stereo) {

@ -162,29 +165,7 @@ void GLBackend::TransformStageState::bindCurrentCamera(int eye) const {
}
}

void GLBackend::updateTransform(const Batch& batch) {
_transform.update(_commandIndex, _stereo);

auto& drawCallInfoBuffer = batch.getDrawCallInfoBuffer();
if (batch._currentNamedCall.empty()) {
auto& drawCallInfo = drawCallInfoBuffer[_currentDraw];
glDisableVertexAttribArray(gpu::Stream::DRAW_CALL_INFO); // Make sure attrib array is disabled
glVertexAttribI2i(gpu::Stream::DRAW_CALL_INFO, drawCallInfo.index, drawCallInfo.unused);
} else {
glEnableVertexAttribArray(gpu::Stream::DRAW_CALL_INFO); // Make sure attrib array is enabled
glBindBuffer(GL_ARRAY_BUFFER, _transform._drawCallInfoBuffer);
glVertexAttribIPointer(gpu::Stream::DRAW_CALL_INFO, 2, GL_UNSIGNED_SHORT, 0,
_transform._drawCallInfoOffsets[batch._currentNamedCall]);
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
glVertexAttribDivisor(gpu::Stream::DRAW_CALL_INFO, (isStereo() ? 2 : 1));
#else
glVertexAttribDivisor(gpu::Stream::DRAW_CALL_INFO, 1);
#endif
}

(void)CHECK_GL_ERROR();
}

void GLBackend::resetTransformStage() {

glDisableVertexAttribArray(gpu::Stream::DRAW_CALL_INFO);
_transform._enabledDrawcallInfoBuffer = false;
}

33
libraries/gpu-gl/src/gpu/gl/GLInputFormat.cpp
Normal file
@ -0,0 +1,33 @@
//
// Created by Sam Gateau on 2016/07/21
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "GLInputFormat.h"
#include "GLBackend.h"

using namespace gpu;
using namespace gpu::gl;


GLInputFormat::GLInputFormat() {
}

GLInputFormat::~GLInputFormat() {

}

GLInputFormat* GLInputFormat::sync(const Stream::Format& inputFormat) {
GLInputFormat* object = Backend::getGPUObject<GLInputFormat>(inputFormat);

if (!object) {
object = new GLInputFormat();
object->key = inputFormat.getKey();
Backend::setGPUObject(inputFormat, object);
}

return object;
}
29
libraries/gpu-gl/src/gpu/gl/GLInputFormat.h
Normal file
@ -0,0 +1,29 @@
//
// Created by Sam Gateau on 2016/07/21
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_gpu_gl_GLInputFormat_h
#define hifi_gpu_gl_GLInputFormat_h

#include "GLShared.h"

namespace gpu {
namespace gl {

class GLInputFormat : public GPUObject {
public:
static GLInputFormat* sync(const Stream::Format& inputFormat);

GLInputFormat();
~GLInputFormat();

std::string key;
};

}
}

#endif
@ -49,6 +49,7 @@ public:
const GLuint _beginqo = { 0 };
GLuint64 _result { (GLuint64)-1 };
GLuint64 _batchElapsedTime { (GLuint64) 0 };
uint32_t _rangeQueryDepth { 0 };

protected:
GLQuery(const std::weak_ptr<GLBackend>& backend, const Query& query, GLuint endId, GLuint beginId) : Parent(backend, query, endId), _beginqo(beginId) {}
@ -77,13 +77,13 @@ protected:
void do_multiDrawIndexedIndirect(const Batch& batch, size_t paramOffset) override;

// Input Stage
void resetInputStage() override;
void updateInput() override;

// Synchronize the state cache of this Backend with the actual real state of the GL Context
void transferTransformState(const Batch& batch) const override;
void initTransform() override;
void updateTransform(const Batch& batch);
void resetTransformStage();
void updateTransform(const Batch& batch) override;

// Output stage
void do_blit(const Batch& batch, size_t paramOffset) override;
@ -13,7 +13,111 @@
|
|||
using namespace gpu;
|
||||
using namespace gpu::gl41;
|
||||
|
||||
void GL41Backend::updateInput() {
|
||||
Parent::updateInput();
|
||||
|
||||
void GL41Backend::resetInputStage() {
|
||||
Parent::resetInputStage();
|
||||
|
||||
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
for (uint32_t i = 0; i < _input._attributeActivation.size(); i++) {
|
||||
glDisableVertexAttribArray(i);
|
||||
glVertexAttribPointer(i, 4, GL_FLOAT, GL_FALSE, 0, 0);
|
||||
}
|
||||
}
|
||||
|
||||
void GL41Backend::updateInput() {
|
||||
if (_input._invalidFormat || _input._invalidBuffers.any()) {
|
||||
|
||||
if (_input._invalidFormat) {
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
_stats._ISNumFormatChanges++;
|
||||
|
||||
// Check expected activation
|
||||
if (_input._format) {
|
||||
for (auto& it : _input._format->getAttributes()) {
|
||||
const Stream::Attribute& attrib = (it).second;
|
||||
uint8_t locationCount = attrib._element.getLocationCount();
|
||||
for (int i = 0; i < locationCount; ++i) {
|
||||
newActivation.set(attrib._slot + i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Manage Activation what was and what is expected now
|
||||
for (unsigned int i = 0; i < newActivation.size(); i++) {
|
||||
bool newState = newActivation[i];
|
||||
if (newState != _input._attributeActivation[i]) {
|
||||
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
_input._attributeActivation.flip(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// now we need to bind the buffers and assign the attrib pointers
|
||||
if (_input._format) {
|
||||
const Buffers& buffers = _input._buffers;
|
||||
const Offsets& offsets = _input._bufferOffsets;
|
||||
const Offsets& strides = _input._bufferStrides;
|
||||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
_stats._ISNumInputBufferChanges++;
|
||||
|
||||
GLuint boundVBO = 0;
|
||||
for (auto& channelIt : inputChannels) {
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
|
||||
if ((channelIt).first < buffers.size()) {
|
||||
int bufferNum = (channelIt).first;
|
||||
|
||||
if (_input._invalidBuffers.test(bufferNum) || _input._invalidFormat) {
|
||||
// GLuint vbo = gpu::GL41Backend::getBufferID((*buffers[bufferNum]));
|
||||
GLuint vbo = _input._bufferVBOs[bufferNum];
|
||||
if (boundVBO != vbo) {
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo);
|
||||
(void)CHECK_GL_ERROR();
|
||||
boundVBO = vbo;
|
||||
}
|
||||
_input._invalidBuffers[bufferNum] = false;
|
||||
|
||||
for (unsigned int i = 0; i < channel._slots.size(); i++) {
|
||||
const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
|
||||
GLuint slot = attrib._slot;
|
||||
GLuint count = attrib._element.getLocationScalarCount();
|
||||
uint8_t locationCount = attrib._element.getLocationCount();
|
||||
GLenum type = gl::ELEMENT_TYPE_TO_GL[attrib._element.getType()];
|
||||
// GLenum perLocationStride = strides[bufferNum];
|
||||
GLenum perLocationStride = attrib._element.getLocationSize();
|
||||
GLuint stride = (GLuint)strides[bufferNum];
|
||||
GLuint pointer = (GLuint)(attrib._offset + offsets[bufferNum]);
|
||||
GLboolean isNormalized = attrib._element.isNormalized();
|
||||
|
||||
for (size_t locNum = 0; locNum < locationCount; ++locNum) {
|
||||
glVertexAttribPointer(slot + (GLuint)locNum, count, type, isNormalized, stride,
|
||||
reinterpret_cast<GLvoid*>(pointer + perLocationStride * (GLuint)locNum));
|
||||
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
|
||||
glVertexAttribDivisor(slot + (GLuint)locNum, attrib._frequency * (isStereo() ? 2 : 1));
|
||||
#else
|
||||
glVertexAttribDivisor(slot + (GLuint)locNum, attrib._frequency);
|
||||
#endif
|
||||
}
|
||||
|
||||
// TODO: Support properly the IAttrib version
|
||||
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// everything format related should be in sync now
|
||||
_input._invalidFormat = false;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -79,3 +79,32 @@ void GL41Backend::transferTransformState(const Batch& batch) const {
// Make sure the current Camera offset is unknown before render Draw
_transform._currentCameraOffset = INVALID_OFFSET;
}


void GL41Backend::updateTransform(const Batch& batch) {
_transform.update(_commandIndex, _stereo);

auto& drawCallInfoBuffer = batch.getDrawCallInfoBuffer();
if (batch._currentNamedCall.empty()) {
auto& drawCallInfo = drawCallInfoBuffer[_currentDraw];
if (_transform._enabledDrawcallInfoBuffer) {
glDisableVertexAttribArray(gpu::Stream::DRAW_CALL_INFO); // Make sure attrib array is disabled
_transform._enabledDrawcallInfoBuffer = false;
}
glVertexAttribI2i(gpu::Stream::DRAW_CALL_INFO, drawCallInfo.index, drawCallInfo.unused);
} else {
if (!_transform._enabledDrawcallInfoBuffer) {
glEnableVertexAttribArray(gpu::Stream::DRAW_CALL_INFO); // Make sure attrib array is enabled
glBindBuffer(GL_ARRAY_BUFFER, _transform._drawCallInfoBuffer);
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
glVertexAttribDivisor(gpu::Stream::DRAW_CALL_INFO, (isStereo() ? 2 : 1));
#else
glVertexAttribDivisor(gpu::Stream::DRAW_CALL_INFO, 1);
#endif
_transform._enabledDrawcallInfoBuffer = true;
}
glVertexAttribIPointer(gpu::Stream::DRAW_CALL_INFO, 2, GL_UNSIGNED_SHORT, 0, _transform._drawCallInfoOffsets[batch._currentNamedCall]);
}

(void)CHECK_GL_ERROR();
}
@ -130,13 +130,13 @@ protected:
void do_multiDrawIndexedIndirect(const Batch& batch, size_t paramOffset) override;

// Input Stage
void resetInputStage() override;
void updateInput() override;

// Synchronize the state cache of this Backend with the actual real state of the GL Context
void transferTransformState(const Batch& batch) const override;
void initTransform() override;
void updateTransform(const Batch& batch);
void resetTransformStage();
void updateTransform(const Batch& batch) override;

// Output stage
void do_blit(const Batch& batch, size_t paramOffset) override;
@ -9,10 +9,112 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "GL45Backend.h"
|
||||
#include "../gl/GLShared.h"
|
||||
|
||||
using namespace gpu;
|
||||
using namespace gpu::gl45;
|
||||
|
||||
void GL45Backend::updateInput() {
|
||||
Parent::updateInput();
|
||||
void GL45Backend::resetInputStage() {
|
||||
Parent::resetInputStage();
|
||||
|
||||
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
for (uint32_t i = 0; i < _input._attributeActivation.size(); i++) {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
for (uint32_t i = 0; i < _input._attribBindingBuffers.size(); i++) {
|
||||
glBindVertexBuffer(i, 0, 0, 0);
|
||||
}
|
||||
}
|
||||
|
||||
void GL45Backend::updateInput() {
|
||||
if (_input._invalidFormat) {
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
// Assign the vertex format required
|
||||
if (_input._format) {
|
||||
_input._attribBindingBuffers.reset();
|
||||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
for (auto& channelIt : inputChannels) {
|
||||
auto bufferChannelNum = (channelIt).first;
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
|
||||
_input._attribBindingBuffers.set(bufferChannelNum);
|
||||
|
||||
GLuint frequency = 0;
|
||||
for (unsigned int i = 0; i < channel._slots.size(); i++) {
|
||||
const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
|
||||
|
||||
GLuint slot = attrib._slot;
|
||||
GLuint count = attrib._element.getLocationScalarCount();
|
||||
uint8_t locationCount = attrib._element.getLocationCount();
|
||||
GLenum type = gl::ELEMENT_TYPE_TO_GL[attrib._element.getType()];
|
||||
|
||||
GLuint offset = (GLuint)attrib._offset;;
|
||||
GLboolean isNormalized = attrib._element.isNormalized();
|
||||
|
||||
GLenum perLocationSize = attrib._element.getLocationSize();
|
||||
for (GLuint locNum = 0; locNum < locationCount; ++locNum) {
|
||||
GLuint attriNum = (GLuint)(slot + locNum);
|
||||
newActivation.set(attriNum);
|
||||
if (!_input._attributeActivation[attriNum]) {
|
||||
_input._attributeActivation.set(attriNum);
|
||||
glEnableVertexAttribArray(attriNum);
|
||||
}
|
||||
glVertexAttribFormat(attriNum, count, type, isNormalized, offset + locNum * perLocationSize);
|
||||
// TODO: Support properly the IAttrib version
|
||||
glVertexAttribBinding(attriNum, attrib._channel);
|
||||
}
|
||||
|
||||
if (i == 0) {
|
||||
frequency = attrib._frequency;
|
||||
} else {
|
||||
assert(frequency == attrib._frequency);
|
||||
}
|
||||
|
||||
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
|
||||
glVertexBindingDivisor(bufferChannelNum, frequency * (isStereo() ? 2 : 1));
|
||||
#else
|
||||
glVertexBindingDivisor(bufferChannelNum, frequency);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
// Manage Activation what was and what is expected now
|
||||
// This should only disable VertexAttribs since the one in use have been disabled above
|
||||
for (GLuint i = 0; i < (GLuint)newActivation.size(); i++) {
|
||||
bool newState = newActivation[i];
|
||||
if (newState != _input._attributeActivation[i]) {
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
_input._attributeActivation.flip(i);
|
||||
}
|
||||
}
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
_input._invalidFormat = false;
|
||||
_stats._ISNumFormatChanges++;
|
||||
}
|
||||
|
||||
if (_input._invalidBuffers.any()) {
|
||||
auto vbo = _input._bufferVBOs.data();
|
||||
auto offset = _input._bufferOffsets.data();
|
||||
auto stride = _input._bufferStrides.data();
|
||||
|
||||
for (GLuint buffer = 0; buffer < _input._buffers.size(); buffer++, vbo++, offset++, stride++) {
|
||||
if (_input._invalidBuffers.test(buffer)) {
|
||||
glBindVertexBuffer(buffer, (*vbo), (*offset), (GLsizei)(*stride));
|
||||
}
|
||||
}
|
||||
|
||||
_input._invalidBuffers.reset();
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -66,3 +66,36 @@ void GL45Backend::transferTransformState(const Batch& batch) const {
// Make sure the current Camera offset is unknown before render Draw
_transform._currentCameraOffset = INVALID_OFFSET;
}


void GL45Backend::updateTransform(const Batch& batch) {
_transform.update(_commandIndex, _stereo);

auto& drawCallInfoBuffer = batch.getDrawCallInfoBuffer();
if (batch._currentNamedCall.empty()) {
auto& drawCallInfo = drawCallInfoBuffer[_currentDraw];
if (_transform._enabledDrawcallInfoBuffer) {
glDisableVertexAttribArray(gpu::Stream::DRAW_CALL_INFO); // Make sure attrib array is disabled
_transform._enabledDrawcallInfoBuffer = false;
}
glVertexAttribI2i(gpu::Stream::DRAW_CALL_INFO, drawCallInfo.index, drawCallInfo.unused);
} else {
if (!_transform._enabledDrawcallInfoBuffer) {
glEnableVertexAttribArray(gpu::Stream::DRAW_CALL_INFO); // Make sure attrib array is enabled
glVertexAttribIFormat(gpu::Stream::DRAW_CALL_INFO, 2, GL_UNSIGNED_SHORT, 0);
glVertexAttribBinding(gpu::Stream::DRAW_CALL_INFO, gpu::Stream::DRAW_CALL_INFO);
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
glVertexBindingDivisor(gpu::Stream::DRAW_CALL_INFO, (isStereo() ? 2 : 1));
#else
glVertexBindingDivisor(gpu::Stream::DRAW_CALL_INFO, 1);
#endif
_transform._enabledDrawcallInfoBuffer = true;
}
// NOTE: A stride of zero in BindVertexBuffer signifies that all elements are sourced from the same location,
// so we must provide a stride.
// This is in contrast to VertexAttrib*Pointer, where a zero signifies tightly-packed elements.
glBindVertexBuffer(gpu::Stream::DRAW_CALL_INFO, _transform._drawCallInfoBuffer, (GLintptr)_transform._drawCallInfoOffsets[batch._currentNamedCall], 2 * sizeof(GLushort));
}

(void)CHECK_GL_ERROR();
}
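// --- Illustrative aside (not part of the merge diff): the stride-zero difference called out in
// the NOTE above, for a hypothetical attribute 5 reading tightly packed GLushort pairs.
static void exampleStrideZeroDifference(GLuint buffer) {
    // glVertexAttribIPointer: stride 0 means "tightly packed", GL derives the 4-byte step.
    glBindBuffer(GL_ARRAY_BUFFER, buffer);
    glVertexAttribIPointer(5, 2, GL_UNSIGNED_SHORT, 0, nullptr);
    // glBindVertexBuffer: stride 0 means every fetch reads the same element, so the packed
    // step has to be passed explicitly.
    glBindVertexBuffer(5, buffer, 0, 2 * sizeof(GLushort));
}
// --- End aside ---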
@ -25,6 +25,14 @@ void Buffer::updateBufferCPUMemoryUsage(Size prevObjectSize, Size newObjectSize)
}
}

void Buffer::incrementBufferCPUCount() {
_bufferCPUCount++;
}

void Buffer::decrementBufferCPUCount() {
_bufferCPUCount--;
}

uint32_t Buffer::getBufferCPUCount() {
return _bufferCPUCount.load();
}

@ -43,7 +51,7 @@ Buffer::Size Buffer::getBufferGPUMemoryUsage() {

Buffer::Buffer(Size pageSize) :
_renderPages(pageSize), _pages(pageSize) {
_bufferCPUCount++;
Buffer::incrementBufferCPUCount();
}

Buffer::Buffer(Size size, const Byte* bytes, Size pageSize) : Buffer(pageSize) {

@ -61,7 +69,7 @@ Buffer& Buffer::operator=(const Buffer& buf) {
}

Buffer::~Buffer() {
_bufferCPUCount--;
Buffer::decrementBufferCPUCount();
Buffer::updateBufferCPUMemoryUsage(_sysmem.getSize(), 0);
}
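// --- Illustrative aside (not part of the merge diff): the counter pattern above in isolation.
// A hypothetical resource type keeps a process-wide count of live CPU-side objects by bumping
// an atomic in its constructor and dropping it in its destructor, mirroring
// incrementBufferCPUCount()/decrementBufferCPUCount().
#include <atomic>
#include <cstdint>

class ExampleCountedResource {
public:
    ExampleCountedResource() { _liveCount.fetch_add(1, std::memory_order_relaxed); }
    ~ExampleCountedResource() { _liveCount.fetch_sub(1, std::memory_order_relaxed); }
    static uint32_t getLiveCount() { return _liveCount.load(); }
private:
    static std::atomic<uint32_t> _liveCount;
};
std::atomic<uint32_t> ExampleCountedResource::_liveCount { 0 };
// --- End aside ---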
@ -27,6 +27,8 @@ class Buffer : public Resource {
static std::atomic<uint32_t> _bufferCPUCount;
static std::atomic<Size> _bufferCPUMemoryUsage;
static void updateBufferCPUMemoryUsage(Size prevObjectSize, Size newObjectSize);
static void incrementBufferCPUCount();
static void decrementBufferCPUCount();

public:
using Flag = PageManager::Flag;
@ -13,6 +13,23 @@
#include "GPULogging.h"
using namespace gpu;


void ContextStats::evalDelta(const ContextStats& begin, const ContextStats& end) {
_ISNumFormatChanges = end._ISNumFormatChanges - begin._ISNumFormatChanges;
_ISNumInputBufferChanges = end._ISNumInputBufferChanges - begin._ISNumInputBufferChanges;
_ISNumIndexBufferChanges = end._ISNumIndexBufferChanges - begin._ISNumIndexBufferChanges;

_RSNumTextureBounded = end._RSNumTextureBounded - begin._RSNumTextureBounded;
_RSAmountTextureMemoryBounded = end._RSAmountTextureMemoryBounded - begin._RSAmountTextureMemoryBounded;

_DSNumAPIDrawcalls = end._DSNumAPIDrawcalls - begin._DSNumAPIDrawcalls;
_DSNumDrawcalls = end._DSNumDrawcalls - begin._DSNumDrawcalls;
_DSNumTriangles = end._DSNumTriangles - begin._DSNumTriangles;

_PSNumSetPipelines = end._PSNumSetPipelines - begin._PSNumSetPipelines;
}


Context::CreateBackend Context::_createBackendCallback = nullptr;
Context::MakeProgram Context::_makeProgramCallback = nullptr;
std::once_flag Context::_initialized;

@ -73,6 +90,10 @@ void Context::consumeFrameUpdates(const FramePointer& frame) const {
}

void Context::executeFrame(const FramePointer& frame) const {
// Grab the stats around the frame and delta them to get a consistent sampling
ContextStats beginStats;
getStats(beginStats);

// FIXME? probably not necessary, but safe
consumeFrameUpdates(frame);
_backend->setStereoState(frame->stereoState);

@ -90,6 +111,10 @@ void Context::executeFrame(const FramePointer& frame) const {
_frameRangeTimer->end(endBatch);
_backend->render(endBatch);
}

ContextStats endStats;
getStats(endStats);
_frameStats.evalDelta(beginStats, endStats);
}

bool Context::makeProgram(Shader& shader, const Shader::BindingSet& bindings) {

@ -135,10 +160,18 @@ void Context::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, cons
_backend->downloadFramebuffer(srcFramebuffer, region, destImage);
}

void Context::resetStats() const {
_backend->resetStats();
}

void Context::getStats(ContextStats& stats) const {
_backend->getStats(stats);
}

void Context::getFrameStats(ContextStats& stats) const {
stats = _frameStats;
}

double Context::getFrameTimerGPUAverage() const {
if (_frameRangeTimer) {
return _frameRangeTimer->getGPUAverage();
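// --- Illustrative aside (not part of the merge diff): the sampling pattern executeFrame() uses
// above, reduced to a generic form. The backend counters only ever grow, so a per-frame figure
// is the difference between a snapshot taken before the frame and one taken after it, which is
// what ContextStats::evalDelta() computes and getFrameStats() hands back to callers.
#include <cstdint>

struct ExampleCounters {
    uint64_t drawCalls { 0 };
    uint64_t triangles { 0 };
};

static ExampleCounters exampleFrameDelta(const ExampleCounters& begin, const ExampleCounters& end) {
    ExampleCounters delta;
    delta.drawCalls = end.drawCalls - begin.drawCalls;
    delta.triangles = end.triangles - begin.triangles;
    return delta; // consumers read this delta instead of differencing running totals themselves
}
// --- End aside ---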
@ -45,6 +45,8 @@ public:

ContextStats() {}
ContextStats(const ContextStats& stats) = default;

void evalDelta(const ContextStats& begin, const ContextStats& end);
};

class Backend {

@ -83,6 +85,7 @@ public:
return reinterpret_cast<T*>(object.gpuObject.getGPUObject());
}

void resetStats() const { _stats = ContextStats(); }
void getStats(ContextStats& stats) const { stats = _stats; }

virtual bool isTextureManagementSparseEnabled() const = 0;

@ -123,7 +126,7 @@ protected:
}

friend class Context;
ContextStats _stats;
mutable ContextStats _stats;
StereoState _stereo;

};

@ -201,8 +204,11 @@ public:
void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage);

// Reporting stats of the context
void resetStats() const;
void getStats(ContextStats& stats) const;

// Same as above, but sampled at the end of every frame
void getFrameStats(ContextStats& stats) const;

double getFrameTimerGPUAverage() const;
double getFrameTimerBatchAverage() const;

@ -229,8 +235,8 @@ protected:
RangeTimerPointer _frameRangeTimer;
StereoState _stereo;

double getGPUAverage() const;
double getBatchAverage() const;
// Sampled at the end of every frame, the stats of all the counters
mutable ContextStats _frameStats;

// This function can only be called by "static Shader::makeProgram()"
// makeProgramShader(...) make a program shader ready to be used in a Batch.
@ -12,6 +12,8 @@
#include "Stream.h"

#include <algorithm> //min max and more
#include <sstream>
#include <iomanip>

using namespace gpu;

@ -39,9 +41,21 @@ const ElementArray& getDefaultElements() {
return defaultElements;
}

std::string Stream::Attribute::getKey() const {
std::stringstream skey;

skey << std::hex;
skey << std::setw(8) << std::setfill('0') << (uint32)((((uint32)_slot) << 24) | (((uint32)_channel) << 16) | ((uint32)_element.getRaw()));
skey << _offset;
skey << _frequency;
return skey.str();
}

void Stream::Format::evaluateCache() {
_key.clear();
_channels.clear();
_elementTotalSize = 0;

for(AttributeMap::iterator it = _attributes.begin(); it != _attributes.end(); it++) {
Attribute& attrib = (*it).second;
ChannelInfo& channel = _channels[attrib._channel];

@ -49,6 +63,8 @@ void Stream::Format::evaluateCache() {
channel._stride = std::max(channel._stride, attrib.getSize() + attrib._offset);
channel._netSize += attrib.getSize();
_elementTotalSize += attrib.getSize();

_key += attrib.getKey();
}
}
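// --- Illustrative aside (not part of the merge diff): the key-building idea behind
// Stream::Attribute::getKey() above, reduced to plain integers. Packing slot, channel and the
// raw element descriptor into one hex word, then appending offset and frequency, yields a
// string that two formats share only when their attributes match, which is what allows a
// format to be looked up or cached by key (as the new GLInputFormat does).
#include <cstdint>
#include <iomanip>
#include <sstream>
#include <string>

static std::string exampleAttributeKey(uint32_t slot, uint32_t channel, uint32_t elementRaw,
                                       uint32_t offset, uint32_t frequency) {
    std::stringstream skey;
    skey << std::hex << std::setw(8) << std::setfill('0')
         << ((slot << 24) | (channel << 16) | elementRaw);
    skey << offset << frequency;
    return skey.str();
}
// --- End aside ---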
@ -14,6 +14,7 @@
#include <vector>
#include <map>
#include <array>
#include <string>

#include <assert.h>

@ -73,6 +74,9 @@ public:

// Size of the attribute's element
uint32 getSize() const { return _element.getSize(); }

// Generate a string key describing the attribute uniquely
std::string getKey() const;
};

// Stream Format is describing how to feed a list of attributes from a bunch of stream buffer channels

@ -106,10 +110,15 @@ public:

bool hasAttribute(Slot slot) const { return (_attributes.find(slot) != _attributes.end()); }

const std::string& getKey() const { return _key; }

const GPUObjectPointer gpuObject{};

protected:
AttributeMap _attributes;
ChannelMap _channels;
uint32 _elementTotalSize { 0 };
std::string _key;

void evaluateCache();
};
@ -1593,7 +1593,10 @@ void GeometryCache::renderGlowLine(gpu::Batch& batch, const glm::vec3& p1, const
glowIntensity = 0.0f;
#endif

glowIntensity = 0.0f;

if (glowIntensity <= 0) {
bindSimpleProgram(batch, false, false, false, true, false);
renderLine(batch, p1, p2, color, id);
return;
}
@ -35,21 +35,21 @@ void EngineStats::run(const SceneContextPointer& sceneContext, const RenderConte
config->textureGPUVirtualMemoryUsage = gpu::Texture::getTextureGPUVirtualMemoryUsage();
config->textureGPUTransferCount = gpu::Texture::getTextureGPUTransferCount();

gpu::ContextStats gpuStats(_gpuStats);
renderContext->args->_context->getStats(_gpuStats);
renderContext->args->_context->getFrameStats(_gpuStats);

config->frameAPIDrawcallCount = _gpuStats._DSNumAPIDrawcalls - gpuStats._DSNumAPIDrawcalls;
config->frameDrawcallCount = _gpuStats._DSNumDrawcalls - gpuStats._DSNumDrawcalls;
config->frameAPIDrawcallCount = _gpuStats._DSNumAPIDrawcalls;
config->frameDrawcallCount = _gpuStats._DSNumDrawcalls;
config->frameDrawcallRate = config->frameDrawcallCount * frequency;

config->frameTriangleCount = _gpuStats._DSNumTriangles - gpuStats._DSNumTriangles;
config->frameTriangleCount = _gpuStats._DSNumTriangles;
config->frameTriangleRate = config->frameTriangleCount * frequency;

config->frameTextureCount = _gpuStats._RSNumTextureBounded - gpuStats._RSNumTextureBounded;
config->frameTextureCount = _gpuStats._RSNumTextureBounded;
config->frameTextureRate = config->frameTextureCount * frequency;
config->frameTextureMemoryUsage = _gpuStats._RSAmountTextureMemoryBounded - gpuStats._RSAmountTextureMemoryBounded;
config->frameTextureMemoryUsage = _gpuStats._RSAmountTextureMemoryBounded;

config->frameSetPipelineCount = _gpuStats._PSNumSetPipelines - gpuStats._PSNumSetPipelines;
config->frameSetPipelineCount = _gpuStats._PSNumSetPipelines;
config->frameSetInputFormatCount = _gpuStats._ISNumFormatChanges;

config->emitDirty();
}
@ -48,6 +48,7 @@ namespace render {
Q_PROPERTY(quint32 frameTextureMemoryUsage MEMBER frameTextureMemoryUsage NOTIFY dirty)

Q_PROPERTY(quint32 frameSetPipelineCount MEMBER frameSetPipelineCount NOTIFY dirty)
Q_PROPERTY(quint32 frameSetInputFormatCount MEMBER frameSetInputFormatCount NOTIFY dirty)

public:

@ -78,6 +79,8 @@ namespace render {

quint32 frameSetPipelineCount{ 0 };

quint32 frameSetInputFormatCount{ 0 };

void emitDirty() { emit dirty(); }
@ -28,7 +28,7 @@ MenuItemProperties::MenuItemProperties(const QString& menuName, const QString& m
{
}

MenuItemProperties::MenuItemProperties(const QString& menuName, const QString& menuItemName,
MenuItemProperties::MenuItemProperties(const QString& menuName, const QString& menuItemName,
const KeyEvent& shortcutKeyEvent, bool checkable, bool checked, bool separator) :
menuName(menuName),
menuItemName(menuItemName),
@ -50,13 +50,31 @@ QScriptValue menuItemPropertiesToScriptValue(QScriptEngine* engine, const MenuIt
return obj;
}

/**jsdoc
* `MenuItemProperties` is a list of properties that can be passed to Menu.addMenuItem
* to create a new menu item.
*
* If none of position, beforeItem, afterItem, or grouping are specified, the
* menu item will be placed in the last position.
*
* @typedef {Object} Menu.MenuItemProperties
* @property {string} menuName Name of the top-level menu
* @property {string} menuItemName Name of the menu item
* @property {bool} isCheckable Whether the menu item is checkable or not
* @property {bool} isChecked Whether the menu item is checked or not
* @property {string} shortcutKey An optional shortcut key to trigger the menu item.
* @property {int} position The position to place the new menu item. `0` is the first menu item.
* @property {string} beforeItem The name of the menu item to place this menu item before.
* @property {string} afterItem The name of the menu item to place this menu item after.
* @property {string} grouping The name of grouping to add this menu item to.
*/
void menuItemPropertiesFromScriptValue(const QScriptValue& object, MenuItemProperties& properties) {
properties.menuName = object.property("menuName").toVariant().toString();
properties.menuItemName = object.property("menuItemName").toVariant().toString();
properties.isCheckable = object.property("isCheckable").toVariant().toBool();
properties.isChecked = object.property("isChecked").toVariant().toBool();
properties.isSeparator = object.property("isSeparator").toVariant().toBool();

// handle the shortcut key options in order...
QScriptValue shortcutKeyValue = object.property("shortcutKey");
if (shortcutKeyValue.isValid()) {
@ -20,6 +20,24 @@

#include "GLMHelpers.h"

/**jsdoc
* A 2-dimensional vector.
*
* @typedef Vec2
* @property {float} x X-coordinate of the vector.
* @property {float} y Y-coordinate of the vector.
*/

/**jsdoc
* A 3-dimensional vector.
*
* @typedef Vec3
* @property {float} x X-coordinate of the vector.
* @property {float} y Y-coordinate of the vector.
* @property {float} z Z-coordinate of the vector.
*/


/// Scriptable interface for a Vec3 helper class object. Used exclusively in the JavaScript API
class Vec3 : public QObject {
Q_OBJECT
@ -197,7 +197,7 @@ GPUIdent* GPUIdent::ensureQuery(const QString& vendor, const QString& renderer)

ULONG uNumOfInstances = 0;
CComPtr<IWbemClassObject> spInstance = NULL;
hr = spEnumInst->Next(WBEM_INFINITE, 1, &spInstance, &uNumOfInstances);
hr = spEnumInst->Next(WBEM_INFINITE, 1, &spInstance.p, &uNumOfInstances);
while (hr == S_OK && spInstance && uNumOfInstances) {
// Get properties from the object
CComVariant var;
@ -16,6 +16,11 @@

#include "DependencyManager.h"

/**jsdoc
* @namespace Paths
* @readonly
* @property {string} resources The path to the resources directory.
*/
class PathUtils : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
@ -173,6 +173,11 @@ Item {
prop: "frameSetPipelineCount",
label: "Pipelines",
color: "#E2334D"
},
{
prop: "frameSetInputFormatCount",
label: "Input Formats",
color: "#1AC567"
}
]
}
@ -240,11 +240,8 @@ var toolBar = (function () {
hoverState: 3,
defaultState: 1
});
activeButton.clicked.connect(function () {
that.setActive(!isActive);
activeButton.writeProperty("buttonState", isActive ? 0 : 1);
activeButton.writeProperty("defaultState", isActive ? 0 : 1);
activeButton.writeProperty("hoverState", isActive ? 2 : 3);
activeButton.clicked.connect(function() {
that.toggle();
});

toolBar = Toolbars.getToolbar(EDIT_TOOLBAR);

@ -440,6 +437,14 @@
entityListTool.clearEntityList();
};


that.toggle = function () {
that.setActive(!isActive);
activeButton.writeProperty("buttonState", isActive ? 0 : 1);
activeButton.writeProperty("defaultState", isActive ? 0 : 1);
activeButton.writeProperty("hoverState", isActive ? 2 : 3);
};

that.setActive = function (active) {
if (active === isActive) {
return;

@ -1093,7 +1098,6 @@ function handeMenuEvent(menuItem) {
}
}
} else if (menuItem === "Import Entities" || menuItem === "Import Entities from URL") {

var importURL = null;
if (menuItem === "Import Entities") {
var fullPath = Window.browse("Select Model to Import", "", "*.json");

@ -1105,6 +1109,9 @@
}

if (importURL) {
if (!isActive && (Entities.canRez() && Entities.canRezTmp())) {
toolBar.toggle();
}
importSVO(importURL);
}
} else if (menuItem === "Entity List...") {

@ -1185,8 +1192,6 @@ function importSVO(importURL) {
if (isActive) {
selectionManager.setSelections(pastedEntityIDs);
}

Window.raiseMainWindow();
} else {
Window.notifyEditError("Can't import objects: objects would be out of bounds.");
}
@ -1,48 +1,48 @@
|
|||
<html>
|
||||
<head>
|
||||
<head>
|
||||
<title>Share</title>
|
||||
<link rel="stylesheet" type="text/css" href="css/edit-style.css">
|
||||
<link rel="stylesheet" type="text/css" href="css/SnapshotReview.css">
|
||||
<script type="text/javascript" src="qrc:///qtwebchannel/qwebchannel.js"></script>
|
||||
<script type="text/javascript" src="js/eventBridgeLoader.js"></script>
|
||||
<script type="text/javascript" src="js/SnapshotReview.js"></script>
|
||||
</head>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<body>
|
||||
<div class="snapshot-container">
|
||||
<div class="snapshot-column-left">
|
||||
<div class="snapsection">
|
||||
<label class="title">Snapshot successfully saved!</label>
|
||||
</div>
|
||||
<hr />
|
||||
<div class="snapsection">
|
||||
<div id="sharing">
|
||||
<div class="prompt">Would you like to share your pic in the Snapshots feed?</div>
|
||||
<div class="button">
|
||||
<span class="compound-button">
|
||||
<input type="button" class="blue" id="share" value="Share in Feed" onclick="shareSelected()"/>
|
||||
<span class="glyph"></span>
|
||||
</span>
|
||||
<div class="snapshot-column-left">
|
||||
<div class="snapsection">
|
||||
<label class="title">Snapshot successfully saved!</label>
|
||||
</div>
|
||||
<hr />
|
||||
<div class="snapsection">
|
||||
<div id="sharing">
|
||||
<div class="prompt">Would you like to share your pics in the Snapshots feed?</div>
|
||||
<div class="button">
|
||||
<span class="compound-button">
|
||||
<input type="button" class="blue" id="share" value="Share in Feed" onclick="shareSelected()" />
|
||||
<span class="glyph"></span>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="button">
|
||||
<input type="button" class="black" id="close" value="Don't Share" onclick="doNotShare()" />
|
||||
</div>
|
||||
</div>
|
||||
<hr />
|
||||
<div class="snapsection">
|
||||
<span class="setting">
|
||||
<input type="button" class="glyph naked" id="snapshotSettings" value="@" onclick="snapshotSettings()" />
|
||||
<label for="snapshotSettings">Snapshot settings</label>
|
||||
</span>
|
||||
<span class="setting checkbox">
|
||||
<input id="openFeed" type="checkbox" checked />
|
||||
<label for="openFeed">Open feed after</label>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="button">
|
||||
<input type="button" class="black" id="close" value="Don't Share" onclick="doNotShare()"/>
|
||||
</div>
|
||||
</div>
|
||||
<hr />
|
||||
<div class="snapsection">
|
||||
<span class="setting">
|
||||
<input type="button" class="glyph naked" id="snapshotSettings" value="@" onclick="snapshotSettings()" />
|
||||
<label for="snapshotSettings">Snapshot settings</label>
|
||||
</span>
|
||||
<span class="setting checkbox">
|
||||
<input id="openFeed" type="checkbox" checked/>
|
||||
<label for="openFeed">Open feed after</label>
|
||||
</span>
|
||||
<div id="snapshot-images" class="snapshot-column-right">
|
||||
</div>
|
||||
</div>
|
||||
<div id="snapshot-images" class="snapshot-column-right"/>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
@ -12,6 +12,9 @@

var paths = [], idCounter = 0, useCheckboxes;
function addImage(data) {
if (!data.localPath) {
return;
}
var div = document.createElement("DIV"),
input = document.createElement("INPUT"),
label = document.createElement("LABEL"),

@ -20,21 +23,22 @@ function addImage(data) {
function toggle() { data.share = input.checked; }
img.src = data.localPath;
div.appendChild(img);
data.share = true;
if (useCheckboxes) { // I'd rather use css, but the included stylesheet is quite particular.
// Our stylesheet(?) requires input.id to match label.for. Otherwise input doesn't display the check state.
label.setAttribute('for', id); // cannot do label.for =
input.id = id;
input.type = "checkbox";
input.checked = true;
input.checked = (id === "p0");
data.share = input.checked;
input.addEventListener('change', toggle);
div.class = "property checkbox";
div.appendChild(input);
div.appendChild(label);
} else {
data.share = true;
}
document.getElementById("snapshot-images").appendChild(div);
paths.push(data);

}
function handleShareButtons(shareMsg) {
var openFeed = document.getElementById('openFeed');

@ -49,7 +53,7 @@ function handleShareButtons(shareMsg) {
window.onload = function () {
// Something like the following will allow testing in a browser.
//addImage({localPath: 'c:/Users/howar/OneDrive/Pictures/hifi-snap-by--on-2016-07-27_12-58-43.jpg'});
//addImage({localPath: 'http://lorempixel.com/1512/1680'});
//addImage({ localPath: 'http://lorempixel.com/1512/1680' });
openEventBridge(function () {
// Set up a handler for receiving the data, and tell the .js we are ready to receive it.
EventBridge.scriptEventReceived.connect(function (message) {
@ -522,13 +522,13 @@ function onEditError(msg) {
}


function onSnapshotTaken(path, notify) {
function onSnapshotTaken(pathStillSnapshot, pathAnimatedSnapshot, notify) {
if (notify) {
var imageProperties = {
path: "file:///" + path,
path: "file:///" + pathStillSnapshot,
aspectRatio: Window.innerWidth / Window.innerHeight
};
createNotification(wordWrap("Snapshot saved to " + path), NotificationType.SNAPSHOT, imageProperties);
createNotification(wordWrap("Snapshot saved to " + pathStillSnapshot), NotificationType.SNAPSHOT, imageProperties);
}
}
@ -36,7 +36,7 @@ var SNAPSHOT_REVIEW_URL = Script.resolvePath("html/SnapshotReview.html");

var outstanding;
function confirmShare(data) {
var dialog = new OverlayWebWindow('Snapshot Review', SNAPSHOT_REVIEW_URL, 800, 320);
var dialog = new OverlayWebWindow('Snapshot Review', SNAPSHOT_REVIEW_URL, 800, 520);
function onMessage(message) {
// Receives message from the html dialog via the qwebchannel EventBridge. This is complicated by the following:
// 1. Although we can send POJOs, we cannot receive a toplevel object. (Arrays of POJOs are fine, though.)

@ -120,7 +120,7 @@ function onClicked() {

// take snapshot (with no notification)
Script.setTimeout(function () {
Window.takeSnapshot(false, 1.91);
Window.takeSnapshot(false, true, 1.91);
}, SNAPSHOT_DELAY);
}

@ -144,7 +144,7 @@ function isDomainOpen(id) {
response.total_entries;
}

function resetButtons(path, notify) {
function resetButtons(pathStillSnapshot, pathAnimatedSnapshot, notify) {
// show overlays if they were on
if (resetOverlays) {
Menu.setIsOptionChecked("Overlays", true);

@ -161,7 +161,8 @@ function resetButtons(path, notify) {

// last element in data array tells dialog whether we can share or not
confirmShare([
{ localPath: path },
{ localPath: pathAnimatedSnapshot },
{ localPath: pathStillSnapshot },
{
canShare: !!isDomainOpen(location.domainId),
openFeedAfterShare: shouldOpenFeedAfterShare()
@ -1,7 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Server Backup</title>
<title>High Fidelity Sandbox</title>
<script src="content-update.js"></script>
<link rel="stylesheet" type="text/css" href="content-update.css"></link>
</head>

@ -606,7 +606,7 @@ function checkNewContent() {
buttons: ['Yes', 'No'],
defaultId: 1,
cancelId: 1,
title: 'New home content',
title: 'High Fidelity Sandbox',
message: 'A newer version of the home content set is available.\nDo you wish to update?',
noLink: true,
}, function(idx) {

@ -16,6 +16,7 @@ exports.handlers = {
var dirList = [
'../../interface/src',
'../../interface/src/scripting',
'../../interface/src/ui/overlays',
'../../libraries/script-engine/src',
'../../libraries/networking/src',
'../../libraries/animation/src',