Merge branch 'feature/quest_custom_plugins' into feature/quest

Brad Davis 2019-02-12 09:53:12 -08:00
commit a6f23f48d6
136 changed files with 3190 additions and 2228 deletions

View file

@ -81,7 +81,11 @@ if (ANDROID)
set(GLES_OPTION ON)
set(PLATFORM_QT_COMPONENTS AndroidExtras WebView)
add_definitions(-DHIFI_ANDROID_APP=\"${HIFI_ANDROID_APP}\")
if (${HIFI_ANDROID_APP} STREQUAL "questInterface")
if (
(${HIFI_ANDROID_APP} STREQUAL "questInterface") OR
(${HIFI_ANDROID_APP} STREQUAL "questFramePlayer") OR
(${HIFI_ANDROID_APP} STREQUAL "framePlayer")
)
# We know the quest hardware has this extension, so we can force the use of instanced stereo
add_definitions(-DHAVE_EXT_clip_cull_distance)
# We can also use multiview stereo techniques
@ -89,9 +93,15 @@ if (ANDROID)
add_definitions(-DHAVE_OVR_multiview)
# We can also use our own foveated textures
add_definitions(-DHAVE_QCOM_texture_foveated)
# if set, the application itself or some library it depends on MUST implement
# `DisplayPluginList getDisplayPlugins()` and `InputPluginList getInputPlugins()`
add_definitions(-DCUSTOM_INPUT_PLUGINS)
add_definitions(-DCUSTOM_DISPLAY_PLUGINS)
set(PLATFORM_PLUGIN_LIBRARIES oculusMobile oculusMobilePlugin)
endif()
else ()
set(PLATFORM_QT_COMPONENTS WebEngine)
set(PLATFORM_QT_COMPONENTS WebEngine Xml)
endif ()
if (USE_GLES AND (NOT ANDROID))
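
Note on the new CUSTOM_INPUT_PLUGINS / CUSTOM_DISPLAY_PLUGINS definitions: per the comment above, the application or one of its libraries (here the oculusMobile / oculusMobilePlugin libraries added to PLATFORM_PLUGIN_LIBRARIES) must now supply `DisplayPluginList getDisplayPlugins()` and `InputPluginList getInputPlugins()`. A minimal sketch of what such a provider might look like, assuming DisplayPluginList / InputPluginList are vectors of shared plugin pointers; the OculusMobileDisplayPlugin / OculusMobileControllerManager class names and header paths are hypothetical, used only for illustration:

#include <memory>
#include <plugins/DisplayPlugin.h>            // assumed header locations
#include <plugins/InputPlugin.h>
#include "OculusMobileDisplayPlugin.h"        // hypothetical plugin classes from oculusMobilePlugin
#include "OculusMobileControllerManager.h"

DisplayPluginList getDisplayPlugins() {
    // Only the Quest display path is offered by this build.
    return { std::make_shared<OculusMobileDisplayPlugin>() };
}

InputPluginList getInputPlugins() {
    return { std::make_shared<OculusMobileControllerManager>() };
}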

View file

@ -38,6 +38,19 @@ const QString AVATAR_MIXER_LOGGING_NAME = "avatar-mixer";
// FIXME - what we'd actually like to do is send to users at ~50% of their present rate down to 30hz. Assume 90 for now.
const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
const QRegularExpression AvatarMixer::suffixedNamePattern { R"(^\s*(.+)\s*_(\d)+\s*$)" };
// Lexicographic comparison:
bool AvatarMixer::SessionDisplayName::operator<(const SessionDisplayName& rhs) const {
if (_baseName < rhs._baseName) {
return true;
} else if (rhs._baseName < _baseName) {
return false;
} else {
return _suffix < rhs._suffix;
}
}
AvatarMixer::AvatarMixer(ReceivedMessage& message) :
ThreadedAssignment(message),
_slavePool(&_slaveSharedData)
@ -313,27 +326,40 @@ void AvatarMixer::manageIdentityData(const SharedNodePointer& node) {
bool sendIdentity = false;
if (nodeData && nodeData->getAvatarSessionDisplayNameMustChange()) {
AvatarData& avatar = nodeData->getAvatar();
const QString& existingBaseDisplayName = nodeData->getBaseDisplayName();
if (--_sessionDisplayNames[existingBaseDisplayName].second <= 0) {
_sessionDisplayNames.remove(existingBaseDisplayName);
const QString& existingBaseDisplayName = nodeData->getAvatar().getSessionDisplayName();
if (!existingBaseDisplayName.isEmpty()) {
SessionDisplayName existingDisplayName { existingBaseDisplayName };
auto suffixMatch = suffixedNamePattern.match(existingBaseDisplayName);
if (suffixMatch.hasMatch()) {
existingDisplayName._baseName = suffixMatch.captured(1);
existingDisplayName._suffix = suffixMatch.captured(2).toInt();
}
_sessionDisplayNames.erase(existingDisplayName);
}
QString baseName = avatar.getDisplayName().trimmed();
const QRegularExpression curses { "fuck|shit|damn|cock|cunt" }; // POC. We may eventually want something much more elaborate (subscription?).
baseName = baseName.replace(curses, "*"); // Replace rather than remove, so that people have a clue that the person's a jerk.
const QRegularExpression trailingDigits { "\\s*(_\\d+\\s*)?(\\s*\\n[^$]*)?$" }; // trailing whitespace "_123" and any subsequent lines
static const QRegularExpression trailingDigits { R"(\s*(_\d+\s*)?(\s*\n[^$]*)?$)" }; // trailing whitespace "_123" and any subsequent lines
baseName = baseName.remove(trailingDigits);
if (baseName.isEmpty()) {
baseName = "anonymous";
}
QPair<int, int>& soFar = _sessionDisplayNames[baseName]; // Inserts and answers 0, 0 if not already present, which is what we want.
int& highWater = soFar.first;
nodeData->setBaseDisplayName(baseName);
QString sessionDisplayName = (highWater > 0) ? baseName + "_" + QString::number(highWater) : baseName;
SessionDisplayName newDisplayName { baseName };
auto nameIter = _sessionDisplayNames.lower_bound(newDisplayName);
if (nameIter != _sessionDisplayNames.end() && nameIter->_baseName == baseName) {
// Existing instance(s) of name; find first free suffix
while (*nameIter == newDisplayName && ++newDisplayName._suffix && ++nameIter != _sessionDisplayNames.end())
;
}
_sessionDisplayNames.insert(newDisplayName);
QString sessionDisplayName = (newDisplayName._suffix > 0) ? baseName + "_" + QString::number(newDisplayName._suffix) : baseName;
avatar.setSessionDisplayName(sessionDisplayName);
highWater++;
soFar.second++; // refcount
nodeData->setBaseDisplayName(baseName);
nodeData->flagIdentityChange();
nodeData->setAvatarSessionDisplayNameMustChange(false);
sendIdentity = true;
@ -409,10 +435,19 @@ void AvatarMixer::handleAvatarKilled(SharedNodePointer avatarNode) {
{ // decrement sessionDisplayNames table and possibly remove
QMutexLocker nodeDataLocker(&avatarNode->getLinkedData()->getMutex());
AvatarMixerClientData* nodeData = dynamic_cast<AvatarMixerClientData*>(avatarNode->getLinkedData());
const QString& baseDisplayName = nodeData->getBaseDisplayName();
// No sense guarding against very rare case of a node with no entry, as this will work without the guard and do one less lookup in the common case.
if (--_sessionDisplayNames[baseDisplayName].second <= 0) {
_sessionDisplayNames.remove(baseDisplayName);
const QString& displayName = nodeData->getAvatar().getSessionDisplayName();
SessionDisplayName exitingDisplayName { displayName };
auto suffixMatch = suffixedNamePattern.match(displayName);
if (suffixMatch.hasMatch()) {
exitingDisplayName._baseName = suffixMatch.captured(1);
exitingDisplayName._suffix = suffixMatch.captured(2).toInt();
}
auto displayNameIter = _sessionDisplayNames.find(exitingDisplayName);
if (displayNameIter == _sessionDisplayNames.end()) {
qCDebug(avatars) << "Exiting avatar displayname" << displayName << "not found";
} else {
_sessionDisplayNames.erase(displayNameIter);
}
}
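
The display-name bookkeeping above replaces the old QHash of (high-water mark, refcount) pairs with an ordered std::set<SessionDisplayName>: an exiting avatar's entry is erased after parsing any "_N" suffix back out with suffixedNamePattern, and a joining avatar receives the first suffix not already present, found via lower_bound and a walk over consecutive entries. A standalone sketch of that suffix search, using a plain std::pair in place of SessionDisplayName (names here are illustrative only):

#include <set>
#include <string>
#include <utility>

// (baseName, suffix); std::pair orders lexicographically, like SessionDisplayName::operator<.
using Name = std::pair<std::string, int>;

int firstFreeSuffix(const std::set<Name>& taken, const std::string& base) {
    Name candidate { base, 0 };
    auto it = taken.lower_bound(candidate);
    // Walk past consecutive suffixes already in use: base, base_1, base_2, ...
    while (it != taken.end() && *it == candidate) {
        ++candidate.second;
        ++it;
    }
    return candidate.second;   // 0 means the undecorated base name is free
}

The caller then inserts the chosen (base, suffix) pair into the set and appends "_" + suffix to the session display name when the suffix is nonzero, as in manageIdentityData above.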

View file

@ -15,6 +15,7 @@
#ifndef hifi_AvatarMixer_h
#define hifi_AvatarMixer_h
#include <set>
#include <shared/RateCounter.h>
#include <PortableHighResolutionClock.h>
@ -88,7 +89,24 @@ private:
RateCounter<> _broadcastRate;
p_high_resolution_clock::time_point _lastDebugMessage;
QHash<QString, QPair<int, int>> _sessionDisplayNames;
// Pair of basename + uniquifying integer suffix.
struct SessionDisplayName {
explicit SessionDisplayName(QString baseName = QString(), int suffix = 0) :
_baseName(baseName),
_suffix(suffix) { }
// Does lexicographic ordering:
bool operator<(const SessionDisplayName& rhs) const;
bool operator==(const SessionDisplayName& rhs) const {
return _baseName == rhs._baseName && _suffix == rhs._suffix;
}
QString _baseName;
int _suffix;
};
static const QRegularExpression suffixedNamePattern;
std::set<SessionDisplayName> _sessionDisplayNames;
quint64 _displayNameManagementElapsedTime { 0 }; // total time spent in broadcastAvatarData/display name management... since last stats window
quint64 _ignoreCalculationElapsedTime { 0 };

View file

@ -0,0 +1,36 @@
#
# FixupNitpick.cmake
# cmake/macros
#
# Copyright 2019 High Fidelity, Inc.
# Created by Nissim Hadar on January 14th, 2016
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(fixup_nitpick)
if (APPLE)
string(REPLACE " " "\\ " ESCAPED_BUNDLE_NAME ${NITPICK_BUNDLE_NAME})
string(REPLACE " " "\\ " ESCAPED_INSTALL_PATH ${NITPICK_INSTALL_DIR})
set(_NITPICK_INSTALL_PATH "${ESCAPED_INSTALL_PATH}/${ESCAPED_BUNDLE_NAME}.app")
find_program(MACDEPLOYQT_COMMAND macdeployqt PATHS "${QT_DIR}/bin" NO_DEFAULT_PATH)
if (NOT MACDEPLOYQT_COMMAND AND (PRODUCTION_BUILD OR PR_BUILD))
message(FATAL_ERROR "Could not find macdeployqt at ${QT_DIR}/bin.\
It is required to produce a relocatable nitpick application.\
Check that the environment variable QT_DIR points to your Qt installation.\
")
endif ()
install(CODE "
execute_process(COMMAND ${MACDEPLOYQT_COMMAND}\
\${CMAKE_INSTALL_PREFIX}/${_NITPICK_INSTALL_PATH}/\
-verbose=2 -qmldir=${CMAKE_SOURCE_DIR}/interface/resources/qml/\
)"
COMPONENT ${CLIENT_COMPONENT}
)
endif ()
endmacro()

View file

@ -77,6 +77,9 @@ macro(SET_PACKAGING_PARAMETERS)
add_definitions(-DDEV_BUILD)
endif ()
set(NITPICK_BUNDLE_NAME "nitpick")
set(NITPICK_ICON_PREFIX "nitpick")
string(TIMESTAMP BUILD_TIME "%d/%m/%Y")
# if STABLE_BUILD is 1, PRODUCTION_BUILD must be 1 and
@ -140,8 +143,9 @@ macro(SET_PACKAGING_PARAMETERS)
set(DMG_SUBFOLDER_ICON "${HF_CMAKE_DIR}/installer/install-folder.rsrc")
set(CONSOLE_INSTALL_DIR ${DMG_SUBFOLDER_NAME})
set(CONSOLE_INSTALL_DIR ${DMG_SUBFOLDER_NAME})
set(INTERFACE_INSTALL_DIR ${DMG_SUBFOLDER_NAME})
set(NITPICK_INSTALL_DIR ${DMG_SUBFOLDER_NAME})
if (CLIENT_ONLY)
set(CONSOLE_EXEC_NAME "Console.app")
@ -159,11 +163,14 @@ macro(SET_PACKAGING_PARAMETERS)
set(INTERFACE_INSTALL_APP_PATH "${CONSOLE_INSTALL_DIR}/${INTERFACE_BUNDLE_NAME}.app")
set(INTERFACE_ICON_FILENAME "${INTERFACE_ICON_PREFIX}.icns")
set(NITPICK_ICON_FILENAME "${NITPICK_ICON_PREFIX}.icns")
else ()
if (WIN32)
set(CONSOLE_INSTALL_DIR "server-console")
set(NITPICK_INSTALL_DIR "nitpick")
else ()
set(CONSOLE_INSTALL_DIR ".")
set(NITPICK_INSTALL_DIR ".")
endif ()
set(COMPONENT_INSTALL_DIR ".")
@ -173,6 +180,7 @@ macro(SET_PACKAGING_PARAMETERS)
if (WIN32)
set(INTERFACE_EXEC_PREFIX "interface")
set(INTERFACE_ICON_FILENAME "${INTERFACE_ICON_PREFIX}.ico")
set(NITPICK_ICON_FILENAME "${NITPICK_ICON_PREFIX}.ico")
set(CONSOLE_EXEC_NAME "server-console.exe")

View file

@ -217,8 +217,8 @@ link_hifi_libraries(
ui-plugins display-plugins input-plugins
# Platform specific GL libraries
${PLATFORM_GL_BACKEND}
# Platform specific display plugins libraries
${PLATFORM_DISPLAY_PLUGINS}
# Platform specific input & display plugin libraries
${PLATFORM_PLUGIN_LIBRARIES}
shaders
)

Binary file not shown.


View file

@ -10,10 +10,6 @@ joint = jointRoot = Hips
joint = jointLeftHand = LeftHand
joint = jointRightHand = RightHand
joint = jointHead = Head
freeJoint = LeftArm
freeJoint = LeftForeArm
freeJoint = RightArm
freeJoint = RightForeArm
bs = JawOpen = mouth_Open = 1
bs = LipsFunnel = Oo = 1
bs = BrowsU_L = brow_Up = 1

View file

@ -0,0 +1,108 @@
//
// QmlWebWindow.qml
//
// Created by Bradley Austin Davis on 17 Dec 2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtWebEngine 1.1
import QtWebChannel 1.0
import "qrc:////qml//windows" as Windows
import controlsUit 1.0 as Controls
import stylesUit 1.0
Windows.ScrollingWindow {
id: root
HifiConstants { id: hifi }
title: "WebWindow"
resizable: true
shown: false
// Don't destroy on close... otherwise the JS/C++ will have a dangling pointer
destroyOnCloseButton: false
property alias source: webview.url
property alias scriptUrl: webview.userScriptUrl
// This is for JS/QML communication, which is unused in a WebWindow,
// but not having this here results in spurious warnings about a
// missing signal
signal sendToScript(var message);
signal moved(vector2d position);
signal resized(size size);
function notifyMoved() {
moved(Qt.vector2d(x, y));
}
function notifyResized() {
resized(Qt.size(width, height));
}
onXChanged: notifyMoved();
onYChanged: notifyMoved();
onWidthChanged: notifyResized();
onHeightChanged: notifyResized();
onShownChanged: {
keyboardEnabled = HMD.active;
}
Item {
width: pane.contentWidth
implicitHeight: pane.scrollHeight
Controls.WebView {
id: webview
url: "about:blank"
anchors.fill: parent
focus: true
profile: HFWebEngineProfile;
property string userScriptUrl: ""
// Create a global EventBridge object for raiseAndLowerKeyboard.
WebEngineScript {
id: createGlobalEventBridge
sourceCode: eventBridgeJavaScriptToInject
injectionPoint: WebEngineScript.DocumentCreation
worldId: WebEngineScript.MainWorld
}
// Detect when may want to raise and lower keyboard.
WebEngineScript {
id: raiseAndLowerKeyboard
injectionPoint: WebEngineScript.Deferred
sourceUrl: resourceDirectoryUrl + "/html/raiseAndLowerKeyboard.js"
worldId: WebEngineScript.MainWorld
}
// User script.
WebEngineScript {
id: userScript
sourceUrl: webview.userScriptUrl
injectionPoint: WebEngineScript.DocumentReady // DOM ready but page load may not be finished.
worldId: WebEngineScript.MainWorld
}
userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard, userScript ]
function onWebEventReceived(event) {
if (event.slice(0, 17) === "CLARA.IO DOWNLOAD") {
ApplicationInterface.addAssetToWorldFromURL(event.slice(18));
}
}
Component.onCompleted: {
webChannel.registerObject("eventBridge", eventBridge);
webChannel.registerObject("eventBridgeWrapper", eventBridgeWrapper);
eventBridge.webEventReceived.connect(onWebEventReceived);
}
}
}
}

View file

@ -9,8 +9,6 @@
//
import QtQuick 2.5
import QtWebView 1.1
import QtWebChannel 1.0
import "windows" as Windows
import controlsUit 1.0 as Controls
@ -60,22 +58,9 @@ Windows.ScrollingWindow {
Controls.WebView {
id: webview
url: "about:blank"
property string userScriptUrl: ""
anchors.fill: parent
focus: true
property string userScriptUrl: ""
function onWebEventReceived(event) {
if (event.slice(0, 17) === "CLARA.IO DOWNLOAD") {
ApplicationInterface.addAssetToWorldFromURL(event.slice(18));
}
}
Component.onCompleted: {
webChannel.registerObject("eventBridge", eventBridge);
webChannel.registerObject("eventBridgeWrapper", eventBridgeWrapper);
eventBridge.webEventReceived.connect(onWebEventReceived);
}
}
}
}

View file

@ -0,0 +1,24 @@
//
// WebSpinner.qml
//
// Created by David Rowe on 23 May 2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtWebEngine 1.5
AnimatedImage {
property WebEngineView webview: parent
source: "qrc:////icons//loader-snake-64-w.gif"
visible: webview.loading && /^(http.*|)$/i.test(webview.url.toString())
playing: visible
z: 10000
anchors {
horizontalCenter: parent.horizontalCenter
verticalCenter: parent.verticalCenter
}
}

View file

@ -9,11 +9,15 @@
//
import QtQuick 2.5
import QtWebView 1.1
AnimatedImage {
property WebView webview: parent
source: "../../icons/loader-snake-64-w.gif"
Image {
Item {
id: webView
property bool loading: false
property string url: ""
}
source: "qrc:////images//unsupportedImage.png"
visible: webview.loading && /^(http.*|)$/i.test(webview.url.toString())
playing: visible
z: 10000

View file

@ -0,0 +1,44 @@
import QtQuick 2.7
import QtWebEngine 1.5
Item {
id: root
property bool webViewProfileSetup: false
property string currentUrl: ""
property string downloadUrl: ""
property string adaptedPath: ""
property string tempDir: ""
function setupWebEngineSettings() {
WebEngine.settings.javascriptCanOpenWindows = true;
WebEngine.settings.javascriptCanAccessClipboard = false;
WebEngine.settings.spatialNavigationEnabled = false;
WebEngine.settings.localContentCanAccessRemoteUrls = true;
}
function initWebviewProfileHandlers(profile) {
downloadUrl = currentUrl;
if (webViewProfileSetup) return;
webViewProfileSetup = true;
profile.downloadRequested.connect(function(download){
adaptedPath = File.convertUrlToPath(downloadUrl);
tempDir = File.getTempDir();
download.path = tempDir + "/" + adaptedPath;
download.accept();
if (download.state === WebEngineDownloadItem.DownloadInterrupted) {
console.log("download failed to complete");
}
})
profile.downloadFinished.connect(function(download){
if (download.state === WebEngineDownloadItem.DownloadCompleted) {
File.runUnzip(download.path, downloadUrl, autoAdd);
} else {
console.log("The download was corrupted, state: " + download.state);
}
autoAdd = false;
})
}
}

View file

@ -1,5 +1,4 @@
import QtQuick 2.7
import QtWebView 1.1
import Qt.labs.settings 1.0 as QtSettings
import QtQuick.Controls 2.3
@ -87,15 +86,21 @@ OriginalDesktop.Desktop {
return map;
})({});
Component.onCompleted: {
webEngineConfig.setupWebEngineSettings();
}
// Accept a download through the webview
property bool webViewProfileSetup: false
property string currentUrl: ""
property string downloadUrl: ""
property string adaptedPath: ""
property string tempDir: ""
property alias webViewProfileSetup: webEngineConfig.webViewProfileSetup
property alias currentUrl: webEngineConfig.currentUrl
property alias downloadUrl: webEngineConfig.downloadUrl
property alias adaptedPath: webEngineConfig.adaptedPath
property alias tempDir: webEngineConfig.tempDir
property var initWebviewProfileHandlers: webEngineConfig.initWebviewProfileHandlers
property bool autoAdd: false
function initWebviewProfileHandlers(profile) {
DesktopWebEngine {
id: webEngineConfig
}
function setAutoAdd(auto) {

View file

@ -0,0 +1,17 @@
import QtQuick 2.7
Item {
id: root
property bool webViewProfileSetup: false
property string currentUrl: ""
property string downloadUrl: ""
property string adaptedPath: ""
property string tempDir: ""
function setupWebEngineSettings() {
}
function initWebviewProfileHandlers(profile) {
}
}

View file

@ -1,358 +0,0 @@
//
// WebBrowser.qml
//
//
// Created by Vlad Stelmahovsky on 06/22/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.7
import QtQuick.Controls 2.2 as QQControls
import QtQuick.Layouts 1.3
import QtGraphicalEffects 1.0
import QtWebView 1.1
import QtWebChannel 1.0
import stylesUit 1.0
import controlsUit 1.0 as HifiControls
import "../windows"
import "../controls"
import HifiWeb 1.0
Rectangle {
id: root;
HifiConstants { id: hifi; }
property string title: "";
signal sendToScript(var message);
property bool keyboardEnabled: true // FIXME - Keyboard HMD only: Default to false
property bool keyboardRaised: false
property bool punctuationMode: false
property var suggestionsList: []
readonly property string searchUrlTemplate: "https://www.google.com/search?client=hifibrowser&q=";
WebBrowserSuggestionsEngine {
id: searchEngine
onSuggestions: {
if (suggestions.length > 0) {
suggestionsList = []
suggestionsList.push(addressBarInput.text); //do not overwrite edit text
for(var i = 0; i < suggestions.length; i++) {
suggestionsList.push(suggestions[i]);
}
addressBar.model = suggestionsList
if (!addressBar.popup.visible) {
addressBar.popup.open();
}
}
}
}
Timer {
id: suggestionRequestTimer
interval: 200
repeat: false
onTriggered: {
if (addressBar.editText !== "") {
searchEngine.querySuggestions(addressBarInput.text);
}
}
}
color: hifi.colors.baseGray;
function goTo(url) {
//must be valid attempt to open an site with dot
var urlNew = url
if (url.indexOf(".") > 0) {
if (url.indexOf("http") < 0) {
urlNew = "http://" + url;
}
} else {
urlNew = searchUrlTemplate + url
}
addressBar.model = []
//need to rebind if binfing was broken by selecting from suggestions
addressBar.editText = Qt.binding( function() { return webStack.currentItem.webEngineView.url; });
webStack.currentItem.webEngineView.url = urlNew
suggestionRequestTimer.stop();
addressBar.popup.close();
}
Column {
spacing: 2
width: parent.width;
RowLayout {
id: addressBarRow
width: parent.width;
height: 48
HifiControls.WebGlyphButton {
enabled: webStack.currentItem.webEngineView.canGoBack || webStack.depth > 1
glyph: hifi.glyphs.backward;
anchors.verticalCenter: parent.verticalCenter;
size: 38;
onClicked: {
if (webStack.currentItem.webEngineView.canGoBack) {
webStack.currentItem.webEngineView.goBack();
} else if (webStack.depth > 1) {
webStack.pop();
}
}
}
HifiControls.WebGlyphButton {
enabled: webStack.currentItem.webEngineView.canGoForward
glyph: hifi.glyphs.forward;
anchors.verticalCenter: parent.verticalCenter;
size: 38;
onClicked: {
webStack.currentItem.webEngineView.goForward();
}
}
QQControls.ComboBox {
id: addressBar
//selectByMouse: true
focus: true
editable: true
//flat: true
indicator: Item {}
background: Item {}
onActivated: {
goTo(textAt(index));
}
onHighlightedIndexChanged: {
if (highlightedIndex >= 0) {
addressBar.editText = textAt(highlightedIndex)
}
}
popup.height: webStack.height
onFocusChanged: {
if (focus) {
addressBarInput.selectAll();
}
}
contentItem: QQControls.TextField {
id: addressBarInput
leftPadding: 26
rightPadding: hifi.dimensions.controlLineHeight + 5
text: addressBar.editText
placeholderText: qsTr("Enter URL")
font: addressBar.font
selectByMouse: true
horizontalAlignment: Text.AlignLeft
verticalAlignment: Text.AlignVCenter
onFocusChanged: {
if (focus) {
selectAll();
}
}
Keys.onDeletePressed: {
addressBarInput.text = ""
}
Keys.onPressed: {
if (event.key === Qt.Key_Return) {
goTo(addressBarInput.text);
event.accepted = true;
}
}
Image {
anchors.verticalCenter: parent.verticalCenter;
x: 5
z: 2
id: faviconImage
width: 16; height: 16
sourceSize: Qt.size(width, height)
source: webStack.currentItem.webEngineView.icon
}
HifiControls.WebGlyphButton {
glyph: webStack.currentItem.WebView.Loading ? hifi.glyphs.closeSmall : hifi.glyphs.reloadSmall;
anchors.verticalCenter: parent.verticalCenter;
width: hifi.dimensions.controlLineHeight
z: 2
x: addressBarInput.width - implicitWidth
onClicked: {
if (webStack.currentItem.WebView.Loading) {
webStack.currentItem.webEngineView.stop();
} else {
webStack.currentItem.reloadTimer.start();
}
}
}
}
Component.onCompleted: ScriptDiscoveryService.scriptsModelFilter.filterRegExp = new RegExp("^.*$", "i");
Keys.onPressed: {
if (event.key === Qt.Key_Return) {
goTo(addressBarInput.text);
event.accepted = true;
}
}
onEditTextChanged: {
if (addressBar.editText !== "" && addressBar.editText !== webStack.currentItem.webEngineView.url.toString()) {
suggestionRequestTimer.restart();
} else {
addressBar.model = []
addressBar.popup.close();
}
}
Layout.fillWidth: true
editText: webStack.currentItem.webEngineView.url
onAccepted: goTo(addressBarInput.text);
}
HifiControls.WebGlyphButton {
checkable: true
checked: webStack.currentItem.webEngineView.audioMuted
glyph: checked ? hifi.glyphs.unmuted : hifi.glyphs.muted
anchors.verticalCenter: parent.verticalCenter;
width: hifi.dimensions.controlLineHeight
onClicked: {
webStack.currentItem.webEngineView.audioMuted = !webStack.currentItem.webEngineView.audioMuted
}
}
}
QQControls.ProgressBar {
id: loadProgressBar
background: Rectangle {
implicitHeight: 2
color: "#6A6A6A"
}
contentItem: Item {
implicitHeight: 2
Rectangle {
width: loadProgressBar.visualPosition * parent.width
height: parent.height
color: "#00B4EF"
}
}
width: parent.width;
from: 0
to: 100
value: webStack.currentItem.WebView.LoadProgress
height: 2
}
Component {
id: webViewComponent
Rectangle {
property alias webEngineView: webEngineView
property alias reloadTimer: reloadTimer
property var request: null
property bool isDialog: QQControls.StackView.index > 0
property real margins: isDialog ? 10 : 0
color: "#d1d1d1"
QQControls.StackView.onActivated: {
addressBar.editText = Qt.binding( function() { return webStack.currentItem.webEngineView.url; });
}
onRequestChanged: {
if (isDialog && request !== null && request !== undefined) {//is Dialog ?
request.openIn(webEngineView);
}
}
HifiControls.BaseWebView {
id: webEngineView
anchors.fill: parent
anchors.margins: parent.margins
layer.enabled: parent.isDialog
layer.effect: DropShadow {
verticalOffset: 8
horizontalOffset: 8
color: "#330066ff"
samples: 10
spread: 0.5
}
focus: true
objectName: "tabletWebEngineView"
profile.httpUserAgent: "Mozilla/5.0 (Android; Mobile; rv:13.0) Gecko/13.0 Firefox/13.0"
property string userScriptUrl: ""
onLoadingChanged: {
if (!loading) {
addressBarInput.cursorPosition = 0 //set input field cursot to beginning
suggestionRequestTimer.stop();
addressBar.popup.close();
}
}
onLinkHovered: {
//TODO: change cursor shape?
}
Component.onCompleted: {
webChannel.registerObject("eventBridge", eventBridge);
webChannel.registerObject("eventBridgeWrapper", eventBridgeWrapper);
}
}
Timer {
id: reloadTimer
interval: 0
running: false
repeat: false
onTriggered: webEngineView.reload()
}
}
}
QQControls.StackView {
id: webStack
width: parent.width;
property real webViewHeight: root.height - loadProgressBar.height - 48 - 4
height: keyboardEnabled && keyboardRaised ? webViewHeight - keyboard.height : webViewHeight
Component.onCompleted: webStack.push(webViewComponent, {"webEngineView.url": "https://www.highfidelity.com"});
}
}
HifiControls.Keyboard {
id: keyboard
raised: parent.keyboardEnabled && parent.keyboardRaised
numeric: parent.punctuationMode
anchors {
left: parent.left
right: parent.right
bottom: parent.bottom
}
}
}

View file

@ -16,7 +16,13 @@ import TabletScriptingInterface 1.0
Rectangle {
readonly property var level: AudioScriptingInterface.inputLevel;
property bool gated: false;
Component.onCompleted: {
AudioScriptingInterface.noiseGateOpened.connect(function() { gated = false; });
AudioScriptingInterface.noiseGateClosed.connect(function() { gated = true; });
}
property bool standalone: false;
property var dragTarget: null;
@ -77,6 +83,7 @@ Rectangle {
readonly property string gutter: "#575757";
readonly property string greenStart: "#39A38F";
readonly property string greenEnd: "#1FC6A6";
readonly property string yellow: "#C0C000";
readonly property string red: colors.muted;
readonly property string fill: "#55000000";
readonly property string border: standalone ? "#80FFFFFF" : "#55FFFFFF";
@ -189,7 +196,7 @@ Rectangle {
Rectangle { // mask
id: mask;
width: parent.width * level;
width: gated ? 0 : parent.width * level;
radius: 5;
anchors {
bottom: parent.bottom;
@ -212,18 +219,42 @@ Rectangle {
color: colors.greenStart;
}
GradientStop {
position: 0.8;
position: 0.5;
color: colors.greenEnd;
}
GradientStop {
position: 0.81;
color: colors.red;
}
GradientStop {
position: 1;
color: colors.red;
color: colors.yellow;
}
}
}
Rectangle {
id: gatedIndicator;
visible: gated && !AudioScriptingInterface.clipping
radius: 4;
width: 2 * radius;
height: 2 * radius;
color: "#0080FF";
anchors {
right: parent.left;
verticalCenter: parent.verticalCenter;
}
}
Rectangle {
id: clippingIndicator;
visible: AudioScriptingInterface.clipping
radius: 4;
width: 2 * radius;
height: 2 * radius;
color: colors.red;
anchors {
left: parent.right;
verticalCenter: parent.verticalCenter;
}
}
}
}

View file

@ -38,8 +38,8 @@ Rectangle {
property bool keyboardEnabled: HMD.active
property bool keyboardRaised: false
property string searchScopeString: "Featured"
property bool isLoggedIn: false;
property bool supports3DHTML: true;
property bool isLoggedIn: false
property bool supports3DHTML: true
anchors.fill: (typeof parent === undefined) ? undefined : parent
@ -49,7 +49,7 @@ Rectangle {
licenseInfo.visible = false;
marketBrowseModel.getFirstPage();
{
if(root.searchString !== undefined && root.searchString !== "") {
if(root.searchString !== undefined && root.searchString !== "") {
root.searchScopeString = "Search Results: \"" + root.searchString + "\"";
} else if (root.categoryString !== "") {
root.searchScopeString = root.categoryString;
@ -498,7 +498,7 @@ Rectangle {
"",
"",
root.sortString,
false,
WalletScriptingInterface.limitedCommerce,
marketBrowseModel.currentPageToRetrieve,
marketBrowseModel.itemsPerPage
);

View file

@ -14,8 +14,6 @@
import Hifi 1.0 as Hifi
import QtQuick 2.9
import QtQuick.Controls 2.2
import QtGraphicalEffects 1.0
import QtWebEngine 1.5
import stylesUit 1.0
import controlsUit 1.0 as HifiControlsUit
import "../../../controls" as HifiControls
@ -424,13 +422,13 @@ Rectangle {
elide: Text.ElideRight
size: 14
color: model.link ? hifi.colors.blueHighlight : hifi.colors.baseGray
color: (model.link && root.supports3DHTML)? hifi.colors.blueHighlight : hifi.colors.baseGray
verticalAlignment: Text.AlignVCenter
MouseArea {
anchors.fill: parent
onClicked: {
if (model.link) {
if (model.link && root.supports3DHTML) {
sendToScript({method: 'marketplace_open_link', link: model.link});
}
}
@ -571,12 +569,14 @@ Rectangle {
text: root.description
size: 14
color: hifi.colors.lightGray
linkColor: hifi.colors.blueHighlight
linkColor: root.supports3DHTML ? hifi.colors.blueHighlight : hifi.colors.lightGray
verticalAlignment: Text.AlignVCenter
textFormat: Text.RichText
wrapMode: Text.Wrap
onLinkActivated: {
sendToScript({method: 'marketplace_open_link', link: link});
if (root.supports3DHTML) {
sendToScript({method: 'marketplace_open_link', link: link});
}
}
onHeightChanged: { footer.evalHeight(); }

View file

@ -1,6 +1,4 @@
import QtQuick 2.0
import QtWebView 1.1
import "../../controls" as Controls
Controls.TabletWebView {

View file

@ -2,7 +2,6 @@ import QtQuick 2.5
import QtGraphicalEffects 1.0
import QtQuick.Controls 1.4
import QtQml 2.2
import QtWebChannel 1.0
import "."
import stylesUit 1.0

View file

@ -1,5 +1,4 @@
import QtQuick 2.0
import "../../controls" as Controls
Controls.TabletWebScreen {

View file

@ -1,5 +1,4 @@
import QtQuick 2.0
import "../../controls" as Controls
Controls.WebView {

View file

@ -4980,10 +4980,10 @@ void Application::idle() {
// Normally we check PipelineWarnings, but since idle will often take more than 10ms we only show these idle timing
// details if we're in ExtraDebugging mode. However, the ::update() and its subcomponents will show their timing
// details normally.
#ifndef Q_OS_ANDROID
bool showWarnings = getLogger()->extraDebugging();
#else
#ifdef Q_OS_ANDROID
bool showWarnings = false;
#else
bool showWarnings = getLogger()->extraDebugging();
#endif
PerformanceWarning warn(showWarnings, "idle()");
@ -8328,7 +8328,7 @@ void Application::toggleLogDialog() {
bool keepOnTop =_keepLogWindowOnTop.get();
#ifdef Q_OS_WIN
_logDialog = new LogDialog(keepOnTop ? qApp->getWindow() : nullptr, getLogger());
#else
#elif !defined(Q_OS_ANDROID)
_logDialog = new LogDialog(nullptr, getLogger());
if (keepOnTop) {

View file

@ -294,13 +294,6 @@ void ModelPackager::populateBasicMapping(QVariantHash& mapping, QString filename
}
mapping.insert(JOINT_FIELD, joints);
if (!mapping.contains(FREE_JOINT_FIELD)) {
mapping.insertMulti(FREE_JOINT_FIELD, "LeftArm");
mapping.insertMulti(FREE_JOINT_FIELD, "LeftForeArm");
mapping.insertMulti(FREE_JOINT_FIELD, "RightArm");
mapping.insertMulti(FREE_JOINT_FIELD, "RightForeArm");
}
// If there are no blendshape mappings, and we detect that this is likely a mixamo file,
// then we can add the default mixamo to "faceshift" mappings

View file

@ -58,11 +58,6 @@ _hfmModel(hfmModel)
form->addRow("Left Hand Joint:", _leftHandJoint = createJointBox());
form->addRow("Right Hand Joint:", _rightHandJoint = createJointBox());
form->addRow("Free Joints:", _freeJoints = new QVBoxLayout());
QPushButton* newFreeJoint = new QPushButton("New Free Joint");
_freeJoints->addWidget(newFreeJoint);
connect(newFreeJoint, SIGNAL(clicked(bool)), SLOT(createNewFreeJoint()));
QDialogButtonBox* buttons = new QDialogButtonBox(QDialogButtonBox::Ok |
QDialogButtonBox::Cancel | QDialogButtonBox::Reset);
connect(buttons, SIGNAL(accepted()), SLOT(accept()));
@ -102,11 +97,6 @@ QVariantHash ModelPropertiesDialog::getMapping() const {
insertJointMapping(joints, "jointLeftHand", _leftHandJoint->currentText());
insertJointMapping(joints, "jointRightHand", _rightHandJoint->currentText());
mapping.remove(FREE_JOINT_FIELD);
for (int i = 0; i < _freeJoints->count() - 1; i++) {
QComboBox* box = static_cast<QComboBox*>(_freeJoints->itemAt(i)->widget()->layout()->itemAt(0)->widget());
mapping.insertMulti(FREE_JOINT_FIELD, box->currentText());
}
mapping.insert(JOINT_FIELD, joints);
return mapping;
@ -133,16 +123,6 @@ void ModelPropertiesDialog::reset() {
setJointText(_headJoint, jointHash.value("jointHead").toString());
setJointText(_leftHandJoint, jointHash.value("jointLeftHand").toString());
setJointText(_rightHandJoint, jointHash.value("jointRightHand").toString());
while (_freeJoints->count() > 1) {
delete _freeJoints->itemAt(0)->widget();
}
foreach (const QVariant& joint, _originalMapping.values(FREE_JOINT_FIELD)) {
QString jointName = joint.toString();
if (_hfmModel.jointIndices.contains(jointName)) {
createNewFreeJoint(jointName);
}
}
}
void ModelPropertiesDialog::chooseTextureDirectory() {
@ -176,20 +156,6 @@ void ModelPropertiesDialog::updatePivotJoint() {
_pivotJoint->setEnabled(!_pivotAboutCenter->isChecked());
}
void ModelPropertiesDialog::createNewFreeJoint(const QString& joint) {
QWidget* freeJoint = new QWidget();
QHBoxLayout* freeJointLayout = new QHBoxLayout();
freeJointLayout->setContentsMargins(QMargins());
freeJoint->setLayout(freeJointLayout);
QComboBox* jointBox = createJointBox(false);
jointBox->setCurrentText(joint);
freeJointLayout->addWidget(jointBox, 1);
QPushButton* deleteJoint = new QPushButton("Delete");
freeJointLayout->addWidget(deleteJoint);
freeJoint->connect(deleteJoint, SIGNAL(clicked(bool)), SLOT(deleteLater()));
_freeJoints->insertWidget(_freeJoints->count() - 1, freeJoint);
}
QComboBox* ModelPropertiesDialog::createJointBox(bool withNone) const {
QComboBox* box = new QComboBox();
if (withNone) {

View file

@ -39,7 +39,6 @@ private slots:
void chooseTextureDirectory();
void chooseScriptDirectory();
void updatePivotJoint();
void createNewFreeJoint(const QString& joint = QString());
private:
QComboBox* createJointBox(bool withNone = true) const;
@ -66,7 +65,6 @@ private:
QComboBox* _headJoint = nullptr;
QComboBox* _leftHandJoint = nullptr;
QComboBox* _rightHandJoint = nullptr;
QVBoxLayout* _freeJoints = nullptr;
};
#endif // hifi_ModelPropertiesDialog_h

View file

@ -569,7 +569,7 @@ void AvatarActionHold::lateAvatarUpdate(const AnimPose& prePhysicsRoomPose, cons
}
btTransform worldTrans = rigidBody->getWorldTransform();
AnimPose worldBodyPose(glm::vec3(1), bulletToGLM(worldTrans.getRotation()), bulletToGLM(worldTrans.getOrigin()));
AnimPose worldBodyPose(1.0f, bulletToGLM(worldTrans.getRotation()), bulletToGLM(worldTrans.getOrigin()));
// transform the body transform into sensor space with the prePhysics sensor-to-world matrix.
// then transform it back into world using the postAvatarUpdate sensor-to-world matrix.

View file

@ -652,28 +652,25 @@ RayToAvatarIntersectionResult AvatarManager::findRayIntersectionVector(const Pic
PROFILE_RANGE(simulation_physics, __FUNCTION__);
float distance = (float)INT_MAX; // with FLT_MAX bullet rayTest does not return results
float bulletDistance = (float)INT_MAX; // with FLT_MAX bullet rayTest does not return results
glm::vec3 rayDirection = glm::normalize(ray.direction);
std::vector<MyCharacterController::RayAvatarResult> physicsResults = _myAvatar->getCharacterController()->rayTest(glmToBullet(ray.origin), glmToBullet(rayDirection), distance, QVector<uint>());
std::vector<MyCharacterController::RayAvatarResult> physicsResults = _myAvatar->getCharacterController()->rayTest(glmToBullet(ray.origin), glmToBullet(rayDirection), bulletDistance, QVector<uint>());
if (physicsResults.size() > 0) {
glm::vec3 rayDirectionInv = { rayDirection.x != 0.0f ? 1.0f / rayDirection.x : INFINITY,
rayDirection.y != 0.0f ? 1.0f / rayDirection.y : INFINITY,
rayDirection.z != 0.0f ? 1.0f / rayDirection.z : INFINITY };
MyCharacterController::RayAvatarResult rayAvatarResult;
AvatarPointer avatar = nullptr;
BoxFace face = BoxFace::UNKNOWN_FACE;
glm::vec3 surfaceNormal;
QVariantMap extraInfo;
for (auto &hit : physicsResults) {
auto avatarID = hit._intersectWithAvatar;
if ((avatarsToInclude.size() > 0 && !avatarsToInclude.contains(avatarID)) ||
(avatarsToDiscard.size() > 0 && avatarsToDiscard.contains(avatarID))) {
continue;
}
MyCharacterController::RayAvatarResult rayAvatarResult;
BoxFace face = BoxFace::UNKNOWN_FACE;
QVariantMap extraInfo;
AvatarPointer avatar = nullptr;
if (_myAvatar->getSessionUUID() != avatarID) {
auto avatarMap = getHashCopy();
AvatarHash::iterator itr = avatarMap.find(avatarID);
@ -683,46 +680,45 @@ RayToAvatarIntersectionResult AvatarManager::findRayIntersectionVector(const Pic
} else {
avatar = _myAvatar;
}
if (!hit._isBound) {
rayAvatarResult = hit;
} else if (avatar) {
auto &multiSpheres = avatar->getMultiSphereShapes();
if (multiSpheres.size() > 0) {
std::vector<MyCharacterController::RayAvatarResult> boxHits;
MyCharacterController::RayAvatarResult boxHit;
boxHit._distance = FLT_MAX;
for (size_t i = 0; i < hit._boundJoints.size(); i++) {
assert(hit._boundJoints[i] < multiSpheres.size());
auto &mSphere = multiSpheres[hit._boundJoints[i]];
if (mSphere.isValid()) {
float boundDistance = FLT_MAX;
BoxFace face;
glm::vec3 surfaceNormal;
BoxFace boundFace = BoxFace::UNKNOWN_FACE;
glm::vec3 boundSurfaceNormal;
auto &bbox = mSphere.getBoundingBox();
if (bbox.findRayIntersection(ray.origin, rayDirection, rayDirectionInv, boundDistance, face, surfaceNormal)) {
MyCharacterController::RayAvatarResult boxHit;
boxHit._distance = boundDistance;
boxHit._intersect = true;
boxHit._intersectionNormal = surfaceNormal;
boxHit._intersectionPoint = ray.origin + boundDistance * rayDirection;
boxHit._intersectWithAvatar = avatarID;
boxHit._intersectWithJoint = mSphere.getJointIndex();
boxHits.push_back(boxHit);
if (bbox.findRayIntersection(ray.origin, rayDirection, rayDirectionInv, boundDistance, boundFace, boundSurfaceNormal)) {
if (boundDistance < boxHit._distance) {
boxHit._intersect = true;
boxHit._intersectWithAvatar = avatarID;
boxHit._intersectWithJoint = mSphere.getJointIndex();
boxHit._distance = boundDistance;
boxHit._intersectionPoint = ray.origin + boundDistance * rayDirection;
boxHit._intersectionNormal = boundSurfaceNormal;
face = boundFace;
}
}
}
}
if (boxHits.size() > 0) {
if (boxHits.size() > 1) {
std::sort(boxHits.begin(), boxHits.end(), [](const MyCharacterController::RayAvatarResult& hitA,
const MyCharacterController::RayAvatarResult& hitB) {
return hitA._distance < hitB._distance;
});
}
rayAvatarResult = boxHits[0];
if (boxHit._distance < FLT_MAX) {
rayAvatarResult = boxHit;
}
}
}
if (pickAgainstMesh) {
if (rayAvatarResult._intersect && pickAgainstMesh) {
glm::vec3 localRayOrigin = avatar->worldToJointPoint(ray.origin, rayAvatarResult._intersectWithJoint);
glm::vec3 localRayPoint = avatar->worldToJointPoint(ray.origin + rayDirection, rayAvatarResult._intersectWithJoint);
glm::vec3 localRayPoint = avatar->worldToJointPoint(ray.origin + rayAvatarResult._distance * rayDirection, rayAvatarResult._intersectWithJoint);
auto avatarOrientation = avatar->getWorldOrientation();
auto avatarPosition = avatar->getWorldPosition();
@ -732,31 +728,37 @@ RayToAvatarIntersectionResult AvatarManager::findRayIntersectionVector(const Pic
auto defaultFrameRayOrigin = jointPosition + jointOrientation * localRayOrigin;
auto defaultFrameRayPoint = jointPosition + jointOrientation * localRayPoint;
auto defaultFrameRayDirection = defaultFrameRayPoint - defaultFrameRayOrigin;
auto defaultFrameRayDirection = glm::normalize(defaultFrameRayPoint - defaultFrameRayOrigin);
if (avatar->getSkeletonModel()->findRayIntersectionAgainstSubMeshes(defaultFrameRayOrigin, defaultFrameRayDirection, distance, face, surfaceNormal, extraInfo, true, false)) {
auto newDistance = glm::length(vec3FromVariant(extraInfo["worldIntersectionPoint"]) - defaultFrameRayOrigin);
rayAvatarResult._distance = newDistance;
rayAvatarResult._intersectionPoint = ray.origin + newDistance * rayDirection;
rayAvatarResult._intersectionNormal = surfaceNormal;
extraInfo["worldIntersectionPoint"] = vec3toVariant(rayAvatarResult._intersectionPoint);
break;
float subMeshDistance = FLT_MAX;
BoxFace subMeshFace = BoxFace::UNKNOWN_FACE;
glm::vec3 subMeshSurfaceNormal;
QVariantMap subMeshExtraInfo;
if (avatar->getSkeletonModel()->findRayIntersectionAgainstSubMeshes(defaultFrameRayOrigin, defaultFrameRayDirection, subMeshDistance, subMeshFace, subMeshSurfaceNormal, subMeshExtraInfo, true, false)) {
rayAvatarResult._distance = subMeshDistance;
rayAvatarResult._intersectionPoint = ray.origin + subMeshDistance * rayDirection;
rayAvatarResult._intersectionNormal = subMeshSurfaceNormal;
face = subMeshFace;
extraInfo = subMeshExtraInfo;
} else {
rayAvatarResult._intersect = false;
}
} else if (rayAvatarResult._intersect){
}
if (rayAvatarResult._intersect) {
result.intersects = true;
result.avatarID = rayAvatarResult._intersectWithAvatar;
result.distance = rayAvatarResult._distance;
result.face = face;
result.intersection = ray.origin + rayAvatarResult._distance * rayDirection;
result.surfaceNormal = rayAvatarResult._intersectionNormal;
result.jointIndex = rayAvatarResult._intersectWithJoint;
result.extraInfo = extraInfo;
break;
}
}
if (rayAvatarResult._intersect) {
result.intersects = true;
result.avatarID = rayAvatarResult._intersectWithAvatar;
result.distance = rayAvatarResult._distance;
result.surfaceNormal = rayAvatarResult._intersectionNormal;
result.jointIndex = rayAvatarResult._intersectWithJoint;
result.intersection = ray.origin + rayAvatarResult._distance * rayDirection;
result.extraInfo = extraInfo;
result.face = face;
}
}
return result;
}
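
Two behavioural points in the rewrite above: the per-joint bounding-box pass now tracks a single running-minimum hit instead of accumulating and sorting a boxHits vector, and the expensive findRayIntersectionAgainstSubMeshes pass only runs when that coarse pass actually intersected, with the result invalidated again if the sub-mesh test misses. The running-minimum replacement has this generic shape (Hit is a stand-in for RayAvatarResult, for illustration only):

#include <cfloat>
#include <vector>

struct Hit { float distance = FLT_MAX; /* other fields elided */ };

// Keep the nearest candidate in one pass instead of sorting all of them.
static bool nearestHit(const std::vector<Hit>& candidates, Hit& best) {
    for (const Hit& h : candidates) {
        if (h.distance < best.distance) {
            best = h;
        }
    }
    return best.distance < FLT_MAX;
}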

View file

@ -915,14 +915,9 @@ void MyAvatar::simulate(float deltaTime, bool inView) {
auto entityTreeRenderer = qApp->getEntities();
EntityTreePointer entityTree = entityTreeRenderer ? entityTreeRenderer->getTree() : nullptr;
if (entityTree) {
bool zoneAllowsFlying = true;
bool collisionlessAllowed = true;
std::pair<bool, bool> zoneInteractionProperties;
entityTree->withWriteLock([&] {
std::shared_ptr<ZoneEntityItem> zone = entityTreeRenderer->myAvatarZone();
if (zone) {
zoneAllowsFlying = zone->getFlyingAllowed();
collisionlessAllowed = zone->getGhostingAllowed();
}
zoneInteractionProperties = entityTreeRenderer->getZoneInteractionProperties();
EntityEditPacketSender* packetSender = qApp->getEntityEditPacketSender();
forEachDescendant([&](SpatiallyNestablePointer object) {
// we need to update attached queryAACubes in our own local tree so point-select always works
@ -935,6 +930,8 @@ void MyAvatar::simulate(float deltaTime, bool inView) {
});
});
bool isPhysicsEnabled = qApp->isPhysicsEnabled();
bool zoneAllowsFlying = zoneInteractionProperties.first;
bool collisionlessAllowed = zoneInteractionProperties.second;
_characterController.setFlyingAllowed((zoneAllowsFlying && _enableFlying) || !isPhysicsEnabled);
_characterController.setCollisionlessAllowed(collisionlessAllowed);
}
@ -2983,7 +2980,7 @@ void MyAvatar::postUpdate(float deltaTime, const render::ScenePointer& scene) {
auto animSkeleton = _skeletonModel->getRig().getAnimSkeleton();
// the rig is in the skeletonModel frame
AnimPose xform(glm::vec3(1), _skeletonModel->getRotation(), _skeletonModel->getTranslation());
AnimPose xform(1.0f, _skeletonModel->getRotation(), _skeletonModel->getTranslation());
if (_enableDebugDrawDefaultPose && animSkeleton) {
glm::vec4 gray(0.2f, 0.2f, 0.2f, 0.2f);
@ -3028,7 +3025,7 @@ void MyAvatar::postUpdate(float deltaTime, const render::ScenePointer& scene) {
updateHoldActions(_prePhysicsRoomPose, postUpdateRoomPose);
if (_enableDebugDrawDetailedCollision) {
AnimPose rigToWorldPose(glm::vec3(1.0f), getWorldOrientation() * Quaternions::Y_180, getWorldPosition());
AnimPose rigToWorldPose(1.0f, getWorldOrientation() * Quaternions::Y_180, getWorldPosition());
const int NUM_DEBUG_COLORS = 8;
const glm::vec4 DEBUG_COLORS[NUM_DEBUG_COLORS] = {
glm::vec4(1.0f, 1.0f, 1.0f, 1.0f),
@ -4824,7 +4821,7 @@ void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat
swingTwistDecomposition(hipsinWorldSpace, avatarUpWorld, resultingSwingInWorld, resultingTwistInWorld);
// remove scale present from sensorToWorldMatrix
followWorldPose.scale() = glm::vec3(1.0f);
followWorldPose.scale() = 1.0f;
if (isActive(Rotation)) {
//use the hmd reading for the hips follow
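
The simulate() change above collapses the per-zone lookups into one call; judging from the removed lines and the .first/.second reads, getZoneInteractionProperties() presumably packs (flyingAllowed, ghostingAllowed) into the returned std::pair. A sketch of that accessor under that assumption (the real implementation lives in EntityTreeRenderer; this is illustrative only):

// Sketch only; assumes EntityTreeRenderer still exposes myAvatarZone() as before.
std::pair<bool, bool> EntityTreeRenderer::getZoneInteractionProperties() {
    auto zone = myAvatarZone();
    if (!zone) {
        // Match the old defaults: with no enclosing zone, both flying and ghosting are allowed.
        return { true, true };
    }
    return { zone->getFlyingAllowed(), zone->getGhostingAllowed() };
}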

View file

@ -41,7 +41,7 @@ static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
if (myAvatar->isJointPinned(hipsIndex)) {
Transform avatarTransform = myAvatar->getTransform();
AnimPose result = AnimPose(worldToSensorMat * avatarTransform.getMatrix() * Matrices::Y_180);
result.scale() = glm::vec3(1.0f, 1.0f, 1.0f);
result.scale() = 1.0f;
return result;
}
@ -108,7 +108,7 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Rig::ControllerParameters params;
AnimPose avatarToRigPose(glm::vec3(1.0f), Quaternions::Y_180, glm::vec3(0.0f));
AnimPose avatarToRigPose(1.0f, Quaternions::Y_180, glm::vec3(0.0f));
glm::mat4 rigToAvatarMatrix = Matrices::Y_180;
glm::mat4 avatarToWorldMatrix = createMatFromQuatAndPos(myAvatar->getWorldOrientation(), myAvatar->getWorldPosition());
@ -127,7 +127,7 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
// preMult 180 is necessary to convert from avatar to rig coordinates.
// postMult 180 is necessary to convert head from -z forward to z forward.
glm::quat headRot = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
params.primaryControllerPoses[Rig::PrimaryControllerType_Head] = AnimPose(glm::vec3(1.0f), headRot, glm::vec3(0.0f));
params.primaryControllerPoses[Rig::PrimaryControllerType_Head] = AnimPose(1.0f, headRot, glm::vec3(0.0f));
params.primaryControllerFlags[Rig::PrimaryControllerType_Head] = 0;
}

View file

@ -72,20 +72,17 @@ void QmlMarketplace::getMarketplaceItems(
void QmlMarketplace::getMarketplaceItem(const QString& marketplaceItemId) {
QString endpoint = QString("items/") + marketplaceItemId;
QUrlQuery request;
send(endpoint, "getMarketplaceItemSuccess", "getMarketplaceItemFailure", QNetworkAccessManager::GetOperation, AccountManagerAuth::Optional, request);
send(endpoint, "getMarketplaceItemSuccess", "getMarketplaceItemFailure", QNetworkAccessManager::GetOperation, AccountManagerAuth::Optional);
}
void QmlMarketplace::marketplaceItemLike(const QString& marketplaceItemId, const bool like) {
QString endpoint = QString("items/") + marketplaceItemId + "/like";
QUrlQuery request;
send(endpoint, "marketplaceItemLikeSuccess", "marketplaceItemLikeFailure", like ? QNetworkAccessManager::PostOperation : QNetworkAccessManager::DeleteOperation, AccountManagerAuth::Required, request);
send(endpoint, "marketplaceItemLikeSuccess", "marketplaceItemLikeFailure", like ? QNetworkAccessManager::PostOperation : QNetworkAccessManager::DeleteOperation, AccountManagerAuth::Required);
}
void QmlMarketplace::getMarketplaceCategories() {
QString endpoint = "categories";
QUrlQuery request;
send(endpoint, "getMarketplaceCategoriesSuccess", "getMarketplaceCategoriesFailure", QNetworkAccessManager::GetOperation, AccountManagerAuth::None, request);
send(endpoint, "getMarketplaceCategoriesSuccess", "getMarketplaceCategoriesFailure", QNetworkAccessManager::GetOperation, AccountManagerAuth::None);
}
@ -94,14 +91,13 @@ void QmlMarketplace::send(const QString& endpoint, const QString& success, const
const QString URL = "/api/v1/marketplace/";
JSONCallbackParameters callbackParams(this, success, fail);
accountManager->sendRequest(URL + endpoint,
accountManager->sendRequest(URL + endpoint + "?" + request.toString(),
authType,
method,
callbackParams,
QByteArray(),
NULL,
QVariantMap(),
request);
QVariantMap());
}
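
With this change, send() carries any query parameters in the URL itself (endpoint + "?" + request.toString()) and defaults the parameter to an empty QUrlQuery, which is why the three parameterless call sites above drop their unused request objects. A small illustration of the composed path for a hypothetical caller that does pass parameters (the query keys here are made up):

#include <QString>
#include <QUrlQuery>

QUrlQuery request;
request.addQueryItem("category", "wearables");
request.addQueryItem("page", QString::number(2));

// endpoint + "?" + request.toString(), as in send() above:
QString path = QString("items") + "?" + request.toString();
// path == "items?category=wearables&page=2"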

View file

@ -60,7 +60,12 @@ signals:
void marketplaceItemLikeResult(QJsonObject result);
private:
void send(const QString& endpoint, const QString& success, const QString& fail, QNetworkAccessManager::Operation method, AccountManagerAuth::Type authType, const QUrlQuery & request);
void send(const QString& endpoint,
const QString& success,
const QString& fail,
QNetworkAccessManager::Operation method,
AccountManagerAuth::Type authType,
const QUrlQuery& request = QUrlQuery());
QJsonObject apiResponse(const QString& label, QNetworkReply* reply);
QJsonObject failResponse(const QString& label, QNetworkReply* reply);
};

View file

@ -44,7 +44,7 @@ void OctreePacketProcessor::processPacket(QSharedPointer<ReceivedMessage> messag
qDebug("OctreePacketProcessor::processPacket() packets to process=%d", packetsToProcessCount());
}
#endif
bool wasStatsPacket = false;
PacketType octreePacketType = message->getType();

View file

@ -56,7 +56,8 @@ PickResultPointer RayPick::getOverlayIntersection(const PickRay& pick) {
}
PickResultPointer RayPick::getAvatarIntersection(const PickRay& pick) {
RayToAvatarIntersectionResult avatarRes = DependencyManager::get<AvatarManager>()->findRayIntersectionVector(pick, getIncludeItemsAs<EntityItemID>(), getIgnoreItemsAs<EntityItemID>(), true);
bool precisionPicking = !(getFilter().isCoarse() || DependencyManager::get<PickManager>()->getForceCoarsePicking());
RayToAvatarIntersectionResult avatarRes = DependencyManager::get<AvatarManager>()->findRayIntersectionVector(pick, getIncludeItemsAs<EntityItemID>(), getIgnoreItemsAs<EntityItemID>(), precisionPicking);
if (avatarRes.intersects) {
return std::make_shared<RayPickResult>(IntersectionType::AVATAR, avatarRes.avatarID, avatarRes.distance, avatarRes.intersection, pick, avatarRes.surfaceNormal, avatarRes.extraInfo);
} else {

View file

@ -15,6 +15,7 @@
#include "Application.h"
#include "AudioClient.h"
#include "AudioHelpers.h"
#include "ui/AvatarInputs.h"
using namespace scripting;
@ -26,26 +27,9 @@ QString Audio::HMD { "VR" };
Setting::Handle<bool> enableNoiseReductionSetting { QStringList { Audio::AUDIO, "NoiseReduction" }, true };
float Audio::loudnessToLevel(float loudness) {
const float LOG2 = log(2.0f);
const float METER_LOUDNESS_SCALE = 2.8f / 5.0f;
const float LOG2_LOUDNESS_FLOOR = 11.0f;
float level = 0.0f;
loudness += 1.0f;
float log2loudness = logf(loudness) / LOG2;
if (log2loudness <= LOG2_LOUDNESS_FLOOR) {
level = (log2loudness / LOG2_LOUDNESS_FLOOR) * METER_LOUDNESS_SCALE;
} else {
level = (log2loudness - (LOG2_LOUDNESS_FLOOR - 1.0f)) * METER_LOUDNESS_SCALE;
}
if (level > 1.0f) {
level = 1.0;
}
return level;
float level = 6.02059991f * fastLog2f(loudness); // level in dBFS
level = (level + 48.0f) * (1/39.0f); // map [-48, -9] dBFS to [0, 1]
return glm::clamp(level, 0.0f, 1.0f);
}
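The magic constant in the new loudnessToLevel() is just a change of logarithm base: 6.02059991 = 20·log10(2), so 6.02059991·log2(loudness) equals 20·log10(loudness), i.e. the input level in dBFS. The next line maps −48 dBFS to 0 and −9 dBFS to 1, since (−48 + 48)/39 = 0 and (−9 + 48)/39 = 1, replacing the old piecewise log2 curve with a clamped linear ramp over a fixed 39 dB window.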
Audio::Audio() : _devices(_contextIsHMD) {
@ -150,18 +134,33 @@ float Audio::getInputLevel() const {
});
}
void Audio::onInputLoudnessChanged(float loudness) {
bool Audio::isClipping() const {
return resultWithReadLock<bool>([&] {
return _isClipping;
});
}
void Audio::onInputLoudnessChanged(float loudness, bool isClipping) {
float level = loudnessToLevel(loudness);
bool changed = false;
bool levelChanged = false;
bool isClippingChanged = false;
withWriteLock([&] {
if (_inputLevel != level) {
_inputLevel = level;
changed = true;
levelChanged = true;
}
if (_isClipping != isClipping) {
_isClipping = isClipping;
isClippingChanged = true;
}
});
if (changed) {
if (levelChanged) {
emit inputLevelChanged(level);
}
if (isClippingChanged) {
emit clippingChanged(isClipping);
}
}
QString Audio::getContext() const {

View file

@ -41,6 +41,7 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
* above the noise floor.
* @property {number} inputLevel - The loudness of the audio input, range <code>0.0</code> (no sound) &ndash;
* <code>1.0</code> (the onset of clipping). <em>Read-only.</em>
* @property {boolean} clipping - <code>true</code> if the audio input is clipping, otherwise <code>false</code>.
* @property {number} inputVolume - Adjusts the volume of the input audio; range <code>0.0</code> &ndash; <code>1.0</code>.
* If set to a value, the resulting value depends on the input device: for example, the volume can't be changed on some
* devices, and others might only support values of <code>0.0</code> and <code>1.0</code>.
@ -58,6 +59,7 @@ class Audio : public AudioScriptingInterface, protected ReadWriteLockable {
Q_PROPERTY(bool noiseReduction READ noiseReductionEnabled WRITE enableNoiseReduction NOTIFY noiseReductionChanged)
Q_PROPERTY(float inputVolume READ getInputVolume WRITE setInputVolume NOTIFY inputVolumeChanged)
Q_PROPERTY(float inputLevel READ getInputLevel NOTIFY inputLevelChanged)
Q_PROPERTY(bool clipping READ isClipping NOTIFY clippingChanged)
Q_PROPERTY(QString context READ getContext NOTIFY contextChanged)
Q_PROPERTY(AudioDevices* devices READ getDevices NOTIFY nop)
@ -74,6 +76,7 @@ public:
bool noiseReductionEnabled() const;
float getInputVolume() const;
float getInputLevel() const;
bool isClipping() const;
QString getContext() const;
void showMicMeter(bool show);
@ -217,6 +220,14 @@ signals:
*/
void inputLevelChanged(float level);
/**jsdoc
* Triggered when the clipping state of the input audio changes.
* @function Audio.clippingChanged
* @param {boolean} isClipping - <code>true</code> if the audio input is clipping, otherwise <code>false</code>.
* @returns {Signal}
*/
void clippingChanged(bool isClipping);
/**jsdoc
* Triggered when the current context of the audio changes.
* @function Audio.contextChanged
@ -237,7 +248,7 @@ private slots:
void setMuted(bool muted);
void enableNoiseReduction(bool enable);
void setInputVolume(float volume);
void onInputLoudnessChanged(float loudness);
void onInputLoudnessChanged(float loudness, bool isClipping);
protected:
// Audio must live on a separate thread from AudioClient to avoid deadlocks
@ -247,6 +258,7 @@ private:
float _inputVolume { 1.0f };
float _inputLevel { 0.0f };
bool _isClipping { false };
bool _isMuted { false };
bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
bool _contextIsHMD { false };

View file

@ -140,10 +140,10 @@ void AnimClip::copyFromNetworkAnim() {
postRot = animSkeleton.getPostRotationPose(animJoint);
// cancel out scale
preRot.scale() = glm::vec3(1.0f);
postRot.scale() = glm::vec3(1.0f);
preRot.scale() = 1.0f;
postRot.scale() = 1.0f;
AnimPose rot(glm::vec3(1.0f), hfmAnimRot, glm::vec3());
AnimPose rot(1.0f, hfmAnimRot, glm::vec3());
// adjust translation offsets, so large translation animations on the reference skeleton
// will be adjusted when played on a skeleton with short limbs.
@ -155,7 +155,7 @@ void AnimClip::copyFromNetworkAnim() {
boneLengthScale = glm::length(relDefaultPose.trans()) / glm::length(hfmZeroTrans);
}
AnimPose trans = AnimPose(glm::vec3(1.0f), glm::quat(), relDefaultPose.trans() + boneLengthScale * (hfmAnimTrans - hfmZeroTrans));
AnimPose trans = AnimPose(1.0f, glm::quat(), relDefaultPose.trans() + boneLengthScale * (hfmAnimTrans - hfmZeroTrans));
_anim[frame][skeletonJoint] = trans * preRot * rot * postRot;
}

View file

@ -552,7 +552,7 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
AnimPose accum = absolutePoses[_hipsIndex];
AnimPose baseParentPose = absolutePoses[_hipsIndex];
for (int i = (int)chainDepth - 1; i >= 0; i--) {
accum = accum * AnimPose(glm::vec3(1.0f), jointChainInfoOut.jointInfoVec[i].rot, jointChainInfoOut.jointInfoVec[i].trans);
accum = accum * AnimPose(1.0f, jointChainInfoOut.jointInfoVec[i].rot, jointChainInfoOut.jointInfoVec[i].trans);
postAbsPoses[i] = accum;
if (jointChainInfoOut.jointInfoVec[i].jointIndex == topJointIndex) {
topChainIndex = i;
@ -734,7 +734,7 @@ void AnimInverseKinematics::computeAndCacheSplineJointInfosForIKTarget(const Ani
glm::mat3 m(u, v, glm::cross(u, v));
glm::quat rot = glm::normalize(glm::quat_cast(m));
AnimPose pose(glm::vec3(1.0f), rot, spline(t));
AnimPose pose(1.0f, rot, spline(t));
AnimPose offsetPose = pose.inverse() * defaultPose;
SplineJointInfo splineJointInfo = { index, ratio, offsetPose };
@ -767,7 +767,7 @@ void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, co
const int baseIndex = _hipsIndex;
// build spline from tip to base
AnimPose tipPose = AnimPose(glm::vec3(1.0f), target.getRotation(), target.getTranslation());
AnimPose tipPose = AnimPose(1.0f, target.getRotation(), target.getTranslation());
AnimPose basePose = absolutePoses[baseIndex];
CubicHermiteSplineFunctorWithArcLength spline;
if (target.getIndex() == _headIndex) {
@ -815,7 +815,7 @@ void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, co
glm::mat3 m(u, v, glm::cross(u, v));
glm::quat rot = glm::normalize(glm::quat_cast(m));
AnimPose desiredAbsPose = AnimPose(glm::vec3(1.0f), rot, trans) * splineJointInfo.offsetPose;
AnimPose desiredAbsPose = AnimPose(1.0f, rot, trans) * splineJointInfo.offsetPose;
// apply flex coefficent
AnimPose flexedAbsPose;
@ -965,7 +965,7 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
}
_relativePoses[_hipsIndex] = parentAbsPose.inverse() * absPose;
_relativePoses[_hipsIndex].scale() = glm::vec3(1.0f);
_relativePoses[_hipsIndex].scale() = 1.0f;
}
// if there is an active jointChainInfo for the hips store the post shifted hips into it.
@ -1753,7 +1753,7 @@ void AnimInverseKinematics::setSecondaryTargets(const AnimContext& context) {
AnimPose rigToGeometryPose = AnimPose(glm::inverse(context.getGeometryToRigMatrix()));
for (auto& iter : _secondaryTargetsInRigFrame) {
AnimPose absPose = rigToGeometryPose * iter.second;
absPose.scale() = glm::vec3(1.0f);
absPose.scale() = 1.0f;
AnimPose parentAbsPose;
int parentIndex = _skeleton->getParentIndex(iter.first);
@ -1825,7 +1825,7 @@ void AnimInverseKinematics::debugDrawSpineSplines(const AnimContext& context, co
const int baseIndex = _hipsIndex;
// build spline
AnimPose tipPose = AnimPose(glm::vec3(1.0f), target.getRotation(), target.getTranslation());
AnimPose tipPose = AnimPose(1.0f, target.getRotation(), target.getTranslation());
AnimPose basePose = _skeleton->getAbsolutePose(baseIndex, _relativePoses);
CubicHermiteSplineFunctorWithArcLength spline;

View file

@ -172,5 +172,5 @@ AnimPose AnimManipulator::computeRelativePoseFromJointVar(const AnimVariantMap&
break;
}
return AnimPose(glm::vec3(1), relRot, relTrans);
return AnimPose(1.0f, relRot, relTrans);
}

View file

@ -95,7 +95,7 @@ const AnimPoseVec& AnimPoleVectorConstraint::evaluate(const AnimVariantMap& anim
AnimPose tipPose = ikChain.getAbsolutePoseFromJointIndex(_tipJointIndex);
// Look up refVector from animVars; make sure to convert it into geom space.
glm::vec3 refVector = midPose.xformVectorFast(_referenceVector);
glm::vec3 refVector = midPose.xformVector(_referenceVector);
float refVectorLength = glm::length(refVector);
glm::vec3 axis = basePose.trans() - tipPose.trans();

View file

@ -14,15 +14,14 @@
#include <glm/gtc/matrix_transform.hpp>
#include "AnimUtil.h"
const AnimPose AnimPose::identity = AnimPose(glm::vec3(1.0f),
glm::quat(),
glm::vec3(0.0f));
const AnimPose AnimPose::identity = AnimPose(1.0f, glm::quat(), glm::vec3(0.0f));
AnimPose::AnimPose(const glm::mat4& mat) {
static const float EPSILON = 0.0001f;
_scale = extractScale(mat);
glm::vec3 scale = extractScale(mat);
// quat_cast doesn't work so well with scaled matrices, so cancel it out.
glm::mat4 tmp = glm::scale(mat, 1.0f / _scale);
glm::mat4 tmp = glm::scale(mat, 1.0f / scale);
_scale = extractUniformScale(scale);
_rot = glm::quat_cast(tmp);
float lengthSquared = glm::length2(_rot);
if (glm::abs(lengthSquared - 1.0f) > EPSILON) {
@ -40,29 +39,22 @@ glm::vec3 AnimPose::xformPoint(const glm::vec3& rhs) const {
return *this * rhs;
}
// really slow, but accurate for transforms with non-uniform scale
glm::vec3 AnimPose::xformVector(const glm::vec3& rhs) const {
glm::vec3 xAxis = _rot * glm::vec3(_scale.x, 0.0f, 0.0f);
glm::vec3 yAxis = _rot * glm::vec3(0.0f, _scale.y, 0.0f);
glm::vec3 zAxis = _rot * glm::vec3(0.0f, 0.0f, _scale.z);
glm::mat3 mat(xAxis, yAxis, zAxis);
glm::mat3 transInvMat = glm::inverse(glm::transpose(mat));
return transInvMat * rhs;
}
// faster, but does not handle non-uniform scale correctly.
glm::vec3 AnimPose::xformVectorFast(const glm::vec3& rhs) const {
return _rot * (_scale * rhs);
}
AnimPose AnimPose::operator*(const AnimPose& rhs) const {
glm::mat4 result;
glm_mat4u_mul(*this, rhs, result);
return AnimPose(result);
float scale = _scale * rhs._scale;
glm::quat rot = _rot * rhs._rot;
glm::vec3 trans = _trans + (_rot * (_scale * rhs._trans));
return AnimPose(scale, rot, trans);
}
AnimPose AnimPose::inverse() const {
return AnimPose(glm::inverse(static_cast<glm::mat4>(*this)));
float invScale = 1.0f / _scale;
glm::quat invRot = glm::inverse(_rot);
glm::vec3 invTrans = invScale * (invRot * -_trans);
return AnimPose(invScale, invRot, invTrans);
}
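Editor's note: the rewritten operator* and inverse() follow from treating (scale, rot, trans) as the map p' = trans + rot * (scale * p). A self-contained sketch of the same uniform-scale algebra (the SRT struct and function names here are hypothetical, not the engine's types):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct SRT { float s; glm::quat r; glm::vec3 t; };  // applied as p' = t + r * (s * p)

// Compose: rhs is applied first, then lhs.
SRT mul(const SRT& a, const SRT& b) {
    return { a.s * b.s, a.r * b.r, a.t + a.r * (a.s * b.t) };
}

// Invert: undo the translation, then the rotation, then the scale.
SRT inv(const SRT& a) {
    float is = 1.0f / a.s;
    glm::quat ir = glm::inverse(a.r);
    return { is, ir, is * (ir * -a.t) };
}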
// mirror about x-axis without applying negative scale.
@ -71,11 +63,10 @@ AnimPose AnimPose::mirror() const {
}
AnimPose::operator glm::mat4() const {
glm::vec3 xAxis = _rot * glm::vec3(_scale.x, 0.0f, 0.0f);
glm::vec3 yAxis = _rot * glm::vec3(0.0f, _scale.y, 0.0f);
glm::vec3 zAxis = _rot * glm::vec3(0.0f, 0.0f, _scale.z);
return glm::mat4(glm::vec4(xAxis, 0.0f), glm::vec4(yAxis, 0.0f),
glm::vec4(zAxis, 0.0f), glm::vec4(_trans, 1.0f));
glm::vec3 xAxis = _rot * glm::vec3(_scale, 0.0f, 0.0f);
glm::vec3 yAxis = _rot * glm::vec3(0.0f, _scale, 0.0f);
glm::vec3 zAxis = _rot * glm::vec3(0.0f, 0.0f, _scale);
return glm::mat4(glm::vec4(xAxis, 0.0f), glm::vec4(yAxis, 0.0f), glm::vec4(zAxis, 0.0f), glm::vec4(_trans, 1.0f));
}
void AnimPose::blend(const AnimPose& srcPose, float alpha) {

View file

@ -21,14 +21,13 @@ class AnimPose {
public:
AnimPose() {}
explicit AnimPose(const glm::mat4& mat);
explicit AnimPose(const glm::quat& rotIn) : _scale(1.0f), _rot(rotIn), _trans(0.0f) {}
AnimPose(const glm::quat& rotIn, const glm::vec3& transIn) : _scale(1.0f), _rot(rotIn), _trans(transIn) {}
AnimPose(const glm::vec3& scaleIn, const glm::quat& rotIn, const glm::vec3& transIn) : _scale(scaleIn), _rot(rotIn), _trans(transIn) {}
explicit AnimPose(const glm::quat& rotIn) : _rot(rotIn), _trans(0.0f), _scale(1.0f) {}
AnimPose(const glm::quat& rotIn, const glm::vec3& transIn) : _rot(rotIn), _trans(transIn), _scale(1.0f) {}
AnimPose(float scaleIn, const glm::quat& rotIn, const glm::vec3& transIn) : _rot(rotIn), _trans(transIn), _scale(scaleIn) {}
static const AnimPose identity;
glm::vec3 xformPoint(const glm::vec3& rhs) const;
glm::vec3 xformVector(const glm::vec3& rhs) const; // really slow, but accurate for transforms with non-uniform scale
glm::vec3 xformVectorFast(const glm::vec3& rhs) const; // faster, but does not handle non-uniform scale correctly.
glm::vec3 operator*(const glm::vec3& rhs) const; // same as xformPoint
AnimPose operator*(const AnimPose& rhs) const;
@ -37,8 +36,8 @@ public:
AnimPose mirror() const;
operator glm::mat4() const;
const glm::vec3& scale() const { return _scale; }
glm::vec3& scale() { return _scale; }
float scale() const { return _scale; }
float& scale() { return _scale; }
const glm::quat& rot() const { return _rot; }
glm::quat& rot() { return _rot; }
@ -50,13 +49,13 @@ public:
private:
friend QDebug operator<<(QDebug debug, const AnimPose& pose);
glm::vec3 _scale { 1.0f };
glm::quat _rot;
glm::vec3 _trans;
float _scale { 1.0f }; // uniform scale only.
};
inline QDebug operator<<(QDebug debug, const AnimPose& pose) {
debug << "AnimPose, trans = (" << pose.trans().x << pose.trans().y << pose.trans().z << "), rot = (" << pose.rot().x << pose.rot().y << pose.rot().z << pose.rot().w << "), scale = (" << pose.scale().x << pose.scale().y << pose.scale().z << ")";
debug << "AnimPose, trans = (" << pose.trans().x << pose.trans().y << pose.trans().z << "), rot = (" << pose.rot().x << pose.rot().y << pose.rot().z << pose.rot().w << "), scale =" << pose.scale();
return debug;
}

View file

@ -76,7 +76,7 @@ int AnimSkeleton::getChainDepth(int jointIndex) const {
int index = jointIndex;
do {
chainDepth++;
index = _joints[index].parentIndex;
index = _parentIndices[index];
} while (index != -1);
return chainDepth;
} else {
@ -102,17 +102,12 @@ const AnimPose& AnimSkeleton::getPostRotationPose(int jointIndex) const {
return _relativePostRotationPoses[jointIndex];
}
int AnimSkeleton::getParentIndex(int jointIndex) const {
return _joints[jointIndex].parentIndex;
}
std::vector<int> AnimSkeleton::getChildrenOfJoint(int jointIndex) const {
// Children and grandchildren, etc.
std::vector<int> result;
if (jointIndex != -1) {
for (int i = jointIndex + 1; i < (int)_joints.size(); i++) {
if (_joints[i].parentIndex == jointIndex
|| (std::find(result.begin(), result.end(), _joints[i].parentIndex) != result.end())) {
for (int i = jointIndex + 1; i < (int)_parentIndices.size(); i++) {
if (_parentIndices[i] == jointIndex || (std::find(result.begin(), result.end(), _parentIndices[i]) != result.end())) {
result.push_back(i);
}
}
@ -128,7 +123,7 @@ AnimPose AnimSkeleton::getAbsolutePose(int jointIndex, const AnimPoseVec& relati
if (jointIndex < 0 || jointIndex >= (int)relativePoses.size() || jointIndex >= _jointsSize) {
return AnimPose::identity;
} else {
return getAbsolutePose(_joints[jointIndex].parentIndex, relativePoses) * relativePoses[jointIndex];
return getAbsolutePose(_parentIndices[jointIndex], relativePoses) * relativePoses[jointIndex];
}
}
@ -136,7 +131,7 @@ void AnimSkeleton::convertRelativePosesToAbsolute(AnimPoseVec& poses) const {
// poses start off relative and leave in absolute frame
int lastIndex = std::min((int)poses.size(), _jointsSize);
for (int i = 0; i < lastIndex; ++i) {
int parentIndex = _joints[i].parentIndex;
int parentIndex = _parentIndices[i];
if (parentIndex != -1) {
poses[i] = poses[parentIndex] * poses[i];
}
@ -147,7 +142,7 @@ void AnimSkeleton::convertAbsolutePosesToRelative(AnimPoseVec& poses) const {
// poses start off absolute and leave in relative frame
int lastIndex = std::min((int)poses.size(), _jointsSize);
for (int i = lastIndex - 1; i >= 0; --i) {
int parentIndex = _joints[i].parentIndex;
int parentIndex = _parentIndices[i];
if (parentIndex != -1) {
poses[i] = poses[parentIndex].inverse() * poses[i];
}
@ -158,7 +153,7 @@ void AnimSkeleton::convertAbsoluteRotationsToRelative(std::vector<glm::quat>& ro
// poses start off absolute and leave in relative frame
int lastIndex = std::min((int)rotations.size(), _jointsSize);
for (int i = lastIndex - 1; i >= 0; --i) {
int parentIndex = _joints[i].parentIndex;
int parentIndex = _parentIndices[i];
if (parentIndex != -1) {
rotations[i] = glm::inverse(rotations[parentIndex]) * rotations[i];
}
@ -197,6 +192,14 @@ void AnimSkeleton::mirrorAbsolutePoses(AnimPoseVec& poses) const {
void AnimSkeleton::buildSkeletonFromJoints(const std::vector<HFMJoint>& joints, const QMap<int, glm::quat> jointOffsets) {
_joints = joints;
// build a separate vector of parentIndices for cache coherency
// AnimSkeleton::getParentIndex is called very frequently in tight loops.
_parentIndices.reserve(_joints.size());
for (auto& joint : _joints) {
_parentIndices.push_back(joint.parentIndex);
}
_jointsSize = (int)joints.size();
// build a cache of bind poses
@ -289,8 +292,6 @@ void AnimSkeleton::dump(bool verbose) const {
qCDebug(animation) << " relDefaultPose =" << getRelativeDefaultPose(i);
if (verbose) {
qCDebug(animation) << " hfmJoint =";
qCDebug(animation) << " isFree =" << _joints[i].isFree;
qCDebug(animation) << " freeLineage =" << _joints[i].freeLineage;
qCDebug(animation) << " parentIndex =" << _joints[i].parentIndex;
qCDebug(animation) << " translation =" << _joints[i].translation;
qCDebug(animation) << " preTransform =" << _joints[i].preTransform;

View file

@ -43,7 +43,10 @@ public:
// get post transform which might include FBX offset transformations
const AnimPose& getPostRotationPose(int jointIndex) const;
int getParentIndex(int jointIndex) const;
int getParentIndex(int jointIndex) const {
return _parentIndices[jointIndex];
}
std::vector<int> getChildrenOfJoint(int jointIndex) const;
AnimPose getAbsolutePose(int jointIndex, const AnimPoseVec& relativePoses) const;
@ -69,6 +72,7 @@ protected:
void buildSkeletonFromJoints(const std::vector<HFMJoint>& joints, const QMap<int, glm::quat> jointOffsets);
std::vector<HFMJoint> _joints;
std::vector<int> _parentIndices;
int _jointsSize { 0 };
AnimPoseVec _relativeDefaultPoses;
AnimPoseVec _absoluteDefaultPoses;

View file

@ -39,14 +39,13 @@ static int nextRigId = 1;
static std::map<int, Rig*> rigRegistry;
static std::mutex rigRegistryMutex;
static bool isEqual(const float p, float q) {
const float EPSILON = 0.00001f;
return fabsf(p - q) <= EPSILON;
}
static bool isEqual(const glm::vec3& u, const glm::vec3& v) {
const float EPSILON = 0.0001f;
float uLen = glm::length(u);
if (uLen == 0.0f) {
return glm::length(v) <= EPSILON;
} else {
return (glm::length(u - v) / uLen) <= EPSILON;
}
return isEqual(u.x, v.x) && isEqual(u.y, v.y) && isEqual(u.z, v.z);
}
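Editor's note: the new per-component test trades the old relative-length comparison for an absolute tolerance, which matters for large vectors. A small illustration with made-up numbers:

#include <glm/glm.hpp>
#include <cmath>
#include <cstdio>

int main() {
    glm::vec3 u(1000.0f, 0.0f, 0.0f);
    glm::vec3 v(1000.01f, 0.0f, 0.0f);
    // old test: length(u - v) / length(u) is about 1e-5, inside the old 1e-4 tolerance -> "equal"
    // new test: the per-component difference is about 0.01, above the new 1e-5 tolerance -> "different"
    printf("relative = %g, per-component = %g\n", glm::length(u - v) / glm::length(u), std::fabs(u.x - v.x));
    return 0;
}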
static bool isEqual(const glm::quat& p, const glm::quat& q) {
@ -494,10 +493,8 @@ std::shared_ptr<AnimInverseKinematics> Rig::getAnimInverseKinematicsNode() const
std::shared_ptr<AnimInverseKinematics> result;
if (_animNode) {
_animNode->traverse([&](AnimNode::Pointer node) {
// only report clip nodes as valid roles.
auto ikNode = std::dynamic_pointer_cast<AnimInverseKinematics>(node);
if (ikNode) {
result = ikNode;
if (node->getType() == AnimNodeType::InverseKinematics) {
result = std::dynamic_pointer_cast<AnimInverseKinematics>(node);
return false;
} else {
return true;
@ -1329,7 +1326,7 @@ static bool findPointKDopDisplacement(const glm::vec3& point, const AnimPose& sh
}
if (slabCount == (DOP14_COUNT / 2) && minDisplacementLen != FLT_MAX) {
// we are within the k-dop so push the point along the minimum displacement found
displacementOut = shapePose.xformVectorFast(minDisplacement);
displacementOut = shapePose.xformVector(minDisplacement);
return true;
} else {
// point is outside of kdop
@ -1338,7 +1335,7 @@ static bool findPointKDopDisplacement(const glm::vec3& point, const AnimPose& sh
} else {
// point is directly on top of shapeInfo.avgPoint.
// push the point out along the x axis.
displacementOut = shapePose.xformVectorFast(shapeInfo.points[0]);
displacementOut = shapePose.xformVector(shapeInfo.points[0]);
return true;
}
}

View file

@ -170,6 +170,57 @@ static void channelDownmix(int16_t* source, int16_t* dest, int numSamples) {
}
}
static float computeLoudness(int16_t* samples, int numSamples, int numChannels, bool& isClipping) {
const int32_t CLIPPING_THRESHOLD = 32392; // -0.1 dBFS
const int32_t CLIPPING_DETECTION = 3; // consecutive samples over threshold
float scale = numSamples ? 1.0f / (numSamples * 32768.0f) : 0.0f;
int32_t loudness = 0;
isClipping = false;
if (numChannels == 2) {
int32_t oversLeft = 0;
int32_t oversRight = 0;
for (int i = 0; i < numSamples/2; i++) {
int32_t left = std::abs((int32_t)samples[2*i+0]);
int32_t right = std::abs((int32_t)samples[2*i+1]);
loudness += left;
loudness += right;
if (left > CLIPPING_THRESHOLD) {
isClipping |= (++oversLeft >= CLIPPING_DETECTION);
} else {
oversLeft = 0;
}
if (right > CLIPPING_THRESHOLD) {
isClipping |= (++oversRight >= CLIPPING_DETECTION);
} else {
oversRight = 0;
}
}
} else {
int32_t overs = 0;
for (int i = 0; i < numSamples; i++) {
int32_t sample = std::abs((int32_t)samples[i]);
loudness += sample;
if (sample > CLIPPING_THRESHOLD) {
isClipping |= (++overs >= CLIPPING_DETECTION);
} else {
overs = 0;
}
}
}
return (float)loudness * scale;
}
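Editor's note: the -0.1 dBFS clipping threshold above follows directly from 16-bit full scale; only the arithmetic is shown here:

#include <cmath>
#include <cstdio>

int main() {
    // 16-bit full scale is 32768; -0.1 dBFS scales it by 10^(-0.1 / 20).
    double threshold = 32768.0 * std::pow(10.0, -0.1 / 20.0);
    printf("%.1f\n", threshold);  // ~32392.9, matching the 32392 constant above (within rounding)
    return 0;
}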
static inline float convertToFloat(int16_t sample) {
return (float)sample * (1 / 32768.0f);
}
@ -1075,45 +1126,25 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
void AudioClient::handleAudioInput(QByteArray& audioBuffer) {
if (!_audioPaused) {
if (_muted) {
_lastInputLoudness = 0.0f;
_timeSinceLastClip = 0.0f;
} else {
bool audioGateOpen = false;
if (!_muted) {
int16_t* samples = reinterpret_cast<int16_t*>(audioBuffer.data());
int numSamples = audioBuffer.size() / AudioConstants::SAMPLE_SIZE;
int numFrames = numSamples / (_isStereoInput ? AudioConstants::STEREO : AudioConstants::MONO);
if (_isNoiseGateEnabled) {
// The audio gate includes DC removal
_audioGate->render(samples, samples, numFrames);
audioGateOpen = _audioGate->render(samples, samples, numFrames);
} else {
_audioGate->removeDC(samples, samples, numFrames);
}
int32_t loudness = 0;
assert(numSamples < 65536); // int32_t loudness cannot overflow
bool didClip = false;
for (int i = 0; i < numSamples; ++i) {
const int32_t CLIPPING_THRESHOLD = (int32_t)(AudioConstants::MAX_SAMPLE_VALUE * 0.9f);
int32_t sample = std::abs((int32_t)samples[i]);
loudness += sample;
didClip |= (sample > CLIPPING_THRESHOLD);
}
_lastInputLoudness = (float)loudness / numSamples;
if (didClip) {
_timeSinceLastClip = 0.0f;
} else if (_timeSinceLastClip >= 0.0f) {
_timeSinceLastClip += (float)numSamples / (float)AudioConstants::SAMPLE_RATE;
audioGateOpen = _audioGate->removeDC(samples, samples, numFrames);
}
emit inputReceived(audioBuffer);
}
emit inputLoudnessChanged(_lastInputLoudness);
// state machine to detect gate opening and closing
bool audioGateOpen = (_lastInputLoudness != 0.0f);
// detect gate opening and closing
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
_audioGateOpen = audioGateOpen;
@ -1186,10 +1217,27 @@ void AudioClient::handleMicAudioInput() {
static int16_t networkAudioSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
while (_inputRingBuffer.samplesAvailable() >= inputSamplesRequired) {
if (_muted) {
_inputRingBuffer.shiftReadPosition(inputSamplesRequired);
} else {
_inputRingBuffer.readSamples(inputAudioSamples.get(), inputSamplesRequired);
_inputRingBuffer.readSamples(inputAudioSamples.get(), inputSamplesRequired);
// detect loudness and clipping on the raw input
bool isClipping = false;
float inputLoudness = computeLoudness(inputAudioSamples.get(), inputSamplesRequired, _inputFormat.channelCount(), isClipping);
float tc = (inputLoudness > _lastInputLoudness) ? 0.378f : 0.967f; // 10ms attack, 300ms release @ 100Hz
inputLoudness += tc * (_lastInputLoudness - inputLoudness);
_lastInputLoudness = inputLoudness;
if (isClipping) {
_timeSinceLastClip = 0.0f;
} else if (_timeSinceLastClip >= 0.0f) {
_timeSinceLastClip += AudioConstants::NETWORK_FRAME_SECS;
}
isClipping = (_timeSinceLastClip >= 0.0f) && (_timeSinceLastClip < 2.0f); // 2 second hold time
emit inputLoudnessChanged(_lastInputLoudness, isClipping);
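Editor's note: the loudness smoothing above is a one-pole filter run once per 10 ms network frame, so each coefficient relates to a time constant roughly as exp(-framePeriod / timeConstant); the exact values in the code may have been tuned by hand:

#include <cmath>
#include <cstdio>

int main() {
    const double framePeriod = 0.01;  // 100 Hz frame rate
    printf("release: %.3f\n", std::exp(-framePeriod / 0.300));  // ~0.967, as used above
    printf("attack:  %.3f\n", std::exp(-framePeriod / 0.010));  // ~0.368; the code uses a slightly slower 0.378
    return 0;
}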
if (!_muted) {
possibleResampling(_inputToNetworkResampler,
inputAudioSamples.get(), networkAudioSamples,
inputSamplesRequired, numNetworkSamples,

View file

@ -248,7 +248,7 @@ signals:
void noiseReductionChanged(bool noiseReductionEnabled);
void mutedByMixer();
void inputReceived(const QByteArray& inputSamples);
void inputLoudnessChanged(float loudness);
void inputLoudnessChanged(float loudness, bool isClipping);
void outputBytesToNetwork(int numBytes);
void inputBytesFromNetwork(int numBytes);
void noiseGateOpened();

View file

@ -138,8 +138,8 @@ public:
int32_t hysteresis(int32_t peak);
int32_t envelope(int32_t attn);
virtual void process(int16_t* input, int16_t* output, int numFrames) = 0;
virtual void removeDC(int16_t* input, int16_t* output, int numFrames) = 0;
virtual bool process(int16_t* input, int16_t* output, int numFrames) = 0;
virtual bool removeDC(int16_t* input, int16_t* output, int numFrames) = 0;
};
GateImpl::GateImpl(int sampleRate) {
@ -403,14 +403,15 @@ public:
GateMono(int sampleRate) : GateImpl(sampleRate) {}
// mono input/output (in-place is allowed)
void process(int16_t* input, int16_t* output, int numFrames) override;
void removeDC(int16_t* input, int16_t* output, int numFrames) override;
bool process(int16_t* input, int16_t* output, int numFrames) override;
bool removeDC(int16_t* input, int16_t* output, int numFrames) override;
};
template<int N>
void GateMono<N>::process(int16_t* input, int16_t* output, int numFrames) {
bool GateMono<N>::process(int16_t* input, int16_t* output, int numFrames) {
clearHistogram();
int32_t mask = 0;
for (int n = 0; n < numFrames; n++) {
@ -453,15 +454,21 @@ void GateMono<N>::process(int16_t* input, int16_t* output, int numFrames) {
x = MULQ31(x, attn);
// store 16-bit output
output[n] = (int16_t)saturateQ30(x);
x = saturateQ30(x);
output[n] = (int16_t)x;
mask |= x;
}
// update adaptive threshold
processHistogram(numFrames);
return mask != 0;
}
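Editor's note: the mask accumulation is a branch-free way for the gate to report whether it produced any non-zero output, which is what the new bool return value conveys. A tiny standalone illustration:

#include <cstdint>
#include <cstdio>

int main() {
    int16_t block[4] = { 0, 0, -3, 0 };
    int32_t mask = 0;
    for (int16_t s : block) {
        mask |= s;  // OR every output sample into the mask
    }
    printf("gate open: %s\n", mask != 0 ? "yes" : "no");  // "yes": one sample is non-zero
    return 0;
}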
template<int N>
void GateMono<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
bool GateMono<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
int32_t mask = 0;
for (int n = 0; n < numFrames; n++) {
@ -471,8 +478,13 @@ void GateMono<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
_dc.process(x);
// store 16-bit output
output[n] = (int16_t)saturateQ30(x);
x = saturateQ30(x);
output[n] = (int16_t)x;
mask |= x;
}
return mask != 0;
}
//
@ -489,14 +501,15 @@ public:
GateStereo(int sampleRate) : GateImpl(sampleRate) {}
// interleaved stereo input/output (in-place is allowed)
void process(int16_t* input, int16_t* output, int numFrames) override;
void removeDC(int16_t* input, int16_t* output, int numFrames) override;
bool process(int16_t* input, int16_t* output, int numFrames) override;
bool removeDC(int16_t* input, int16_t* output, int numFrames) override;
};
template<int N>
void GateStereo<N>::process(int16_t* input, int16_t* output, int numFrames) {
bool GateStereo<N>::process(int16_t* input, int16_t* output, int numFrames) {
clearHistogram();
int32_t mask = 0;
for (int n = 0; n < numFrames; n++) {
@ -541,16 +554,23 @@ void GateStereo<N>::process(int16_t* input, int16_t* output, int numFrames) {
x1 = MULQ31(x1, attn);
// store 16-bit output
output[2*n+0] = (int16_t)saturateQ30(x0);
output[2*n+1] = (int16_t)saturateQ30(x1);
x0 = saturateQ30(x0);
x1 = saturateQ30(x1);
output[2*n+0] = (int16_t)x0;
output[2*n+1] = (int16_t)x1;
mask |= (x0 | x1);
}
// update adaptive threshold
processHistogram(numFrames);
return mask != 0;
}
template<int N>
void GateStereo<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
bool GateStereo<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
int32_t mask = 0;
for (int n = 0; n < numFrames; n++) {
@ -561,9 +581,15 @@ void GateStereo<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
_dc.process(x0, x1);
// store 16-bit output
output[2*n+0] = (int16_t)saturateQ30(x0);
output[2*n+1] = (int16_t)saturateQ30(x1);
x0 = saturateQ30(x0);
x1 = saturateQ30(x1);
output[2*n+0] = (int16_t)x0;
output[2*n+1] = (int16_t)x1;
mask |= (x0 | x1);
}
return mask != 0;
}
//
@ -580,14 +606,15 @@ public:
GateQuad(int sampleRate) : GateImpl(sampleRate) {}
// interleaved quad input/output (in-place is allowed)
void process(int16_t* input, int16_t* output, int numFrames) override;
void removeDC(int16_t* input, int16_t* output, int numFrames) override;
bool process(int16_t* input, int16_t* output, int numFrames) override;
bool removeDC(int16_t* input, int16_t* output, int numFrames) override;
};
template<int N>
void GateQuad<N>::process(int16_t* input, int16_t* output, int numFrames) {
bool GateQuad<N>::process(int16_t* input, int16_t* output, int numFrames) {
clearHistogram();
int32_t mask = 0;
for (int n = 0; n < numFrames; n++) {
@ -636,18 +663,27 @@ void GateQuad<N>::process(int16_t* input, int16_t* output, int numFrames) {
x3 = MULQ31(x3, attn);
// store 16-bit output
output[4*n+0] = (int16_t)saturateQ30(x0);
output[4*n+1] = (int16_t)saturateQ30(x1);
output[4*n+2] = (int16_t)saturateQ30(x2);
output[4*n+3] = (int16_t)saturateQ30(x3);
x0 = saturateQ30(x0);
x1 = saturateQ30(x1);
x2 = saturateQ30(x2);
x3 = saturateQ30(x3);
output[4*n+0] = (int16_t)x0;
output[4*n+1] = (int16_t)x1;
output[4*n+2] = (int16_t)x2;
output[4*n+3] = (int16_t)x3;
mask |= (x0 | x1 | x2 | x3);
}
// update adaptive threshold
processHistogram(numFrames);
return mask != 0;
}
template<int N>
void GateQuad<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
bool GateQuad<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
int32_t mask = 0;
for (int n = 0; n < numFrames; n++) {
@ -660,11 +696,19 @@ void GateQuad<N>::removeDC(int16_t* input, int16_t* output, int numFrames) {
_dc.process(x0, x1, x2, x3);
// store 16-bit output
output[4*n+0] = (int16_t)saturateQ30(x0);
output[4*n+1] = (int16_t)saturateQ30(x1);
output[4*n+2] = (int16_t)saturateQ30(x2);
output[4*n+3] = (int16_t)saturateQ30(x3);
x0 = saturateQ30(x0);
x1 = saturateQ30(x1);
x2 = saturateQ30(x2);
x3 = saturateQ30(x3);
output[4*n+0] = (int16_t)x0;
output[4*n+1] = (int16_t)x1;
output[4*n+2] = (int16_t)x2;
output[4*n+3] = (int16_t)x3;
mask |= (x0 | x1 | x2 | x3);
}
return mask != 0;
}
//
@ -721,12 +765,12 @@ AudioGate::~AudioGate() {
delete _impl;
}
void AudioGate::render(int16_t* input, int16_t* output, int numFrames) {
_impl->process(input, output, numFrames);
bool AudioGate::render(int16_t* input, int16_t* output, int numFrames) {
return _impl->process(input, output, numFrames);
}
void AudioGate::removeDC(int16_t* input, int16_t* output, int numFrames) {
_impl->removeDC(input, output, numFrames);
bool AudioGate::removeDC(int16_t* input, int16_t* output, int numFrames) {
return _impl->removeDC(input, output, numFrames);
}
void AudioGate::setThreshold(float threshold) {

View file

@ -18,9 +18,12 @@ public:
AudioGate(int sampleRate, int numChannels);
~AudioGate();
// interleaved int16_t input/output (in-place is allowed)
void render(int16_t* input, int16_t* output, int numFrames);
void removeDC(int16_t* input, int16_t* output, int numFrames);
//
// Process interleaved int16_t input/output (in-place is allowed).
// Returns true when output is non-zero.
//
bool render(int16_t* input, int16_t* output, int numFrames);
bool removeDC(int16_t* input, int16_t* output, int numFrames);
void setThreshold(float threshold);
void setRelease(float release);

View file

@ -1736,15 +1736,17 @@ void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
void Avatar::computeDetailedShapeInfo(ShapeInfo& shapeInfo, int jointIndex) {
if (jointIndex > -1 && jointIndex < (int)_multiSphereShapes.size()) {
auto& data = _multiSphereShapes[jointIndex].getSpheresData();
std::vector<glm::vec3> positions;
std::vector<btScalar> radiuses;
positions.reserve(data.size());
radiuses.reserve(data.size());
for (auto& sphere : data) {
positions.push_back(sphere._position);
radiuses.push_back(sphere._radius);
if (data.size() > 0) {
std::vector<glm::vec3> positions;
std::vector<btScalar> radiuses;
positions.reserve(data.size());
radiuses.reserve(data.size());
for (auto& sphere : data) {
positions.push_back(sphere._position);
radiuses.push_back(sphere._radius);
}
shapeInfo.setMultiSphere(positions, radiuses);
}
shapeInfo.setMultiSphere(positions, radiuses);
}
}
@ -1975,12 +1977,12 @@ float Avatar::getUnscaledEyeHeightFromSkeleton() const {
auto& rig = _skeletonModel->getRig();
// Normally the model offset transform will contain the avatar scale factor; we explicitly remove it here.
AnimPose modelOffsetWithoutAvatarScale(glm::vec3(1.0f), rig.getModelOffsetPose().rot(), rig.getModelOffsetPose().trans());
AnimPose modelOffsetWithoutAvatarScale(1.0f, rig.getModelOffsetPose().rot(), rig.getModelOffsetPose().trans());
AnimPose geomToRigWithoutAvatarScale = modelOffsetWithoutAvatarScale * rig.getGeometryOffsetPose();
// This factor can be used to scale distances in the geometry frame into the unscaled rig frame.
// Typically it will be the unit conversion from cm to m.
float scaleFactor = geomToRigWithoutAvatarScale.scale().x; // in practice this always a uniform scale factor.
float scaleFactor = geomToRigWithoutAvatarScale.scale();
int headTopJoint = rig.indexOfJoint("HeadTop_End");
int headJoint = rig.indexOfJoint("Head");

View file

@ -249,14 +249,6 @@ bool SkeletonModel::getRightHandPosition(glm::vec3& position) const {
return getJointPositionInWorldFrame(getRightHandJointIndex(), position);
}
bool SkeletonModel::getLeftShoulderPosition(glm::vec3& position) const {
return getJointPositionInWorldFrame(getLastFreeJointIndex(getLeftHandJointIndex()), position);
}
bool SkeletonModel::getRightShoulderPosition(glm::vec3& position) const {
return getJointPositionInWorldFrame(getLastFreeJointIndex(getRightHandJointIndex()), position);
}
bool SkeletonModel::getHeadPosition(glm::vec3& headPosition) const {
return isActive() && getJointPositionInWorldFrame(_rig.indexOfJoint("Head"), headPosition);
}

View file

@ -57,17 +57,9 @@ public:
/// \return true whether or not the position was found
bool getRightHandPosition(glm::vec3& position) const;
/// Gets the position of the left shoulder.
/// \return whether or not the left shoulder joint was found
bool getLeftShoulderPosition(glm::vec3& position) const;
/// Returns the extended length from the left hand to its last free ancestor.
float getLeftArmLength() const;
/// Gets the position of the right shoulder.
/// \return whether or not the right shoulder joint was found
bool getRightShoulderPosition(glm::vec3& position) const;
/// Returns the position of the head joint.
/// \return whether or not the head was found
bool getHeadPosition(glm::vec3& headPosition) const;

View file

@ -107,11 +107,10 @@ int ClientTraitsHandler::sendChangedTraitsToMixer() {
if (initialSend || *simpleIt == Updated) {
if (traitType == AvatarTraits::SkeletonModelURL) {
bytesWritten += _owningAvatar->packTrait(traitType, *traitsPacketList);
// keep track of our skeleton version in case we get an override back
_currentSkeletonVersion = _currentTraitVersion;
}
bytesWritten += _owningAvatar->packTrait(traitType, *traitsPacketList);
}
++simpleIt;

View file

@ -1289,6 +1289,17 @@ CalculateEntityLoadingPriority EntityTreeRenderer::_calculateEntityLoadingPriori
return 0.0f;
};
std::pair<bool, bool> EntityTreeRenderer::getZoneInteractionProperties() {
for (auto& zone : _layeredZones) {
// Only domain entities control flying allowed and ghosting allowed
if (zone.zone && zone.zone->isDomainEntity()) {
return { zone.zone->getFlyingAllowed(), zone.zone->getGhostingAllowed() };
}
}
return { true, true };
}
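Editor's note: the pair is {flyingAllowed, ghostingAllowed}, taken from the first domain-entity zone in the layered-zone list, or {true, true} if none applies. A hypothetical caller (the pointer name is made up) would unpack it like this:

// assumes a valid EntityTreeRenderer pointer named entityTreeRenderer
auto zoneInteraction = entityTreeRenderer->getZoneInteractionProperties();
bool flyingAllowed = zoneInteraction.first;
bool ghostingAllowed = zoneInteraction.second;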
bool EntityTreeRenderer::wantsKeyboardFocus(const EntityItemID& id) const {
auto renderable = renderableForEntityId(id);
if (!renderable) {

View file

@ -105,7 +105,7 @@ public:
// For Scene.shouldRenderEntities
QList<EntityItemID>& getEntitiesLastInScene() { return _entityIDsLastInScene; }
std::shared_ptr<ZoneEntityItem> myAvatarZone() { return _layeredZones.getZone(); }
std::pair<bool, bool> getZoneInteractionProperties();
bool wantsKeyboardFocus(const EntityItemID& id) const;
QObject* getEventHandler(const EntityItemID& id);

View file

@ -959,23 +959,6 @@ QStringList RenderableModelEntityItem::getJointNames() const {
return result;
}
void RenderableModelEntityItem::setAnimationURL(const QString& url) {
QString oldURL = getAnimationURL();
ModelEntityItem::setAnimationURL(url);
if (oldURL != getAnimationURL()) {
_needsAnimationReset = true;
}
}
bool RenderableModelEntityItem::needsAnimationReset() const {
return _needsAnimationReset;
}
QString RenderableModelEntityItem::getAnimationURLAndReset() {
_needsAnimationReset = false;
return getAnimationURL();
}
scriptable::ScriptableModelBase render::entities::ModelEntityRenderer::getScriptableModel() {
auto model = resultWithReadLock<ModelPointer>([this]{ return _model; });
@ -1264,7 +1247,7 @@ bool ModelEntityRenderer::needsRenderUpdateFromTypedEntity(const TypedEntityPoin
return false;
}
if (_lastTextures != entity->getTextures()) {
if (_textures != entity->getTextures()) {
return true;
}
@ -1418,15 +1401,14 @@ void ModelEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& sce
entity->_originalTexturesRead = true;
}
if (_lastTextures != entity->getTextures()) {
if (_textures != entity->getTextures()) {
QVariantMap newTextures;
withWriteLock([&] {
_texturesLoaded = false;
_lastTextures = entity->getTextures();
_textures = entity->getTextures();
newTextures = parseTexturesToMap(_textures, entity->_originalTextures);
});
auto newTextures = parseTexturesToMap(_lastTextures, entity->_originalTextures);
if (newTextures != model->getTextures()) {
model->setTextures(newTextures);
}
model->setTextures(newTextures);
}
if (entity->_needsJointSimulation) {
entity->copyAnimationJointDataToModel();
@ -1494,11 +1476,17 @@ void ModelEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& sce
if (_animating) {
DETAILED_PROFILE_RANGE(simulation_physics, "Animate");
if (_animation && entity->needsAnimationReset()) {
//(_animation->getURL().toString() != entity->getAnimationURL())) { // bad check
// the joints have been mapped before but we have a new animation to load
_animation.reset();
_jointMappingCompleted = false;
auto animationURL = entity->getAnimationURL();
bool animationChanged = _animationURL != animationURL;
if (animationChanged) {
_animationURL = animationURL;
if (_animation) {
//(_animation->getURL().toString() != entity->getAnimationURL())) { // bad check
// the joints have been mapped before but we have a new animation to load
_animation.reset();
_jointMappingCompleted = false;
}
}
if (!_jointMappingCompleted) {
@ -1563,7 +1551,7 @@ void ModelEntityRenderer::mapJoints(const TypedEntityPointer& entity, const Mode
}
if (!_animation) {
_animation = DependencyManager::get<AnimationCache>()->getAnimation(entity->getAnimationURLAndReset());
_animation = DependencyManager::get<AnimationCache>()->getAnimation(_animationURL);
}
if (_animation && _animation->isLoaded()) {

View file

@ -113,10 +113,6 @@ public:
virtual int getJointIndex(const QString& name) const override;
virtual QStringList getJointNames() const override;
void setAnimationURL(const QString& url) override;
bool needsAnimationReset() const;
QString getAnimationURLAndReset();
private:
bool needsUpdateModelBounds() const;
void autoResizeJointArrays();
@ -131,7 +127,6 @@ private:
bool _originalTexturesRead { false };
bool _dimensionsInitialized { true };
bool _needsJointSimulation { false };
bool _needsAnimationReset { false };
};
namespace render { namespace entities {
@ -181,7 +176,7 @@ private:
bool _hasModel { false };
ModelPointer _model;
QString _lastTextures;
QString _textures;
bool _texturesLoaded { false };
int _lastKnownCurrentFrame { -1 };
#ifdef MODEL_ENTITY_USE_FADE_EFFECT
@ -190,12 +185,12 @@ private:
const void* _collisionMeshKey { nullptr };
// used on client side
QUrl _parsedModelURL;
bool _jointMappingCompleted { false };
QVector<int> _jointMapping; // domain is index into model-joints, range is index into animation-joints
AnimationPointer _animation;
QUrl _parsedModelURL;
bool _animating { false };
QString _animationURL;
uint64_t _lastAnimated { 0 };
render::ItemKey _itemKey { render::ItemKey::Builder().withTypeMeta() };

View file

@ -198,24 +198,33 @@ void ZoneEntityRenderer::removeFromScene(const ScenePointer& scene, Transaction&
void ZoneEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) {
DependencyManager::get<EntityTreeRenderer>()->updateZone(entity->getID());
auto position = entity->getWorldPosition();
auto rotation = entity->getWorldOrientation();
auto dimensions = entity->getScaledDimensions();
bool rotationChanged = rotation != _lastRotation;
bool transformChanged = rotationChanged || position != _lastPosition || dimensions != _lastDimensions;
auto proceduralUserData = entity->getUserData();
bool proceduralUserDataChanged = _proceduralUserData != proceduralUserData;
// FIXME one of the bools here could become true between being fetched and being reset,
// resulting in a lost update
bool keyLightChanged = entity->keyLightPropertiesChanged();
bool ambientLightChanged = entity->ambientLightPropertiesChanged();
bool skyboxChanged = entity->skyboxPropertiesChanged();
bool keyLightChanged = entity->keyLightPropertiesChanged() || rotationChanged;
bool ambientLightChanged = entity->ambientLightPropertiesChanged() || transformChanged;
bool skyboxChanged = entity->skyboxPropertiesChanged() || proceduralUserDataChanged;
bool hazeChanged = entity->hazePropertiesChanged();
bool bloomChanged = entity->bloomPropertiesChanged();
entity->resetRenderingPropertiesChanged();
_lastPosition = entity->getWorldPosition();
_lastRotation = entity->getWorldOrientation();
_lastDimensions = entity->getScaledDimensions();
_keyLightProperties = entity->getKeyLightProperties();
_ambientLightProperties = entity->getAmbientLightProperties();
_skyboxProperties = entity->getSkyboxProperties();
_hazeProperties = entity->getHazeProperties();
_bloomProperties = entity->getBloomProperties();
if (transformChanged) {
_lastPosition = entity->getWorldPosition();
_lastRotation = entity->getWorldOrientation();
_lastDimensions = entity->getScaledDimensions();
}
if (proceduralUserDataChanged) {
_proceduralUserData = entity->getUserData();
}
#if 0
if (_lastShapeURL != _typedEntity->getCompoundShapeURL()) {
@ -239,21 +248,29 @@ void ZoneEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scen
updateKeyZoneItemFromEntity(entity);
if (keyLightChanged) {
_keyLightProperties = entity->getKeyLightProperties();
updateKeySunFromEntity(entity);
}
if (ambientLightChanged) {
_ambientLightProperties = entity->getAmbientLightProperties();
updateAmbientLightFromEntity(entity);
}
if (skyboxChanged || _proceduralUserData != entity->getUserData()) {
if (skyboxChanged) {
_skyboxProperties = entity->getSkyboxProperties();
updateKeyBackgroundFromEntity(entity);
}
if (hazeChanged) {
_hazeProperties = entity->getHazeProperties();
updateHazeFromEntity(entity);
}
if (bloomChanged) {
_bloomProperties = entity->getBloomProperties();
updateBloomFromEntity(entity);
}
bool visuallyReady = true;
uint32_t skyboxMode = entity->getSkyboxMode();
@ -264,10 +281,6 @@ void ZoneEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scen
}
entity->setVisuallyReady(visuallyReady);
if (bloomChanged) {
updateBloomFromEntity(entity);
}
}
void ZoneEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) {
@ -344,7 +357,7 @@ void ZoneEntityRenderer::updateKeySunFromEntity(const TypedEntityPointer& entity
// Set the keylight
sunLight->setColor(ColorUtils::toVec3(_keyLightProperties.getColor()));
sunLight->setIntensity(_keyLightProperties.getIntensity());
sunLight->setDirection(entity->getTransform().getRotation() * _keyLightProperties.getDirection());
sunLight->setDirection(_lastRotation * _keyLightProperties.getDirection());
sunLight->setCastShadows(_keyLightProperties.getCastShadows());
}
@ -356,7 +369,6 @@ void ZoneEntityRenderer::updateAmbientLightFromEntity(const TypedEntityPointer&
ambientLight->setPosition(_lastPosition);
ambientLight->setOrientation(_lastRotation);
// Set the ambient light
ambientLight->setAmbientIntensity(_ambientLightProperties.getAmbientIntensity());
@ -395,8 +407,6 @@ void ZoneEntityRenderer::updateHazeFromEntity(const TypedEntityPointer& entity)
haze->setHazeAttenuateKeyLight(_hazeProperties.getHazeAttenuateKeyLight());
haze->setHazeKeyLightRangeFactor(graphics::Haze::convertHazeRangeToHazeRangeFactor(_hazeProperties.getHazeKeyLightRange()));
haze->setHazeKeyLightAltitudeFactor(graphics::Haze::convertHazeAltitudeToHazeAltitudeFactor(_hazeProperties.getHazeKeyLightAltitude()));
haze->setTransform(entity->getTransform().getMatrix());
}
void ZoneEntityRenderer::updateBloomFromEntity(const TypedEntityPointer& entity) {
@ -414,13 +424,13 @@ void ZoneEntityRenderer::updateKeyBackgroundFromEntity(const TypedEntityPointer&
editBackground();
setSkyboxColor(toGlm(_skyboxProperties.getColor()));
setProceduralUserData(entity->getUserData());
setProceduralUserData(_proceduralUserData);
setSkyboxURL(_skyboxProperties.getURL());
}
void ZoneEntityRenderer::updateKeyZoneItemFromEntity(const TypedEntityPointer& entity) {
// Update rotation values
editSkybox()->setOrientation(entity->getTransform().getRotation());
editSkybox()->setOrientation(_lastRotation);
/* TODO: Implement the sun model behavior / Keep this code here for reference, this is how we
{
@ -540,9 +550,6 @@ void ZoneEntityRenderer::setSkyboxColor(const glm::vec3& color) {
}
void ZoneEntityRenderer::setProceduralUserData(const QString& userData) {
if (_proceduralUserData != userData) {
_proceduralUserData = userData;
std::dynamic_pointer_cast<ProceduralSkybox>(editSkybox())->parse(_proceduralUserData);
}
std::dynamic_pointer_cast<ProceduralSkybox>(editSkybox())->parse(userData);
}

View file

@ -1414,8 +1414,9 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
* @property {Entities.Bloom} bloom - The bloom properties of the zone.
*
* @property {boolean} flyingAllowed=true - If <code>true</code> then visitors can fly in the zone; otherwise they cannot.
* Only works on domain entities.
* @property {boolean} ghostingAllowed=true - If <code>true</code> then visitors with avatar collisions turned off will not
* collide with content in the zone; otherwise visitors will always collide with content in the zone.
* collide with content in the zone; otherwise visitors will always collide with content in the zone. Only works on domain entities.
* @property {string} filterURL="" - The URL of a JavaScript file that filters changes to properties of entities within the
* zone. It is periodically executed for each entity in the zone. It can, for example, be used to not allow changes to

View file

@ -43,14 +43,15 @@ ModelEntityItem::ModelEntityItem(const EntityItemID& entityItemID) : EntityItem(
}
const QString ModelEntityItem::getTextures() const {
QReadLocker locker(&_texturesLock);
auto textures = _textures;
return textures;
return resultWithReadLock<QString>([&] {
return _textures;
});
}
void ModelEntityItem::setTextures(const QString& textures) {
QWriteLocker locker(&_texturesLock);
_textures = textures;
withWriteLock([&] {
_textures = textures;
});
}
EntityItemProperties ModelEntityItem::getProperties(const EntityPropertyFlags& desiredProperties, bool allowEmptyDesiredProperties) const {

View file

@ -163,7 +163,6 @@ protected:
AnimationPropertyGroup _animationProperties;
mutable QReadWriteLock _texturesLock;
QString _textures;
ShapeType _shapeType = SHAPE_TYPE_NONE;

View file

@ -119,7 +119,7 @@ bool ZoneEntityItem::setSubClassProperties(const EntityItemProperties& propertie
SET_ENTITY_PROPERTY_FROM_PROPERTIES(bloomMode, setBloomMode);
somethingChanged = somethingChanged || _keyLightPropertiesChanged || _ambientLightPropertiesChanged ||
_stagePropertiesChanged || _skyboxPropertiesChanged || _hazePropertiesChanged || _bloomPropertiesChanged;
_skyboxPropertiesChanged || _hazePropertiesChanged || _bloomPropertiesChanged;
return somethingChanged;
}
@ -394,7 +394,6 @@ void ZoneEntityItem::resetRenderingPropertiesChanged() {
_skyboxPropertiesChanged = false;
_hazePropertiesChanged = false;
_bloomPropertiesChanged = false;
_stagePropertiesChanged = false;
});
}

View file

@ -102,8 +102,6 @@ public:
bool hazePropertiesChanged() const { return _hazePropertiesChanged; }
bool bloomPropertiesChanged() const { return _bloomPropertiesChanged; }
bool stagePropertiesChanged() const { return _stagePropertiesChanged; }
void resetRenderingPropertiesChanged();
virtual bool supportsDetailedIntersection() const override { return true; }
@ -155,7 +153,6 @@ protected:
bool _skyboxPropertiesChanged { false };
bool _hazePropertiesChanged{ false };
bool _bloomPropertiesChanged { false };
bool _stagePropertiesChanged { false };
static bool _drawZoneBoundaries;
static bool _zonesArePickable;

View file

@ -384,43 +384,6 @@ QByteArray fileOnUrl(const QByteArray& filepath, const QString& url) {
return filepath.mid(filepath.lastIndexOf('/') + 1);
}
QMap<QString, QString> getJointNameMapping(const QVariantHash& mapping) {
static const QString JOINT_NAME_MAPPING_FIELD = "jointMap";
QMap<QString, QString> hfmToHifiJointNameMap;
if (!mapping.isEmpty() && mapping.contains(JOINT_NAME_MAPPING_FIELD) && mapping[JOINT_NAME_MAPPING_FIELD].type() == QVariant::Hash) {
auto jointNames = mapping[JOINT_NAME_MAPPING_FIELD].toHash();
for (auto itr = jointNames.begin(); itr != jointNames.end(); itr++) {
hfmToHifiJointNameMap.insert(itr.key(), itr.value().toString());
qCDebug(modelformat) << "the mapped key " << itr.key() << " has a value of " << hfmToHifiJointNameMap[itr.key()];
}
}
return hfmToHifiJointNameMap;
}
QMap<QString, glm::quat> getJointRotationOffsets(const QVariantHash& mapping) {
QMap<QString, glm::quat> jointRotationOffsets;
static const QString JOINT_ROTATION_OFFSET_FIELD = "jointRotationOffset";
if (!mapping.isEmpty() && mapping.contains(JOINT_ROTATION_OFFSET_FIELD) && mapping[JOINT_ROTATION_OFFSET_FIELD].type() == QVariant::Hash) {
auto offsets = mapping[JOINT_ROTATION_OFFSET_FIELD].toHash();
for (auto itr = offsets.begin(); itr != offsets.end(); itr++) {
QString jointName = itr.key();
QString line = itr.value().toString();
auto quatCoords = line.split(',');
if (quatCoords.size() == 4) {
float quatX = quatCoords[0].mid(1).toFloat();
float quatY = quatCoords[1].toFloat();
float quatZ = quatCoords[2].toFloat();
float quatW = quatCoords[3].mid(0, quatCoords[3].size() - 1).toFloat();
if (!isNaN(quatX) && !isNaN(quatY) && !isNaN(quatZ) && !isNaN(quatW)) {
glm::quat rotationOffset = glm::quat(quatW, quatX, quatY, quatZ);
jointRotationOffsets.insert(jointName, rotationOffset);
}
}
}
}
return jointRotationOffsets;
}
HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QString& url) {
const FBXNode& node = _rootNode;
QMap<QString, ExtractedMesh> meshes;
@ -444,8 +407,6 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
std::map<QString, HFMLight> lights;
QVariantHash joints = mapping.value("joint").toHash();
QVariantHash blendshapeMappings = mapping.value("bs").toHash();
QMultiHash<QByteArray, WeightedIndex> blendshapeIndices;
@ -473,8 +434,6 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
HFMModel& hfmModel = *hfmModelPtr;
hfmModel.originalURL = url;
hfmModel.hfmToHifiJointNameMapping.clear();
hfmModel.hfmToHifiJointNameMapping = getJointNameMapping(mapping);
float unitScaleFactor = 1.0f;
glm::vec3 ambientColor;
@ -1287,26 +1246,14 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
}
// convert the models to joints
QVariantList freeJoints = mapping.values("freeJoint");
hfmModel.hasSkeletonJoints = false;
foreach (const QString& modelID, modelIDs) {
const FBXModel& fbxModel = fbxModels[modelID];
HFMJoint joint;
joint.isFree = freeJoints.contains(fbxModel.name);
joint.parentIndex = fbxModel.parentIndex;
// get the indices of all ancestors starting with the first free one (if any)
int jointIndex = hfmModel.joints.size();
joint.freeLineage.append(jointIndex);
int lastFreeIndex = joint.isFree ? 0 : -1;
for (int index = joint.parentIndex; index != -1; index = hfmModel.joints.at(index).parentIndex) {
if (hfmModel.joints.at(index).isFree) {
lastFreeIndex = joint.freeLineage.size();
}
joint.freeLineage.append(index);
}
joint.freeLineage.remove(lastFreeIndex + 1, joint.freeLineage.size() - lastFreeIndex - 1);
joint.translation = fbxModel.translation; // these are usually in centimeters
joint.preTransform = fbxModel.preTransform;
joint.preRotation = fbxModel.preRotation;
@ -1341,14 +1288,10 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
}
joint.inverseBindRotation = joint.inverseDefaultRotation;
joint.name = fbxModel.name;
if (hfmModel.hfmToHifiJointNameMapping.contains(hfmModel.hfmToHifiJointNameMapping.key(joint.name))) {
joint.name = hfmModel.hfmToHifiJointNameMapping.key(fbxModel.name);
}
joint.bindTransformFoundInCluster = false;
hfmModel.joints.append(joint);
hfmModel.jointIndices.insert(joint.name, hfmModel.joints.size());
QString rotationID = localRotations.value(modelID);
AnimationCurve xRotCurve = animationCurves.value(xComponents.value(rotationID));
@ -1704,7 +1647,6 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
generateBoundryLinesForDop14(joint.shapeInfo.dots, joint.shapeInfo.avgPoint, joint.shapeInfo.debugLines);
}
}
hfmModel.palmDirection = parseVec3(mapping.value("palmDirection", "0, -1, 0").toString());
// attempt to map any meshes to a named model
for (QHash<QString, int>::const_iterator m = meshIDsToMeshIndices.constBegin();
@ -1722,21 +1664,6 @@ HFMModel* FBXSerializer::extractHFMModel(const QVariantHash& mapping, const QStr
}
}
auto offsets = getJointRotationOffsets(mapping);
hfmModel.jointRotationOffsets.clear();
for (auto itr = offsets.begin(); itr != offsets.end(); itr++) {
QString jointName = itr.key();
glm::quat rotationOffset = itr.value();
int jointIndex = hfmModel.getJointIndex(jointName);
if (hfmModel.hfmToHifiJointNameMapping.contains(jointName)) {
jointIndex = hfmModel.getJointIndex(jointName);
}
if (jointIndex != -1) {
hfmModel.jointRotationOffsets.insert(jointIndex, rotationOffset);
}
qCDebug(modelformat) << "Joint Rotation Offset added to Rig._jointRotationOffsets : " << " jointName: " << jointName << " jointIndex: " << jointIndex << " rotation offset: " << rotationOffset;
}
return hfmModelPtr;
}

View file

@ -82,11 +82,6 @@ FST* FST::createFSTFromModel(const QString& fstPath, const QString& modelFilePat
}
mapping.insert(JOINT_INDEX_FIELD, jointIndices);
mapping.insertMulti(FREE_JOINT_FIELD, "LeftArm");
mapping.insertMulti(FREE_JOINT_FIELD, "LeftForeArm");
mapping.insertMulti(FREE_JOINT_FIELD, "RightArm");
mapping.insertMulti(FREE_JOINT_FIELD, "RightForeArm");
// If there are no blendshape mappings, and we detect that this is likely a mixamo file,
// then we can add the default mixamo to "faceshift" mappings

View file

@ -84,7 +84,7 @@ void FSTReader::writeVariant(QBuffer& buffer, QVariantHash::const_iterator& it)
QByteArray FSTReader::writeMapping(const QVariantHash& mapping) {
static const QStringList PREFERED_ORDER = QStringList() << NAME_FIELD << TYPE_FIELD << SCALE_FIELD << FILENAME_FIELD
<< MARKETPLACE_ID_FIELD << TEXDIR_FIELD << SCRIPT_FIELD << JOINT_FIELD << FREE_JOINT_FIELD
<< MARKETPLACE_ID_FIELD << TEXDIR_FIELD << SCRIPT_FIELD << JOINT_FIELD
<< BLENDSHAPE_FIELD << JOINT_INDEX_FIELD;
QBuffer buffer;
buffer.open(QIODevice::WriteOnly);
@ -92,7 +92,7 @@ QByteArray FSTReader::writeMapping(const QVariantHash& mapping) {
for (auto key : PREFERED_ORDER) {
auto it = mapping.find(key);
if (it != mapping.constEnd()) {
if (key == FREE_JOINT_FIELD || key == SCRIPT_FIELD) { // writeVariant does not handle strings added using insertMulti.
if (key == SCRIPT_FIELD) { // writeVariant does not handle strings added using insertMulti.
for (auto multi : mapping.values(key)) {
buffer.write(key.toUtf8());
buffer.write(" = ");

View file

@ -27,7 +27,6 @@ static const QString TRANSLATION_X_FIELD = "tx";
static const QString TRANSLATION_Y_FIELD = "ty";
static const QString TRANSLATION_Z_FIELD = "tz";
static const QString JOINT_FIELD = "joint";
static const QString FREE_JOINT_FIELD = "freeJoint";
static const QString BLENDSHAPE_FIELD = "bs";
static const QString SCRIPT_FIELD = "script";
static const QString JOINT_NAME_MAPPING_FIELD = "jointMap";

libraries/fbx/src/GLTFSerializer.cpp Normal file → Executable file
View file

@ -352,9 +352,14 @@ bool GLTFSerializer::addImage(const QJsonObject& object) {
QString mime;
getStringVal(object, "uri", image.uri, image.defined);
if (image.uri.contains("data:image/png;base64,")) {
image.mimeType = getImageMimeType("image/png");
} else if (image.uri.contains("data:image/jpeg;base64,")) {
image.mimeType = getImageMimeType("image/jpeg");
}
if (getStringVal(object, "mimeType", mime, image.defined)) {
image.mimeType = getImageMimeType(mime);
}
}
getIntVal(object, "bufferView", image.bufferView, image.defined);
_file.images.push_back(image);
@ -719,7 +724,6 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const QUrl& url) {
//Build default joints
hfmModel.joints.resize(1);
hfmModel.joints[0].isFree = false;
hfmModel.joints[0].parentIndex = -1;
hfmModel.joints[0].distanceToParent = 0;
hfmModel.joints[0].translation = glm::vec3(0, 0, 0);
@ -831,6 +835,22 @@ bool GLTFSerializer::buildGeometry(HFMModel& hfmModel, const QUrl& url) {
for (int n = 0; n < normals.size(); n = n + 3) {
mesh.normals.push_back(glm::vec3(normals[n], normals[n + 1], normals[n + 2]));
}
} else if (key == "COLOR_0") {
QVector<float> colors;
success = addArrayOfType(buffer.blob,
bufferview.byteOffset + accBoffset,
accessor.count,
colors,
accessor.type,
accessor.componentType);
if (!success) {
qWarning(modelformat) << "There was a problem reading glTF COLOR_0 data for model " << _url;
continue;
}
int stride = (accessor.type == GLTFAccessorType::VEC4) ? 4 : 3;
for (int n = 0; n < colors.size() - 3; n += stride) {
mesh.colors.push_back(glm::vec3(colors[n], colors[n + 1], colors[n + 2]));
}
} else if (key == "TEXCOORD_0") {
QVector<float> texcoords;
success = addArrayOfType(buffer.blob,
@ -926,7 +946,6 @@ HFMModel::Pointer GLTFSerializer::read(const QByteArray& data, const QVariantHas
//_file.dump();
auto hfmModelPtr = std::make_shared<HFMModel>();
HFMModel& hfmModel = *hfmModelPtr;
buildGeometry(hfmModel, _url);
//hfmDebugDump(data);
@ -939,10 +958,15 @@ HFMModel::Pointer GLTFSerializer::read(const QByteArray& data, const QVariantHas
}
bool GLTFSerializer::readBinary(const QString& url, QByteArray& outdata) {
QUrl binaryUrl = _url.resolved(url);
bool success;
std::tie<bool, QByteArray>(success, outdata) = requestData(binaryUrl);
if (url.contains("data:application/octet-stream;base64,")) {
outdata = requestEmbeddedData(url);
success = !outdata.isEmpty();
} else {
QUrl binaryUrl = _url.resolved(url);
std::tie<bool, QByteArray>(success, outdata) = requestData(binaryUrl);
}
return success;
}
@ -975,6 +999,11 @@ std::tuple<bool, QByteArray> GLTFSerializer::requestData(QUrl& url) {
}
}
QByteArray GLTFSerializer::requestEmbeddedData(const QString& url) {
QString binaryUrl = url.split(",")[1];
return binaryUrl.isEmpty() ? QByteArray() : QByteArray::fromBase64(binaryUrl.toUtf8());
}
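Editor's note: a glTF data URI splits at the first comma and the Base64 tail decodes back to the raw bytes; a toy example of the same idea:

#include <QByteArray>
#include <QString>
#include <QStringList>

int main() {
    QString uri = "data:application/octet-stream;base64,AAECAw==";
    QByteArray bytes = QByteArray::fromBase64(uri.split(",")[1].toUtf8());
    return bytes.size() == 4 ? 0 : 1;  // bytes now holds 0x00 0x01 0x02 0x03
}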
QNetworkReply* GLTFSerializer::request(QUrl& url, bool isTest) {
if (!qApp) {
@ -1006,11 +1035,16 @@ HFMTexture GLTFSerializer::getHFMTexture(const GLTFTexture& texture) {
if (texture.defined["source"]) {
QString url = _file.images[texture.source].uri;
QString fname = QUrl(url).fileName();
QUrl textureUrl = _url.resolved(url);
qCDebug(modelformat) << "fname: " << fname;
fbxtex.name = fname;
fbxtex.filename = textureUrl.toEncoded();
if (url.contains("data:image/jpeg;base64,") || url.contains("data:image/png;base64,")) {
fbxtex.content = requestEmbeddedData(url);
}
}
return fbxtex;
}
@ -1187,8 +1221,6 @@ void GLTFSerializer::hfmDebugDump(const HFMModel& hfmModel) {
qCDebug(modelformat) << " hasSkeletonJoints =" << hfmModel.hasSkeletonJoints;
qCDebug(modelformat) << " offset =" << hfmModel.offset;
qCDebug(modelformat) << " palmDirection = " << hfmModel.palmDirection;
qCDebug(modelformat) << " neckPivot = " << hfmModel.neckPivot;
qCDebug(modelformat) << " bindExtents.size() = " << hfmModel.bindExtents.size();
@ -1301,8 +1333,6 @@ void GLTFSerializer::hfmDebugDump(const HFMModel& hfmModel) {
qCDebug(modelformat) << " shapeInfo.dots =" << joint.shapeInfo.dots;
qCDebug(modelformat) << " shapeInfo.points =" << joint.shapeInfo.points;
qCDebug(modelformat) << " isFree =" << joint.isFree;
qCDebug(modelformat) << " freeLineage" << joint.freeLineage;
qCDebug(modelformat) << " parentIndex" << joint.parentIndex;
qCDebug(modelformat) << " distanceToParent" << joint.distanceToParent;
qCDebug(modelformat) << " translation" << joint.translation;

libraries/fbx/src/GLTFSerializer.h Normal file → Executable file
View file

@ -772,6 +772,8 @@ private:
QVector<glm::vec3>& out_vertices, QVector<glm::vec3>& out_normals);
std::tuple<bool, QByteArray> requestData(QUrl& url);
QByteArray requestEmbeddedData(const QString& url);
QNetworkReply* request(QUrl& url, bool isTest);
bool doesResourceExist(const QString& url);

View file

@ -687,7 +687,6 @@ HFMModel::Pointer OBJSerializer::read(const QByteArray& data, const QVariantHash
mesh.meshIndex = 0;
hfmModel.joints.resize(1);
hfmModel.joints[0].isFree = false;
hfmModel.joints[0].parentIndex = -1;
hfmModel.joints[0].distanceToParent = 0;
hfmModel.joints[0].translation = glm::vec3(0, 0, 0);
@ -1048,8 +1047,7 @@ void hfmDebugDump(const HFMModel& hfmModel) {
qCDebug(modelformat) << " joints.count() =" << hfmModel.joints.count();
foreach (HFMJoint joint, hfmModel.joints) {
qCDebug(modelformat) << " isFree =" << joint.isFree;
qCDebug(modelformat) << " freeLineage" << joint.freeLineage;
qCDebug(modelformat) << " parentIndex" << joint.parentIndex;
qCDebug(modelformat) << " distanceToParent" << joint.distanceToParent;
qCDebug(modelformat) << " translation" << joint.translation;

View file

@ -182,12 +182,3 @@ void Haze::setHazeBackgroundBlend(const float hazeBackgroundBlend) {
_hazeParametersBuffer.edit<Parameters>().hazeBackgroundBlend = newBlend;
}
}
void Haze::setTransform(const glm::mat4& transform) {
auto& params = _hazeParametersBuffer.get<Parameters>();
if (params.transform != transform) {
_hazeParametersBuffer.edit<Parameters>().transform = transform;
}
}

View file

@ -92,8 +92,6 @@ namespace graphics {
void setHazeBackgroundBlend(const float hazeBackgroundBlend);
void setTransform(const glm::mat4& transform);
using UniformBufferView = gpu::BufferView;
UniformBufferView getHazeParametersBuffer() const { return _hazeParametersBuffer; }
@ -101,30 +99,32 @@ namespace graphics {
class Parameters {
public:
// DO NOT CHANGE ORDER HERE WITHOUT UNDERSTANDING THE std140 LAYOUT
glm::vec3 hazeColor{ INITIAL_HAZE_COLOR };
float hazeGlareBlend{ convertGlareAngleToPower(INITIAL_HAZE_GLARE_ANGLE) };
glm::vec3 hazeColor { INITIAL_HAZE_COLOR };
float hazeGlareBlend { convertGlareAngleToPower(INITIAL_HAZE_GLARE_ANGLE) };
glm::vec3 hazeGlareColor{ INITIAL_HAZE_GLARE_COLOR };
float hazeBaseReference{ INITIAL_HAZE_BASE_REFERENCE };
glm::vec3 hazeGlareColor { INITIAL_HAZE_GLARE_COLOR };
float hazeBaseReference { INITIAL_HAZE_BASE_REFERENCE };
glm::vec3 colorModulationFactor;
int hazeMode{ 0 }; // bit 0 - set to activate haze attenuation of fragment color
int hazeMode { 0 }; // bit 0 - set to activate haze attenuation of fragment color
// bit 1 - set to add the effect of altitude to the haze attenuation
// bit 2 - set to activate directional light attenuation mode
// bit 3 - set to blend between blend-in and blend-out colours
glm::mat4 transform;
// Padding required to align the struct
#if defined(__clang__)
__attribute__((unused))
#endif
vec3 __padding;
// Amount of background (skybox) to display, overriding the haze effect for the background
float hazeBackgroundBlend{ INITIAL_HAZE_BACKGROUND_BLEND };
float hazeBackgroundBlend { INITIAL_HAZE_BACKGROUND_BLEND };
// The haze attenuation exponents used by both fragment and directional light attenuation
float hazeRangeFactor{ convertHazeRangeToHazeRangeFactor(INITIAL_HAZE_RANGE) };
float hazeHeightFactor{ convertHazeAltitudeToHazeAltitudeFactor(INITIAL_HAZE_HEIGHT) };
float hazeKeyLightRangeFactor{ convertHazeRangeToHazeRangeFactor(INITIAL_KEY_LIGHT_RANGE) };
float hazeRangeFactor { convertHazeRangeToHazeRangeFactor(INITIAL_HAZE_RANGE) };
float hazeHeightFactor { convertHazeAltitudeToHazeAltitudeFactor(INITIAL_HAZE_HEIGHT) };
float hazeKeyLightRangeFactor { convertHazeRangeToHazeRangeFactor(INITIAL_KEY_LIGHT_RANGE) };
float hazeKeyLightAltitudeFactor{ convertHazeAltitudeToHazeAltitudeFactor(INITIAL_KEY_LIGHT_ALTITUDE) };
// Padding required to align the structure to sizeof(vec4)
vec3 __padding;
float hazeKeyLightAltitudeFactor { convertHazeAltitudeToHazeAltitudeFactor(INITIAL_KEY_LIGHT_ALTITUDE) };
Parameters() {}
};

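For context on the __padding member above: the Parameters block is read as a std140 uniform buffer, so scalar members are packed into the spare fourth lane of a preceding vec3 and the CPU-side struct has to stay a multiple of sizeof(vec4), which is all the explicit padding is for. A minimal hypothetical sketch of the same constraint (not code from this repository):

#include <glm/glm.hpp>

// Hypothetical std140-style parameter block: each vec3 occupies a 16-byte slot,
// a scalar can ride in the slot's spare 4 bytes, and the struct as a whole must
// end on a vec4 boundary.
struct ExampleParams {
    glm::vec3 color;      // bytes 0..11
    float blend;          // bytes 12..15, packs into the vec3 slot
    glm::vec3 __padding;  // bytes 16..27, explicit filler
    float rangeFactor;    // bytes 28..31
};
static_assert(sizeof(ExampleParams) % 16 == 0, "std140 blocks must end on a vec4 boundary");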
View file

@ -87,7 +87,7 @@ void Material::setUnlit(bool value) {
}
void Material::setAlbedo(const glm::vec3& albedo, bool isSRGB) {
_key.setAlbedo(glm::any(glm::greaterThan(albedo, glm::vec3(0.0f))));
_key.setAlbedo(true);
_albedo = (isSRGB ? ColorUtils::sRGBToLinearVec3(albedo) : albedo);
}

View file

@ -75,8 +75,6 @@ struct JointShapeInfo {
class Joint {
public:
JointShapeInfo shapeInfo;
QVector<int> freeLineage;
bool isFree;
int parentIndex;
float distanceToParent;
@ -291,8 +289,6 @@ public:
glm::mat4 offset; // This includes offset, rotation, and scale as specified by the FST file
glm::vec3 palmDirection;
glm::vec3 neckPivot;
Extents bindExtents;
@ -319,7 +315,6 @@ public:
QList<QString> blendshapeChannelNames;
QMap<int, glm::quat> jointRotationOffsets;
QMap<QString, QString> hfmToHifiJointNameMapping;
};
};

View file

@ -19,13 +19,14 @@
#include "CalculateMeshTangentsTask.h"
#include "CalculateBlendshapeNormalsTask.h"
#include "CalculateBlendshapeTangentsTask.h"
#include "PrepareJointsTask.h"
namespace baker {
class GetModelPartsTask {
public:
using Input = hfm::Model::Pointer;
using Output = VaryingSet5<std::vector<hfm::Mesh>, hifi::URL, baker::MeshIndicesToModelNames, baker::BlendshapesPerMesh, QHash<QString, hfm::Material>>;
using Output = VaryingSet6<std::vector<hfm::Mesh>, hifi::URL, baker::MeshIndicesToModelNames, baker::BlendshapesPerMesh, QHash<QString, hfm::Material>, std::vector<hfm::Joint>>;
using JobModel = Job::ModelIO<GetModelPartsTask, Input, Output>;
void run(const BakeContextPointer& context, const Input& input, Output& output) {
@ -39,6 +40,7 @@ namespace baker {
blendshapesPerMesh.push_back(hfmModelIn->meshes[i].blendshapes.toStdVector());
}
output.edit4() = hfmModelIn->materials;
output.edit5() = hfmModelIn->joints.toStdVector();
}
};
@ -99,23 +101,29 @@ namespace baker {
class BuildModelTask {
public:
using Input = VaryingSet2<hfm::Model::Pointer, std::vector<hfm::Mesh>>;
using Input = VaryingSet5<hfm::Model::Pointer, std::vector<hfm::Mesh>, std::vector<hfm::Joint>, QMap<int, glm::quat> /*jointRotationOffsets*/, QHash<QString, int> /*jointIndices*/>;
using Output = hfm::Model::Pointer;
using JobModel = Job::ModelIO<BuildModelTask, Input, Output>;
void run(const BakeContextPointer& context, const Input& input, Output& output) {
auto hfmModelOut = input.get0();
hfmModelOut->meshes = QVector<hfm::Mesh>::fromStdVector(input.get1());
hfmModelOut->joints = QVector<hfm::Joint>::fromStdVector(input.get2());
hfmModelOut->jointRotationOffsets = input.get3();
hfmModelOut->jointIndices = input.get4();
output = hfmModelOut;
}
};
class BakerEngineBuilder {
public:
using Input = hfm::Model::Pointer;
using Input = VaryingSet2<hfm::Model::Pointer, QVariantHash>;
using Output = hfm::Model::Pointer;
using JobModel = Task::ModelIO<BakerEngineBuilder, Input, Output>;
void build(JobModel& model, const Varying& hfmModelIn, Varying& hfmModelOut) {
void build(JobModel& model, const Varying& input, Varying& hfmModelOut) {
const auto& hfmModelIn = input.getN<Input>(0);
const auto& mapping = input.getN<Input>(1);
// Split up the inputs from hfm::Model
const auto modelPartsIn = model.addJob<GetModelPartsTask>("GetModelParts", hfmModelIn);
const auto meshesIn = modelPartsIn.getN<GetModelPartsTask::Output>(0);
@ -123,6 +131,7 @@ namespace baker {
const auto meshIndicesToModelNames = modelPartsIn.getN<GetModelPartsTask::Output>(2);
const auto blendshapesPerMeshIn = modelPartsIn.getN<GetModelPartsTask::Output>(3);
const auto materials = modelPartsIn.getN<GetModelPartsTask::Output>(4);
const auto jointsIn = modelPartsIn.getN<GetModelPartsTask::Output>(5);
// Calculate normals and tangents for meshes and blendshapes if they do not exist
// Note: Normals are never calculated here for OBJ models. OBJ files optionally define normals on a per-face basis, so for consistency normals are calculated beforehand in OBJSerializer.
@ -138,19 +147,27 @@ namespace baker {
const auto buildGraphicsMeshInputs = BuildGraphicsMeshTask::Input(meshesIn, url, meshIndicesToModelNames, normalsPerMesh, tangentsPerMesh).asVarying();
const auto graphicsMeshes = model.addJob<BuildGraphicsMeshTask>("BuildGraphicsMesh", buildGraphicsMeshInputs);
// Prepare joint information
const auto prepareJointsInputs = PrepareJointsTask::Input(jointsIn, mapping).asVarying();
const auto jointInfoOut = model.addJob<PrepareJointsTask>("PrepareJoints", prepareJointsInputs);
const auto jointsOut = jointInfoOut.getN<PrepareJointsTask::Output>(0);
const auto jointRotationOffsets = jointInfoOut.getN<PrepareJointsTask::Output>(1);
const auto jointIndices = jointInfoOut.getN<PrepareJointsTask::Output>(2);
// Combine the outputs into a new hfm::Model
const auto buildBlendshapesInputs = BuildBlendshapesTask::Input(blendshapesPerMeshIn, normalsPerBlendshapePerMesh, tangentsPerBlendshapePerMesh).asVarying();
const auto blendshapesPerMeshOut = model.addJob<BuildBlendshapesTask>("BuildBlendshapes", buildBlendshapesInputs);
const auto buildMeshesInputs = BuildMeshesTask::Input(meshesIn, graphicsMeshes, normalsPerMesh, tangentsPerMesh, blendshapesPerMeshOut).asVarying();
const auto meshesOut = model.addJob<BuildMeshesTask>("BuildMeshes", buildMeshesInputs);
const auto buildModelInputs = BuildModelTask::Input(hfmModelIn, meshesOut).asVarying();
const auto buildModelInputs = BuildModelTask::Input(hfmModelIn, meshesOut, jointsOut, jointRotationOffsets, jointIndices).asVarying();
hfmModelOut = model.addJob<BuildModelTask>("BuildModel", buildModelInputs);
}
};
Baker::Baker(const hfm::Model::Pointer& hfmModel) :
Baker::Baker(const hfm::Model::Pointer& hfmModel, const QVariantHash& mapping) :
_engine(std::make_shared<Engine>(BakerEngineBuilder::JobModel::create("Baker"), std::make_shared<BakeContext>())) {
_engine->feedInput<BakerEngineBuilder::Input>(hfmModel);
_engine->feedInput<BakerEngineBuilder::Input>(0, hfmModel);
_engine->feedInput<BakerEngineBuilder::Input>(1, mapping);
}
void Baker::run() {

View file

@ -12,6 +12,8 @@
#ifndef hifi_baker_Baker_h
#define hifi_baker_Baker_h
#include <QMap>
#include <hfm/HFM.h>
#include "Engine.h"
@ -19,7 +21,7 @@
namespace baker {
class Baker {
public:
Baker(const hfm::Model::Pointer& hfmModel);
Baker(const hfm::Model::Pointer& hfmModel, const QVariantHash& mapping);
void run();

View file

@ -13,6 +13,17 @@
#include "ModelMath.h"
bool needTangents(const hfm::Mesh& mesh, const QHash<QString, hfm::Material>& materials) {
// Check if we actually need to calculate the tangents
for (const auto& meshPart : mesh.parts) {
auto materialIt = materials.find(meshPart.materialID);
if (materialIt != materials.end() && (*materialIt).needTangentSpace()) {
return true;
}
}
return false;
}
void CalculateMeshTangentsTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
const auto& normalsPerMesh = input.get0();
const std::vector<hfm::Mesh>& meshes = input.get1();
@ -28,38 +39,20 @@ void CalculateMeshTangentsTask::run(const baker::BakeContextPointer& context, co
auto& tangentsOut = tangentsPerMeshOut[tangentsPerMeshOut.size()-1];
// Check if we already have tangents and therefore do not need to do any calculation
// Otherwise confirm if we have the normals needed, and need to calculate the tangents
if (!tangentsIn.empty()) {
tangentsOut = tangentsIn.toStdVector();
continue;
} else if (!normals.empty() && needTangents(mesh, materials)) {
tangentsOut.resize(normals.size());
baker::calculateTangents(mesh,
[&mesh, &normals, &tangentsOut](int firstIndex, int secondIndex, glm::vec3* outVertices, glm::vec2* outTexCoords, glm::vec3& outNormal) {
outVertices[0] = mesh.vertices[firstIndex];
outVertices[1] = mesh.vertices[secondIndex];
outNormal = normals[firstIndex];
outTexCoords[0] = mesh.texCoords[firstIndex];
outTexCoords[1] = mesh.texCoords[secondIndex];
return &(tangentsOut[firstIndex]);
});
}
// Check if we have normals, and if not then tangents can't be calculated
if (normals.empty()) {
continue;
}
// Check if we actually need to calculate the tangents
bool needTangents = false;
for (const auto& meshPart : mesh.parts) {
auto materialIt = materials.find(meshPart.materialID);
if (materialIt != materials.end() && (*materialIt).needTangentSpace()) {
needTangents = true;
break;
}
}
if (needTangents) {
continue;
}
tangentsOut.resize(normals.size());
baker::calculateTangents(mesh,
[&mesh, &normals, &tangentsOut](int firstIndex, int secondIndex, glm::vec3* outVertices, glm::vec2* outTexCoords, glm::vec3& outNormal) {
outVertices[0] = mesh.vertices[firstIndex];
outVertices[1] = mesh.vertices[secondIndex];
outNormal = normals[firstIndex];
outTexCoords[0] = mesh.texCoords[firstIndex];
outTexCoords[1] = mesh.texCoords[secondIndex];
return &(tangentsOut[firstIndex]);
});
}
}

View file

@ -0,0 +1,86 @@
//
// PrepareJointsTask.cpp
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/25.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "PrepareJointsTask.h"
#include "ModelBakerLogging.h"
QMap<QString, QString> getJointNameMapping(const QVariantHash& mapping) {
static const QString JOINT_NAME_MAPPING_FIELD = "jointMap";
QMap<QString, QString> hfmToHifiJointNameMap;
if (!mapping.isEmpty() && mapping.contains(JOINT_NAME_MAPPING_FIELD) && mapping[JOINT_NAME_MAPPING_FIELD].type() == QVariant::Hash) {
auto jointNames = mapping[JOINT_NAME_MAPPING_FIELD].toHash();
for (auto itr = jointNames.begin(); itr != jointNames.end(); itr++) {
hfmToHifiJointNameMap.insert(itr.key(), itr.value().toString());
qCDebug(model_baker) << "the mapped key " << itr.key() << " has a value of " << hfmToHifiJointNameMap[itr.key()];
}
}
return hfmToHifiJointNameMap;
}
QMap<QString, glm::quat> getJointRotationOffsets(const QVariantHash& mapping) {
QMap<QString, glm::quat> jointRotationOffsets;
static const QString JOINT_ROTATION_OFFSET_FIELD = "jointRotationOffset";
if (!mapping.isEmpty() && mapping.contains(JOINT_ROTATION_OFFSET_FIELD) && mapping[JOINT_ROTATION_OFFSET_FIELD].type() == QVariant::Hash) {
auto offsets = mapping[JOINT_ROTATION_OFFSET_FIELD].toHash();
for (auto itr = offsets.begin(); itr != offsets.end(); itr++) {
QString jointName = itr.key();
QString line = itr.value().toString();
auto quatCoords = line.split(',');
if (quatCoords.size() == 4) {
float quatX = quatCoords[0].mid(1).toFloat();
float quatY = quatCoords[1].toFloat();
float quatZ = quatCoords[2].toFloat();
float quatW = quatCoords[3].mid(0, quatCoords[3].size() - 1).toFloat();
if (!isNaN(quatX) && !isNaN(quatY) && !isNaN(quatZ) && !isNaN(quatW)) {
glm::quat rotationOffset = glm::quat(quatW, quatX, quatY, quatZ);
jointRotationOffsets.insert(jointName, rotationOffset);
}
}
}
}
return jointRotationOffsets;
}
void PrepareJointsTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
const auto& jointsIn = input.get0();
const auto& mapping = input.get1();
auto& jointsOut = output.edit0();
auto& jointRotationOffsets = output.edit1();
auto& jointIndices = output.edit2();
// Get joint renames
auto jointNameMapping = getJointNameMapping(mapping);
// Apply joint metadata from FST file mappings
for (const auto& jointIn : jointsIn) {
jointsOut.push_back(jointIn);
auto& jointOut = jointsOut.back();
auto jointNameMapKey = jointNameMapping.key(jointIn.name);
if (jointNameMapping.contains(jointNameMapKey)) {
jointOut.name = jointNameMapKey;
}
jointIndices.insert(jointOut.name, (int)jointsOut.size());
}
// Get joint rotation offsets from FST file mappings
auto offsets = getJointRotationOffsets(mapping);
for (auto itr = offsets.begin(); itr != offsets.end(); itr++) {
QString jointName = itr.key();
int jointIndex = jointIndices.value(jointName) - 1;
if (jointIndex != -1) {
glm::quat rotationOffset = itr.value();
jointRotationOffsets.insert(jointIndex, rotationOffset);
qCDebug(model_baker) << "Joint Rotation Offset added to Rig._jointRotationOffsets : " << " jointName: " << jointName << " jointIndex: " << jointIndex << " rotation offset: " << rotationOffset;
}
}
}
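A note on the reverse lookup in the rename loop above: a joint is renamed when its current name appears as a value in the jointMap, and it takes that entry's key as its new name, which is why QMap::key() (lookup by value) is used rather than value(). A small hypothetical illustration (the joint names are invented):

#include <QMap>
#include <QString>

// Invented single-entry mapping; real FST files list one pair per joint.
QString renameForModelJoint(const QString& nameInModel) {
    QMap<QString, QString> jointNameMapping;
    jointNameMapping.insert("Hips", "mixamorig:Hips");     // rename-to -> name-as-found-in-model

    QString renameTo = jointNameMapping.key(nameInModel);  // reverse lookup by value
    return jointNameMapping.contains(renameTo) ? renameTo : nameInModel;
}

// renameForModelJoint("mixamorig:Hips") == "Hips"; unmapped names come back unchanged.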

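The rotation offsets arrive from the FST mapping as text of the form "(x, y, z, w)"; the loop above splits on commas, trims the parentheses, and builds a glm::quat with its w-first constructor. A self-contained sketch of the same parse (parseRotationOffset and the sample value are hypothetical, not part of this commit):

#include <QString>
#include <QStringList>
#include <glm/gtc/quaternion.hpp>

glm::quat parseRotationOffset(const QString& line) {
    QStringList quatCoords = line.split(',');
    if (quatCoords.size() != 4) {
        return glm::quat();                                              // identity on malformed input
    }
    float x = quatCoords[0].mid(1).toFloat();                            // strip the leading '('
    float y = quatCoords[1].toFloat();
    float z = quatCoords[2].toFloat();
    float w = quatCoords[3].mid(0, quatCoords[3].size() - 1).toFloat();  // strip the trailing ')'
    return glm::quat(w, x, y, z);                                        // glm::quat takes (w, x, y, z)
}

// parseRotationOffset("(0.0, 0.707, 0.0, 0.707)") is roughly a 90-degree rotation about the Y axis.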
View file

@ -0,0 +1,30 @@
//
// PrepareJointsTask.h
// model-baker/src/model-baker
//
// Created by Sabrina Shanman on 2019/01/25.
// Copyright 2019 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_PrepareJointsTask_h
#define hifi_PrepareJointsTask_h
#include <QHash>
#include <hfm/HFM.h>
#include "Engine.h"
class PrepareJointsTask {
public:
using Input = baker::VaryingSet2<std::vector<hfm::Joint>, QVariantHash /*mapping*/>;
using Output = baker::VaryingSet3<std::vector<hfm::Joint>, QMap<int, glm::quat> /*jointRotationOffsets*/, QHash<QString, int> /*jointIndices*/>;
using JobModel = baker::Job::ModelIO<PrepareJointsTask, Input, Output>;
void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
};
#endif // hifi_PrepareJointsTask_h

View file

@ -233,7 +233,7 @@ void GeometryReader::run() {
}
QMetaObject::invokeMethod(resource.data(), "setGeometryDefinition",
Q_ARG(HFMModel::Pointer, hfmModel));
Q_ARG(HFMModel::Pointer, hfmModel), Q_ARG(QVariantHash, _mapping));
} catch (const std::exception&) {
auto resource = _resource.toStrongRef();
if (resource) {
@ -261,7 +261,7 @@ public:
virtual void downloadFinished(const QByteArray& data) override;
protected:
Q_INVOKABLE void setGeometryDefinition(HFMModel::Pointer hfmModel);
Q_INVOKABLE void setGeometryDefinition(HFMModel::Pointer hfmModel, QVariantHash mapping);
private:
ModelLoader _modelLoader;
@ -277,9 +277,9 @@ void GeometryDefinitionResource::downloadFinished(const QByteArray& data) {
QThreadPool::globalInstance()->start(new GeometryReader(_modelLoader, _self, _effectiveBaseURL, _mapping, data, _combineParts, _request->getWebMediaType()));
}
void GeometryDefinitionResource::setGeometryDefinition(HFMModel::Pointer hfmModel) {
void GeometryDefinitionResource::setGeometryDefinition(HFMModel::Pointer hfmModel, QVariantHash mapping) {
// Do processing on the model
baker::Baker modelBaker(hfmModel);
baker::Baker modelBaker(hfmModel, mapping);
modelBaker.run();
// Assume ownership of the processed HFMModel

View file

@ -208,7 +208,7 @@ void AccountManager::setSessionID(const QUuid& sessionID) {
}
}
QNetworkRequest AccountManager::createRequest(QString path, AccountManagerAuth::Type authType, const QUrlQuery & query) {
QNetworkRequest AccountManager::createRequest(QString path, AccountManagerAuth::Type authType) {
QNetworkRequest networkRequest;
networkRequest.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
networkRequest.setHeader(QNetworkRequest::UserAgentHeader, _userAgentGetter());
@ -217,17 +217,22 @@ QNetworkRequest AccountManager::createRequest(QString path, AccountManagerAuth::
uuidStringWithoutCurlyBraces(_sessionID).toLocal8Bit());
QUrl requestURL = _authURL;
if (requestURL.isEmpty()) { // Assignment client doesn't set _authURL.
requestURL = getMetaverseServerURL();
}
int queryStringLocation = path.indexOf("?");
if (path.startsWith("/")) {
requestURL.setPath(path);
requestURL.setPath(path.left(queryStringLocation));
} else {
requestURL.setPath("/" + path);
requestURL.setPath("/" + path.left(queryStringLocation));
}
if (queryStringLocation >= 0) {
QUrlQuery query(path.mid(queryStringLocation+1));
requestURL.setQuery(query);
}
requestURL.setQuery(query);
if (authType != AccountManagerAuth::None ) {
if (hasValidAccessToken()) {
@ -253,8 +258,7 @@ void AccountManager::sendRequest(const QString& path,
const JSONCallbackParameters& callbackParams,
const QByteArray& dataByteArray,
QHttpMultiPart* dataMultiPart,
const QVariantMap& propertyMap,
QUrlQuery query) {
const QVariantMap& propertyMap) {
if (thread() != QThread::currentThread()) {
QMetaObject::invokeMethod(this, "sendRequest",
@ -264,14 +268,13 @@ void AccountManager::sendRequest(const QString& path,
Q_ARG(const JSONCallbackParameters&, callbackParams),
Q_ARG(const QByteArray&, dataByteArray),
Q_ARG(QHttpMultiPart*, dataMultiPart),
Q_ARG(QVariantMap, propertyMap),
Q_ARG(QUrlQuery, query));
Q_ARG(QVariantMap, propertyMap));
return;
}
QNetworkAccessManager& networkAccessManager = NetworkAccessManager::getInstance();
QNetworkRequest networkRequest = createRequest(path, authType, query);
QNetworkRequest networkRequest = createRequest(path, authType);
if (VERBOSE_HTTP_REQUEST_DEBUGGING) {
qCDebug(networking) << "Making a request to" << qPrintable(networkRequest.url().toString());

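The createRequest() change above lets a caller embed the query string directly in the path instead of passing a separate QUrlQuery argument. A self-contained sketch of the resulting splitting behaviour (the base URL and endpoint below are made up for illustration):

#include <QString>
#include <QUrl>
#include <QUrlQuery>

QUrl buildRequestUrl(const QString& path) {
    QUrl requestURL("https://metaverse.example.com");    // placeholder base URL
    int queryStringLocation = path.indexOf("?");
    requestURL.setPath(path.left(queryStringLocation));  // left(-1) keeps the whole path when there is no '?'
    if (queryStringLocation >= 0) {
        requestURL.setQuery(QUrlQuery(path.mid(queryStringLocation + 1)));
    }
    return requestURL;
}

// buildRequestUrl("/api/v1/example?per_page=10")
//   -> "https://metaverse.example.com/api/v1/example?per_page=10"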
View file

@ -61,15 +61,14 @@ class AccountManager : public QObject, public Dependency {
public:
AccountManager(UserAgentGetter userAgentGetter = DEFAULT_USER_AGENT_GETTER);
QNetworkRequest createRequest(QString path, AccountManagerAuth::Type authType, const QUrlQuery & query = QUrlQuery());
QNetworkRequest createRequest(QString path, AccountManagerAuth::Type authType);
Q_INVOKABLE void sendRequest(const QString& path,
AccountManagerAuth::Type authType,
QNetworkAccessManager::Operation operation = QNetworkAccessManager::GetOperation,
const JSONCallbackParameters& callbackParams = JSONCallbackParameters(),
const QByteArray& dataByteArray = QByteArray(),
QHttpMultiPart* dataMultiPart = NULL,
const QVariantMap& propertyMap = QVariantMap(),
QUrlQuery query = QUrlQuery());
const QVariantMap& propertyMap = QVariantMap());
void setIsAgent(bool isAgent) { _isAgent = isAgent; }

View file

@ -683,7 +683,7 @@ void CharacterController::updateState() {
return;
}
if (_pendingFlags & PENDING_FLAG_RECOMPUTE_FLYING) {
SET_STATE(CharacterController::State::Hover, "recomputeFlying");
SET_STATE(CharacterController::State::Hover, "recomputeFlying");
_hasSupport = false;
_stepHeight = _minStepHeight; // clears memory of last step obstacle
_pendingFlags &= ~PENDING_FLAG_RECOMPUTE_FLYING;
@ -795,15 +795,13 @@ void CharacterController::updateState() {
// Transition to hover if we are above the fall threshold
SET_STATE(State::Hover, "above fall threshold");
}
} else if (!rayHasHit && !_hasSupport) {
SET_STATE(State::Hover, "no ground detected");
}
break;
}
case State::Hover:
btScalar horizontalSpeed = (velocity - velocity.dot(_currentUp) * _currentUp).length();
bool flyingFast = horizontalSpeed > (MAX_WALKING_SPEED * 0.75f);
if (!_flyingAllowed && rayHasHit) {
if (!_flyingAllowed) {
SET_STATE(State::InAir, "flying not allowed");
} else if ((_floorDistance < MIN_HOVER_HEIGHT) && !jumpButtonHeld && !flyingFast) {
SET_STATE(State::InAir, "near ground");

View file

@ -104,13 +104,13 @@ void ProceduralData::parse(const QJsonObject& proceduralData) {
//}
Procedural::Procedural() {
_opaqueState->setCullMode(gpu::State::CULL_BACK);
_opaqueState->setCullMode(gpu::State::CULL_NONE);
_opaqueState->setDepthTest(true, true, gpu::LESS_EQUAL);
_opaqueState->setBlendFunction(false,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_transparentState->setCullMode(gpu::State::CULL_BACK);
_transparentState->setCullMode(gpu::State::CULL_NONE);
_transparentState->setDepthTest(true, true, gpu::LESS_EQUAL);
_transparentState->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,

View file

@ -374,7 +374,7 @@ void AnimDebugDraw::update() {
glm::vec4 color = std::get<3>(iter.second);
for (int i = 0; i < skeleton->getNumJoints(); i++) {
const float radius = BONE_RADIUS / (absPoses[i].scale().x * rootPose.scale().x);
const float radius = BONE_RADIUS / (absPoses[i].scale() * rootPose.scale());
// draw bone
addBone(rootPose, absPoses[i], radius, color, v);
@ -394,16 +394,16 @@ void AnimDebugDraw::update() {
glm::vec3 pos = std::get<1>(iter.second);
glm::vec4 color = std::get<2>(iter.second);
const float radius = POSE_RADIUS;
addBone(AnimPose::identity, AnimPose(glm::vec3(1), rot, pos), radius, color, v);
addBone(AnimPose::identity, AnimPose(1.0f, rot, pos), radius, color, v);
}
AnimPose myAvatarPose(glm::vec3(1), DebugDraw::getInstance().getMyAvatarRot(), DebugDraw::getInstance().getMyAvatarPos());
AnimPose myAvatarPose(1.0f, DebugDraw::getInstance().getMyAvatarRot(), DebugDraw::getInstance().getMyAvatarPos());
for (auto& iter : myAvatarMarkerMap) {
glm::quat rot = std::get<0>(iter.second);
glm::vec3 pos = std::get<1>(iter.second);
glm::vec4 color = std::get<2>(iter.second);
const float radius = POSE_RADIUS;
addBone(myAvatarPose, AnimPose(glm::vec3(1), rot, pos), radius, color, v);
addBone(myAvatarPose, AnimPose(1.0f, rot, pos), radius, color, v);
}
// draw rays from shared DebugDraw singleton

View file

@ -122,7 +122,7 @@ void CauterizedModel::updateClusterMatrices() {
if (_useDualQuaternionSkinning) {
auto jointPose = _rig.getJointPose(cluster.jointIndex);
Transform jointTransform(jointPose.rot(), jointPose.scale(), jointPose.trans());
Transform jointTransform(jointPose.rot(), glm::vec3(jointPose.scale()), jointPose.trans());
Transform clusterTransform;
Transform::mult(clusterTransform, jointTransform, _rig.getAnimSkeleton()->getClusterBindMatricesOriginalValues(meshIndex, clusterIndex).inverseBindTransform);
state.clusterDualQuaternions[j] = Model::TransformDualQuaternion(clusterTransform);
@ -138,7 +138,7 @@ void CauterizedModel::updateClusterMatrices() {
if (!_cauterizeBoneSet.empty()) {
AnimPose cauterizePose = _rig.getJointPose(_rig.indexOfJoint("Neck"));
cauterizePose.scale() = glm::vec3(0.0001f, 0.0001f, 0.0001f);
cauterizePose.scale() = 0.0001f;
static const glm::mat4 zeroScale(
glm::vec4(0.0001f, 0.0f, 0.0f, 0.0f),
@ -161,7 +161,7 @@ void CauterizedModel::updateClusterMatrices() {
// not cauterized so just copy the value from the non-cauterized version.
state.clusterDualQuaternions[j] = _meshStates[i].clusterDualQuaternions[j];
} else {
Transform jointTransform(cauterizePose.rot(), cauterizePose.scale(), cauterizePose.trans());
Transform jointTransform(cauterizePose.rot(), glm::vec3(cauterizePose.scale()), cauterizePose.trans());
Transform clusterTransform;
Transform::mult(clusterTransform, jointTransform, _rig.getAnimSkeleton()->getClusterBindMatricesOriginalValues(meshIndex, clusterIndex).inverseBindTransform);
state.clusterDualQuaternions[j] = Model::TransformDualQuaternion(clusterTransform);

View file

@ -28,7 +28,7 @@ struct HazeParams {
vec3 colorModulationFactor;
int hazeMode;
mat4 transform;
vec3 spare;
float backgroundBlend;
float hazeRangeFactor;

View file

@ -1116,10 +1116,6 @@ int Model::getParentJointIndex(int jointIndex) const {
return (isActive() && jointIndex != -1) ? getHFMModel().joints.at(jointIndex).parentIndex : -1;
}
int Model::getLastFreeJointIndex(int jointIndex) const {
return (isActive() && jointIndex != -1) ? getHFMModel().joints.at(jointIndex).freeLineage.last() : -1;
}
void Model::setTextures(const QVariantMap& textures) {
if (isLoaded()) {
_needsFixupInScene = true;
@ -1368,7 +1364,7 @@ void Model::updateClusterMatrices() {
if (_useDualQuaternionSkinning) {
auto jointPose = _rig.getJointPose(cluster.jointIndex);
Transform jointTransform(jointPose.rot(), jointPose.scale(), jointPose.trans());
Transform jointTransform(jointPose.rot(), glm::vec3(jointPose.scale()), jointPose.trans());
Transform clusterTransform;
Transform::mult(clusterTransform, jointTransform, _rig.getAnimSkeleton()->getClusterBindMatricesOriginalValues(meshIndex, clusterIndex).inverseBindTransform);
state.clusterDualQuaternions[j] = Model::TransformDualQuaternion(clusterTransform);

View file

@ -298,9 +298,9 @@ public:
TransformDualQuaternion() {}
TransformDualQuaternion(const glm::mat4& m) {
AnimPose p(m);
_scale.x = p.scale().x;
_scale.y = p.scale().y;
_scale.z = p.scale().z;
_scale.x = p.scale();
_scale.y = p.scale();
_scale.z = p.scale();
_scale.w = 0.0f;
_dq = DualQuaternion(p.rot(), p.trans());
}
@ -379,9 +379,6 @@ protected:
/// Clear the joint states
void clearJointState(int index);
/// Returns the index of the last free ancestor of the indexed joint, or -1 if not found.
int getLastFreeJointIndex(int jointIndex) const;
/// \param jointIndex index of joint in model structure
/// \param position[out] position of joint in model-frame
/// \return true if joint exists

View file

@ -60,7 +60,7 @@ protected:
class CubicHermiteSplineFunctorWithArcLength : public CubicHermiteSplineFunctor {
public:
enum Constants { NUM_SUBDIVISIONS = 30 };
enum Constants { NUM_SUBDIVISIONS = 15 };
CubicHermiteSplineFunctorWithArcLength() : CubicHermiteSplineFunctor() {
memset(_values, 0, sizeof(float) * (NUM_SUBDIVISIONS + 1));
@ -71,11 +71,13 @@ public:
float alpha = 0.0f;
float accum = 0.0f;
_values[0] = 0.0f;
glm::vec3 prevValue = this->operator()(alpha);
for (int i = 1; i < NUM_SUBDIVISIONS + 1; i++) {
accum += glm::distance(this->operator()(alpha),
this->operator()(alpha + DELTA));
glm::vec3 nextValue = this->operator()(alpha + DELTA);
accum += glm::distance(prevValue, nextValue);
alpha += DELTA;
_values[i] = accum;
prevValue = nextValue;
}
}

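The arc-length table above is filled by sampling the spline at evenly spaced parameter values and summing the chord lengths between consecutive samples. Two changes land together here: the previous sample is cached instead of being evaluated a second time, and NUM_SUBDIVISIONS drops from 30 to 15, so the construction cost falls roughly as

    spline evaluations per construction: before = 2 * 30 = 60, after = 15 + 1 = 16

at the price of a coarser piecewise-linear approximation of the arc length.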
View file

@ -1247,30 +1247,30 @@ void qVectorMeshFaceFromScriptValue(const QScriptValue& array, QVector<MeshFace>
}
}
QVariantMap parseTexturesToMap(QString textures, const QVariantMap& defaultTextures) {
QVariantMap parseTexturesToMap(QString newTextures, const QVariantMap& defaultTextures) {
// If textures are unset, revert to original textures
if (textures.isEmpty()) {
if (newTextures.isEmpty()) {
return defaultTextures;
}
// Legacy: a ,\n-delimited list of filename:"texturepath"
if (*textures.cbegin() != '{') {
textures = "{\"" + textures.replace(":\"", "\":\"").replace(",\n", ",\"") + "}";
if (*newTextures.cbegin() != '{') {
newTextures = "{\"" + newTextures.replace(":\"", "\":\"").replace(",\n", ",\"") + "}";
}
QJsonParseError error;
QJsonDocument texturesJson = QJsonDocument::fromJson(textures.toUtf8(), &error);
QJsonDocument newTexturesJson = QJsonDocument::fromJson(newTextures.toUtf8(), &error);
// If textures are invalid, revert to original textures
if (error.error != QJsonParseError::NoError) {
qWarning() << "Could not evaluate textures property value:" << textures;
qWarning() << "Could not evaluate textures property value:" << newTextures;
return defaultTextures;
}
QVariantMap texturesMap = texturesJson.toVariant().toMap();
// If textures are unset, revert to original textures
if (texturesMap.isEmpty()) {
return defaultTextures;
QVariantMap newTexturesMap = newTexturesJson.toVariant().toMap();
QVariantMap toReturn = defaultTextures;
for (auto& texture : newTexturesMap.keys()) {
toReturn[texture] = newTexturesMap[texture];
}
return texturesJson.toVariant().toMap();
return toReturn;
}

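The legacy textures value is a ",\n"-separated list of name:"url" pairs rather than JSON; the two replace() calls above quote the names and re-join the entries so the whole value parses as a JSON object. A hedged illustration with invented texture names and URLs:

#include <QString>

// Hypothetical helper mirroring the normalization above; not a function from this commit.
QString legacyTexturesToJson(QString legacy) {
    return "{\"" + legacy.replace(":\"", "\":\"").replace(",\n", ",\"") + "}";
}

// legacyTexturesToJson("tex.base:\"atp:/textures/base.png\",\ntex.glow:\"atp:/textures/glow.png\"")
//   == "{\"tex.base\":\"atp:/textures/base.png\",\"tex.glow\":\"atp:/textures/glow.png\"}"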
View file

@ -152,7 +152,8 @@ void ShapeInfo::setSphere(float radius) {
void ShapeInfo::setMultiSphere(const std::vector<glm::vec3>& centers, const std::vector<float>& radiuses) {
_url = "";
_type = SHAPE_TYPE_MULTISPHERE;
assert(centers.size() == radiuses.size() && centers.size() > 0);
assert(centers.size() == radiuses.size());
assert(centers.size() > 0);
for (size_t i = 0; i < centers.size(); i++) {
SphereData sphere = SphereData(centers[i], radiuses[i]);
_sphereCollection.push_back(sphere);

View file

@ -238,7 +238,10 @@ void OffscreenQmlSurface::clearFocusItem() {
void OffscreenQmlSurface::initializeEngine(QQmlEngine* engine) {
Parent::initializeEngine(engine);
QQmlFileSelector* fileSelector = new QQmlFileSelector(engine);
auto fileSelector = QQmlFileSelector::get(engine);
if (!fileSelector) {
fileSelector = new QQmlFileSelector(engine);
}
fileSelector->setExtraSelectors(FileUtils::getFileSelectors());
static std::once_flag once;

View file

@ -35,9 +35,50 @@ import re
import tempfile
import time
import functools
import subprocess
import logging
from uuid import uuid4
from contextlib import contextmanager
print = functools.partial(print, flush=True)
class TrackableLogger(logging.Logger):
guid = str(uuid4())
def _log(self, msg, *args, **kwargs):
x = {'guid': self.guid}
if 'extra' in kwargs:
kwargs['extra'].update(x)
else:
kwargs['extra'] = x
super()._log(msg, *args, **kwargs)
logging.setLoggerClass(TrackableLogger)
logger = logging.getLogger('prebuild')
def headSha():
repo_dir = os.path.dirname(os.path.abspath(__file__))
git = subprocess.Popen(
'git rev-parse --short HEAD',
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, cwd=repo_dir, universal_newlines=True,
)
stdout, _ = git.communicate()
sha = stdout.split('\n')[0]
if not sha:
raise RuntimeError("couldn't find git sha")
return sha
@contextmanager
def timer(name):
''' Log how long the body of the context took to execute '''
start = time.time()
yield
# Please take care when modifying this log statement.
# Log parsing logic may depend on it.
logger.info('%s took %.3f secs' % (name, time.time() - start))
def parse_args():
# our custom ports, relative to the script location
defaultPortsPath = hifi_utils.scriptRelative('cmake', 'ports')
@ -50,6 +91,7 @@ def parse_args():
parser.add_argument('--vcpkg-root', type=str, help='The location of the vcpkg distribution')
parser.add_argument('--build-root', required=True, type=str, help='The location of the cmake build')
parser.add_argument('--ports-path', type=str, default=defaultPortsPath)
parser.add_argument('--ci-build', action='store_true')
if True:
args = parser.parse_args()
else:
@ -66,11 +108,19 @@ def main():
del os.environ[var]
args = parse_args()
if args.ci_build:
logging.basicConfig(datefmt='%s', format='%(asctime)s %(guid)s %(message)s', level=logging.INFO)
logger.info('sha=%s' % headSha())
logger.info('start')
# Only allow one instance of the program to run at a time
pm = hifi_vcpkg.VcpkgRepo(args)
with hifi_singleton.Singleton(pm.lockFile) as lock:
if not pm.upToDate():
pm.bootstrap()
with timer('Bootstraping'):
if not pm.upToDate():
pm.bootstrap()
# Always write the tag, even if we changed nothing. This
# allows vcpkg to reclaim disk space by identifying directories with
@ -80,11 +130,13 @@ def main():
# Grab our required dependencies:
# * build host tools, like spirv-cross and scribe
# * build client dependencies like openssl and nvtt
pm.setupDependencies()
with timer('Setting up dependencies'):
pm.setupDependencies()
# wipe out the build directories (after writing the tag, since failure
# here shouldn't invalidate the vcpkg install)
pm.cleanBuilds()
with timer('Cleaning builds'):
pm.cleanBuilds()
# If we're running in android mode, we also need to grab a bunch of additional binaries
# (this logic is all migrated from the old setupDependencies tasks in gradle)
@ -98,7 +150,10 @@ def main():
hifi_android.QtPackager(appPath, qtPath).bundle()
# Write the vcpkg config to the build directory last
pm.writeConfig()
with timer('Writing configuration'):
pm.writeConfig()
logger.info('end')
print(sys.argv)
main()

View file

@ -681,6 +681,9 @@ function loaded() {
if (isNullOrEmpty(valueB)) {
return (isDefaultSort ? -1 : 1) * (isAscendingSort ? 1 : -1);
}
if (typeof(valueA) === "string") {
return valueA.localeCompare(valueB);
}
return valueA < valueB ? -1 : 1;
});
});

View file

@ -135,6 +135,7 @@ EntityShape.prototype = {
overlayProperties.canCastShadows = false;
overlayProperties.parentID = this.entityID;
overlayProperties.collisionless = true;
overlayProperties.ignorePickIntersection = true;
this.entity = Entities.addEntity(overlayProperties, "local");
var PROJECTED_MATERIALS = false;
this.materialEntity = Entities.addEntity({
@ -146,6 +147,7 @@ EntityShape.prototype = {
priority: 1,
materialMappingMode: PROJECTED_MATERIALS ? "projected" : "uv",
materialURL: Script.resolvePath("../assets/images/materials/GridPattern.json"),
ignorePickIntersection: true,
}, "local");
},
update: function() {

View file

@ -22,9 +22,11 @@
#include <ResourceRequestObserver.h>
#include <StatTracker.h>
#include <test-utils/QTestExtensions.h>
#include <test-utils/GLMTestUtils.h>
QTEST_MAIN(AnimTests)
const float TEST_EPSILON = 0.001f;
void AnimTests::initTestCase() {
@ -372,16 +374,10 @@ void AnimTests::testAnimPose() {
const glm::quat ROT_Y_180 = glm::angleAxis(PI, glm::vec3(0.0f, 1.0, 0.0f));
const glm::quat ROT_Z_30 = glm::angleAxis(PI / 6.0f, glm::vec3(1.0f, 0.0f, 0.0f));
std::vector<glm::vec3> scaleVec = {
glm::vec3(1),
glm::vec3(2.0f, 1.0f, 1.0f),
glm::vec3(1.0f, 0.5f, 1.0f),
glm::vec3(1.0f, 1.0f, 1.5f),
glm::vec3(2.0f, 0.5f, 1.5f),
glm::vec3(-2.0f, 0.5f, 1.5f),
glm::vec3(2.0f, -0.5f, 1.5f),
glm::vec3(2.0f, 0.5f, -1.5f),
glm::vec3(-2.0f, -0.5f, -1.5f),
std::vector<float> scaleVec = {
1.0f,
2.0f,
0.5f
};
std::vector<glm::quat> rotVec = {
@ -411,7 +407,7 @@ void AnimTests::testAnimPose() {
for (auto& trans : transVec) {
// build a matrix the old fashioned way.
glm::mat4 scaleMat = glm::scale(glm::mat4(), scale);
glm::mat4 scaleMat = glm::scale(glm::mat4(), glm::vec3(scale));
glm::mat4 rotTransMat = createMatFromQuatAndPos(rot, trans);
glm::mat4 rawMat = rotTransMat * scaleMat;
@ -429,7 +425,7 @@ void AnimTests::testAnimPose() {
for (auto& trans : transVec) {
// build a matrix the old fashioned way.
glm::mat4 scaleMat = glm::scale(glm::mat4(), scale);
glm::mat4 scaleMat = glm::scale(glm::mat4(), glm::vec3(scale));
glm::mat4 rotTransMat = createMatFromQuatAndPos(rot, trans);
glm::mat4 rawMat = rotTransMat * scaleMat;
@ -445,6 +441,145 @@ void AnimTests::testAnimPose() {
}
}
void AnimTests::testAnimPoseMultiply() {
const float PI = (float)M_PI;
const glm::quat ROT_X_90 = glm::angleAxis(PI / 2.0f, glm::vec3(1.0f, 0.0f, 0.0f));
const glm::quat ROT_Y_180 = glm::angleAxis(PI, glm::vec3(0.0f, 1.0, 0.0f));
const glm::quat ROT_Z_30 = glm::angleAxis(PI / 6.0f, glm::vec3(1.0f, 0.0f, 0.0f));
std::vector<float> scaleVec = {
1.0f,
2.0f,
0.5f,
};
std::vector<glm::quat> rotVec = {
glm::quat(),
ROT_X_90,
ROT_Y_180,
ROT_Z_30,
ROT_X_90 * ROT_Y_180 * ROT_Z_30,
-ROT_Y_180
};
std::vector<glm::vec3> transVec = {
glm::vec3(),
glm::vec3(10.0f, 0.0f, 0.0f),
glm::vec3(0.0f, 5.0f, 0.0f),
glm::vec3(0.0f, 0.0f, 7.5f),
glm::vec3(10.0f, 5.0f, 7.5f),
glm::vec3(-10.0f, 5.0f, 7.5f),
glm::vec3(10.0f, -5.0f, 7.5f),
glm::vec3(10.0f, 5.0f, -7.5f)
};
const float TEST_EPSILON = 0.001f;
std::vector<glm::mat4> matrixVec;
std::vector<AnimPose> poseVec;
for (auto& scale : scaleVec) {
for (auto& rot : rotVec) {
for (auto& trans : transVec) {
// build a matrix the old fashioned way.
glm::mat4 scaleMat = glm::scale(glm::mat4(), glm::vec3(scale));
glm::mat4 rotTransMat = createMatFromQuatAndPos(rot, trans);
glm::mat4 rawMat = rotTransMat * scaleMat;
matrixVec.push_back(rawMat);
// use an anim pose to build a matrix by parts.
AnimPose pose(scale, rot, trans);
poseVec.push_back(pose);
}
}
}
for (int i = 0; i < matrixVec.size(); i++) {
for (int j = 0; j < matrixVec.size(); j++) {
// multiply the matrices together
glm::mat4 matrix = matrixVec[i] * matrixVec[j];
// convert the matrix back into a pose (note this will remove shear from the matrix)
AnimPose resultA(matrix);
// multiply the poses together directly
AnimPose resultB = poseVec[i] * poseVec[j];
/*
qDebug() << "matrixVec[" << i << "] =" << matrixVec[i];
qDebug() << "matrixVec[" << j << "] =" << matrixVec[j];
qDebug() << "matrixResult =" << resultA;
qDebug() << "poseVec[" << i << "] =" << poseVec[i];
qDebug() << "poseVec[" << j << "] =" << poseVec[j];
qDebug() << "poseResult =" << resultB;
*/
// compare results.
QCOMPARE_WITH_ABS_ERROR(resultA.scale(), resultB.scale(), TEST_EPSILON);
QCOMPARE_WITH_ABS_ERROR(resultA.rot(), resultB.rot(), TEST_EPSILON);
QCOMPARE_WITH_ABS_ERROR(resultA.trans(), resultB.trans(), TEST_EPSILON);
}
}
}
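testAnimPoseMultiply above checks that composing poses directly agrees with multiplying the equivalent matrices. That stays exact once the pose scale is a single uniform float rather than a vec3, because uniform scale commutes with rotation, so the composition of two (scale, rotation, translation) poses is again a pose of the same form. A minimal sketch of that rule with plain glm types (an illustration only, not the AnimPose implementation):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct SimplePose {
    float scale;      // uniform scale
    glm::quat rot;
    glm::vec3 trans;
};

// Applying (s, R, t) to a point x gives t + s * (R * x), so composing a with b yields:
//   scale = sa * sb,  rot = Ra * Rb,  trans = ta + sa * (Ra * tb)
SimplePose compose(const SimplePose& a, const SimplePose& b) {
    return { a.scale * b.scale,
             a.rot * b.rot,
             a.trans + a.scale * (a.rot * b.trans) };
}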
void AnimTests::testAnimPoseInverse() {
const float PI = (float)M_PI;
const glm::quat ROT_X_90 = glm::angleAxis(PI / 2.0f, glm::vec3(1.0f, 0.0f, 0.0f));
const glm::quat ROT_Y_180 = glm::angleAxis(PI, glm::vec3(0.0f, 1.0, 0.0f));
const glm::quat ROT_Z_30 = glm::angleAxis(PI / 6.0f, glm::vec3(1.0f, 0.0f, 0.0f));
std::vector<float> scaleVec = {
1.0f,
2.0f,
0.5f
};
std::vector<glm::quat> rotVec = {
glm::quat(),
ROT_X_90,
ROT_Y_180,
ROT_Z_30,
ROT_X_90 * ROT_Y_180 * ROT_Z_30,
-ROT_Y_180
};
std::vector<glm::vec3> transVec = {
glm::vec3(),
glm::vec3(10.0f, 0.0f, 0.0f),
glm::vec3(0.0f, 5.0f, 0.0f),
glm::vec3(0.0f, 0.0f, 7.5f),
glm::vec3(10.0f, 5.0f, 7.5f),
glm::vec3(-10.0f, 5.0f, 7.5f),
glm::vec3(10.0f, -5.0f, 7.5f),
glm::vec3(10.0f, 5.0f, -7.5f)
};
const float TEST_EPSILON = 0.001f;
for (auto& scale : scaleVec) {
for (auto& rot : rotVec) {
for (auto& trans : transVec) {
// build a matrix the old fashioned way.
glm::mat4 scaleMat = glm::scale(glm::mat4(), glm::vec3(scale));
glm::mat4 rotTransMat = createMatFromQuatAndPos(rot, trans);
glm::mat4 rawMat = glm::inverse(rotTransMat * scaleMat);
// use an anim pose to build a matrix by parts.
AnimPose pose(scale, rot, trans);
glm::mat4 poseMat = pose.inverse();
QCOMPARE_WITH_ABS_ERROR(rawMat, poseMat, TEST_EPSILON);
}
}
}
}
void AnimTests::testExpressionTokenizer() {
QString str = "(10 + x) >= 20.1 && (y != !z)";
AnimExpression e("x");

View file

@ -27,6 +27,8 @@ private slots:
void testVariant();
void testAccumulateTime();
void testAnimPose();
void testAnimPoseMultiply();
void testAnimPoseInverse();
void testExpressionTokenizer();
void testExpressionParser();
void testExpressionEvaluator();

View file

@ -19,20 +19,36 @@ function(check_test name)
endfunction()
if (BUILD_TOOLS)
set(ALL_TOOLS
udt-test
vhacd-util
frame-optimizer
gpu-frame-player
ice-client
ktx-tool
ac-client
skeleton-dump
atp-client
oven
nitpick
)
# Allow different tools for production builds
if (RELEASE_TYPE STREQUAL "PRODUCTION")
set(ALL_TOOLS
udt-test
vhacd-util
frame-optimizer
gpu-frame-player
ice-client
ktx-tool
ac-client
skeleton-dump
atp-client
oven
)
else()
set(ALL_TOOLS
udt-test
vhacd-util
frame-optimizer
gpu-frame-player
ice-client
ktx-tool
ac-client
skeleton-dump
atp-client
oven
nitpick
)
endif()
foreach(TOOL ${ALL_TOOLS})
check_test(${TOOL})
if (${BUILD_TOOL_RESULT})

Some files were not shown because too many files have changed in this diff.