Merge branch 'master' into graphics/fetch-cull-sort

Zach Pomerantz 2017-01-03 14:01:39 -05:00
commit 26cfc83e20
69 changed files with 4642 additions and 1151 deletions

View file

@ -38,13 +38,13 @@ set(${EXTERNAL_NAME_UPPER}_DLL_PATH ${INSTALL_DIR}/lib CACHE FILEPATH "Location
if (APPLE)
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip5.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5d.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5.1.0.0.dylib CACHE FILEPATH "Location of QuaZip release library")
elseif (WIN32)
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/quazip5.lib CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/quazip5d.lib CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/quazip5.lib CACHE FILEPATH "Location of QuaZip release library")
else ()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libquazip5.so CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5d.so CACHE FILEPATH "Location of QuaZip release library")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG ${INSTALL_DIR}/lib/libquazip5.so CACHE FILEPATH "Location of QuaZip release library")
endif ()
include(SelectLibraryConfigurations)
@ -52,4 +52,4 @@ select_library_configurations(${EXTERNAL_NAME_UPPER})
# Force selected libraries into the cache
set(${EXTERNAL_NAME_UPPER}_LIBRARY ${${EXTERNAL_NAME_UPPER}_LIBRARY} CACHE FILEPATH "Location of QuaZip libraries")
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Location of QuaZip libraries")
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARIES} CACHE FILEPATH "Location of QuaZip libraries")

Binary file not shown. After: 3.2 KiB.

View file

@ -21,9 +21,9 @@ Original.CheckBox {
property int colorScheme: hifi.colorSchemes.light
readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light
readonly property int boxSize: 14
property int boxSize: 14
readonly property int boxRadius: 3
readonly property int checkSize: 10
readonly property int checkSize: Math.max(boxSize - 8, 10)
readonly property int checkRadius: 2
style: CheckBoxStyle {
@ -32,21 +32,35 @@ Original.CheckBox {
width: boxSize
height: boxSize
radius: boxRadius
border.width: 1
border.color: pressed || hovered
? hifi.colors.checkboxCheckedBorder
: (checkBox.isLightColorScheme ? hifi.colors.checkboxLightFinish : hifi.colors.checkboxDarkFinish)
gradient: Gradient {
GradientStop {
position: 0.2
color: pressed || hovered
? (checkBox.isLightColorScheme ? hifi.colors.checkboxDarkStart : hifi.colors.checkboxLightStart)
? (checkBox.isLightColorScheme ? hifi.colors.checkboxChecked : hifi.colors.checkboxLightStart)
: (checkBox.isLightColorScheme ? hifi.colors.checkboxLightStart : hifi.colors.checkboxDarkStart)
}
GradientStop {
position: 1.0
color: pressed || hovered
? (checkBox.isLightColorScheme ? hifi.colors.checkboxDarkFinish : hifi.colors.checkboxLightFinish)
? (checkBox.isLightColorScheme ? hifi.colors.checkboxChecked : hifi.colors.checkboxLightFinish)
: (checkBox.isLightColorScheme ? hifi.colors.checkboxLightFinish : hifi.colors.checkboxDarkFinish)
}
}
Rectangle {
visible: pressed || hovered
anchors.centerIn: parent
id: innerBox
width: checkSize - 4
height: width
radius: checkRadius
color: hifi.colors.checkboxCheckedBorder
}
Rectangle {
id: check
width: checkSize
@ -54,7 +68,7 @@ Original.CheckBox {
radius: checkRadius
anchors.centerIn: parent
color: hifi.colors.checkboxChecked
border.width: 1
border.width: 2
border.color: hifi.colors.checkboxCheckedBorder
visible: checked && !pressed || !checked && pressed
}

View file

@ -20,6 +20,7 @@ TableView {
property int colorScheme: hifi.colorSchemes.light
readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light
property bool expandSelectedRow: false
property bool centerHeaderText: false
model: ListModel { }
@ -34,9 +35,12 @@ TableView {
size: hifi.fontSizes.tableHeading
font.capitalization: Font.AllUppercase
color: hifi.colors.baseGrayHighlight
horizontalAlignment: (centerHeaderText ? Text.AlignHCenter : Text.AlignLeft)
anchors {
left: parent.left
leftMargin: hifi.dimensions.tablePadding
right: parent.right
rightMargin: hifi.dimensions.tablePadding
verticalCenter: parent.verticalCenter
}
}
@ -48,7 +52,7 @@ TableView {
size: hifi.fontSizes.tableHeadingIcon
anchors {
left: titleText.right
leftMargin: -hifi.fontSizes.tableHeadingIcon / 3
leftMargin: -hifi.fontSizes.tableHeadingIcon / 3 - (centerHeaderText ? 3 : 0)
right: parent.right
rightMargin: hifi.dimensions.tablePadding
verticalCenter: titleText.verticalCenter

View file

@ -9,28 +9,128 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0
import Hifi 1.0 as Hifi
import QtQuick 2.5
import QtGraphicalEffects 1.0
import "../styles-uit"
Column {
property string displayName: "";
property string userName: "";
property int displayTextHeight: 18;
property int usernameTextHeight: 12;
RalewaySemiBold {
text: parent.displayName;
size: parent.displayTextHeight;
elide: Text.ElideRight;
width: parent.width;
Row {
id: thisNameCard
// Spacing
spacing: 10
// Anchors
anchors.top: parent.top
anchors {
topMargin: (parent.height - contentHeight)/2
bottomMargin: (parent.height - contentHeight)/2
leftMargin: 10
rightMargin: 10
}
RalewayLight {
visible: parent.displayName;
text: parent.userName;
size: parent.usernameTextHeight;
elide: Text.ElideRight;
width: parent.width;
// Properties
property int contentHeight: 50
property string displayName: ""
property string userName: ""
property int displayTextHeight: 18
property int usernameTextHeight: 12
property real audioLevel: 0.0
Column {
id: avatarImage
// Size
height: contentHeight
width: height
Image {
id: userImage
source: "../../icons/defaultNameCardUser.png"
// Anchors
width: parent.width
height: parent.height
}
}
Column {
id: textContainer
// Size
width: parent.width - avatarImage.width - parent.anchors.leftMargin - parent.anchors.rightMargin - parent.spacing
height: contentHeight
// DisplayName Text
FiraSansSemiBold {
id: displayNameText
// Properties
text: thisNameCard.displayName
elide: Text.ElideRight
// Size
width: parent.width
// Text Size
size: thisNameCard.displayTextHeight
// Text Positioning
verticalAlignment: Text.AlignVCenter
}
// UserName Text
FiraSansRegular {
id: userNameText
// Properties
text: thisNameCard.userName
elide: Text.ElideRight
visible: thisNameCard.displayName
// Size
width: parent.width
// Text Size
size: thisNameCard.usernameTextHeight
// Text Positioning
verticalAlignment: Text.AlignVCenter
}
// Spacer
Item {
height: 4
width: parent.width
}
// VU Meter
Rectangle { // CHANGEME to the appropriate type!
id: nameCardVUMeter
// Size
width: parent.width
height: 8
// Style
radius: 4
// Rectangle for the VU meter base
Rectangle {
id: vuMeterBase
// Anchors
anchors.fill: parent
// Style
color: "#dbdbdb" // Very appropriate hex value here
radius: parent.radius
}
// Rectangle for the VU meter audio level
Rectangle {
id: vuMeterLevel
// Size
width: (thisNameCard.audioLevel) * parent.width
// Style
color: "#dbdbdb" // Very appropriate hex value here
radius: parent.radius
// Anchors
anchors.bottom: parent.bottom
anchors.top: parent.top
anchors.left: parent.left
}
// Gradient for the VU meter audio level
LinearGradient {
anchors.fill: vuMeterLevel
source: vuMeterLevel
start: Qt.point(0, 0)
end: Qt.point(parent.width, 0)
gradient: Gradient {
GradientStop { position: 0.05; color: "#00CFEF" }
GradientStop { position: 0.5; color: "#9450A5" }
GradientStop { position: 0.95; color: "#EA4C5F" }
}
}
}
}
}

View file

@ -28,19 +28,314 @@
import QtQuick 2.5
import QtQuick.Controls 1.4
import "../styles-uit"
import "../controls-uit" as HifiControls
Rectangle {
id: pal;
property int keepFromHorizontalScroll: 1;
width: parent.width - keepFromHorizontalScroll;
height: parent.height;
Item {
id: pal
// Size
width: parent.width
height: parent.height
// Properties
property int myCardHeight: 70
property int rowHeight: 70
property int actionButtonWidth: 75
property int nameCardWidth: width - actionButtonWidth*(iAmAdmin ? 4 : 2)
property int nameWidth: width/2;
property int actionWidth: nameWidth / (table.columnCount - 1);
property int rowHeight: 50;
property var userData: [];
property var myData: ({displayName: "", userName: ""}); // valid dummy until set
property bool iAmAdmin: false;
// This contains the current user's NameCard and will contain other information in the future
Rectangle {
id: myInfo
// Size
width: pal.width
height: myCardHeight + 20
// Anchors
anchors.top: pal.top
// Properties
radius: hifi.dimensions.borderRadius
// This NameCard refers to the current user's NameCard (the one above the table)
NameCard {
id: myCard
// Properties
displayName: myData.displayName
userName: myData.userName
audioLevel: myData.audioLevel
// Size
width: nameCardWidth
height: parent.height
// Anchors
anchors.left: parent.left
}
}
// Rectangles used to cover up rounded edges on bottom of MyInfo Rectangle
Rectangle {
color: "#FFFFFF"
width: pal.width
height: 10
anchors.top: myInfo.bottom
anchors.left: parent.left
}
Rectangle {
color: "#FFFFFF"
width: pal.width
height: 10
anchors.bottom: table.top
anchors.left: parent.left
}
// Rectangle that houses "ADMIN" string
Rectangle {
id: adminTab
// Size
width: actionButtonWidth * 2 - 2
height: 40
// Anchors
anchors.bottom: myInfo.bottom
anchors.bottomMargin: -10
anchors.right: myInfo.right
// Properties
visible: iAmAdmin
// Style
color: hifi.colors.tableRowLightEven
radius: hifi.dimensions.borderRadius
border.color: hifi.colors.lightGrayText
border.width: 2
// "ADMIN" text
RalewaySemiBold {
text: "ADMIN"
// Text size
size: hifi.fontSizes.tableHeading + 2
// Anchors
anchors.top: parent.top
anchors.topMargin: 8
anchors.left: parent.left
anchors.right: parent.right
// Style
font.capitalization: Font.AllUppercase
color: hifi.colors.redHighlight
// Alignment
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignTop
}
}
// This TableView refers to the table (below the current user's NameCard)
HifiControls.Table {
id: table
// Size
height: pal.height - myInfo.height - 4
width: pal.width - 4
// Anchors
anchors.left: parent.left
anchors.top: myInfo.bottom
// Properties
centerHeaderText: true
sortIndicatorVisible: true
headerVisible: true
onSortIndicatorColumnChanged: sortModel()
onSortIndicatorOrderChanged: sortModel()
TableViewColumn {
role: "displayName"
title: "NAMES"
width: nameCardWidth
movable: false
resizable: false
}
TableViewColumn {
role: "personalMute"
title: "MUTE"
width: actionButtonWidth
movable: false
resizable: false
}
TableViewColumn {
role: "ignore"
title: "IGNORE"
width: actionButtonWidth
movable: false
resizable: false
}
TableViewColumn {
visible: iAmAdmin
role: "mute"
title: "SILENCE"
width: actionButtonWidth
movable: false
resizable: false
}
TableViewColumn {
visible: iAmAdmin
role: "kick"
title: "BAN"
width: actionButtonWidth
movable: false
resizable: false
}
model: userModel
// This Rectangle refers to each Row in the table.
rowDelegate: Rectangle { // The only way I know to specify a row height.
// Size
height: rowHeight
color: styleData.selected
? "#afafaf"
: styleData.alternate ? hifi.colors.tableRowLightEven : hifi.colors.tableRowLightOdd
}
// This Item refers to the contents of each Cell
itemDelegate: Item {
id: itemCell
property bool isCheckBox: typeof(styleData.value) === 'boolean'
// This NameCard refers to the cell that contains an avatar's
// DisplayName and UserName
NameCard {
id: nameCard
// Properties
displayName: styleData.value
userName: model.userName
audioLevel: model.audioLevel
visible: !isCheckBox
// Size
width: nameCardWidth
height: parent.height
// Anchors
anchors.left: parent.left
}
// This CheckBox belongs in the columns that contain the action buttons ("Mute", "Ban", etc)
HifiControls.CheckBox {
visible: isCheckBox
anchors.centerIn: parent
boxSize: 24
onClicked: {
var newValue = !model[styleData.role]
var datum = userData[model.userIndex]
datum[styleData.role] = model[styleData.role] = newValue
Users[styleData.role](model.sessionId)
// Just for now, while we cannot undo things:
userData.splice(model.userIndex, 1)
sortModel()
}
}
}
}
// Refresh button
Rectangle {
// Size
width: hifi.dimensions.tableHeaderHeight-1
height: hifi.dimensions.tableHeaderHeight-1
// Anchors
anchors.left: table.left
anchors.leftMargin: 4
anchors.top: table.top
// Style
color: hifi.colors.tableBackgroundLight
// Actual refresh icon
HiFiGlyphs {
id: reloadButton
text: hifi.glyphs.reloadSmall
// Size
size: parent.width*1.5
// Anchors
anchors.fill: parent
// Style
horizontalAlignment: Text.AlignHCenter
color: hifi.colors.darkGray
}
MouseArea {
id: reloadButtonArea
// Anchors
anchors.fill: parent
hoverEnabled: true
// Everyone likes a responsive refresh button!
// So use onPressed instead of onClicked
onPressed: {
reloadButton.color = hifi.colors.lightGrayText
pal.sendToScript({method: 'refresh'})
}
onReleased: reloadButton.color = (containsMouse ? hifi.colors.baseGrayHighlight : hifi.colors.darkGray)
onEntered: reloadButton.color = hifi.colors.baseGrayHighlight
onExited: reloadButton.color = (pressed ? hifi.colors.lightGrayText: hifi.colors.darkGray)
}
}
// Separator between user and admin functions
Rectangle {
// Size
width: 2
height: table.height
// Anchors
anchors.left: adminTab.left
anchors.top: table.top
// Properties
visible: iAmAdmin
color: hifi.colors.lightGrayText
}
// This Rectangle refers to the [?] popup button
Rectangle {
color: hifi.colors.tableBackgroundLight
width: 20
height: hifi.dimensions.tableHeaderHeight - 2
anchors.left: table.left
anchors.top: table.top
anchors.topMargin: 1
anchors.leftMargin: nameCardWidth/2 + 24
RalewayRegular {
id: helpText
text: "[?]"
size: hifi.fontSizes.tableHeading + 2
font.capitalization: Font.AllUppercase
color: hifi.colors.darkGray
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
anchors.fill: parent
}
MouseArea {
anchors.fill: parent
acceptedButtons: Qt.LeftButton
hoverEnabled: true
onClicked: namesPopup.visible = true
onEntered: helpText.color = hifi.colors.baseGrayHighlight
onExited: helpText.color = hifi.colors.darkGray
}
}
// Explanatory popup upon clicking "[?]"
Item {
visible: false
id: namesPopup
anchors.fill: pal
Rectangle {
anchors.fill: parent
color: "black"
opacity: 0.5
radius: hifi.dimensions.borderRadius
}
Rectangle {
width: Math.min(parent.width * 0.75, 400)
height: popupText.contentHeight*2
anchors.centerIn: parent
radius: hifi.dimensions.borderRadius
color: "white"
FiraSansSemiBold {
id: popupText
text: "This is temporary text. It will eventually be used to explain what 'Names' means."
size: hifi.fontSizes.textFieldInput
color: hifi.colors.darkGray
horizontalAlignment: Text.AlignHCenter
anchors.fill: parent
wrapMode: Text.WordWrap
}
}
MouseArea {
anchors.fill: parent
acceptedButtons: Qt.LeftButton
onClicked: {
namesPopup.visible = false
}
}
}
property var userData: []
property var myData: ({displayName: "", userName: "", audioLevel: 0.0}) // valid dummy until set
property bool iAmAdmin: false
function findSessionIndex(sessionId, optionalData) { // no findIndex in .qml
var i, data = optionalData || userData, length = data.length;
for (var i = 0; i < length; i++) {
@ -90,6 +385,21 @@ Rectangle {
userData[userIndex].userName = userName; // Defensive programming
}
break;
case 'updateAudioLevel':
for (var userId in message.params) {
var audioLevel = message.params[userId];
// If the userId is 0, we're updating "myData".
if (userId == 0) {
myData.audioLevel = audioLevel;
myCard.audioLevel = audioLevel; // Defensive programming
} else {
console.log("userid:" + userId);
var userIndex = findSessionIndex(userId);
userModel.get(userIndex).audioLevel = audioLevel;
userData[userIndex].audioLevel = audioLevel; // Defensive programming
}
}
break;
default:
console.log('Unrecognized message:', JSON.stringify(message));
}
@ -118,7 +428,7 @@ Rectangle {
datum[property] = false;
}
}
['ignore', 'spacer', 'mute', 'kick'].forEach(init);
['personalMute', 'ignore', 'mute', 'kick'].forEach(init);
datum.userIndex = userIndex++;
userModel.append(datum);
});
@ -135,91 +445,4 @@ Rectangle {
target: table.selection
onSelectionChanged: pal.noticeSelection()
}
Column {
NameCard {
id: myCard;
width: nameWidth;
displayName: myData.displayName;
userName: myData.userName;
}
TableView {
id: table;
TableViewColumn {
role: "displayName";
title: "Name";
width: nameWidth
}
TableViewColumn {
role: "ignore";
title: "Ignore"
width: actionWidth
}
TableViewColumn {
title: "";
width: actionWidth
}
TableViewColumn {
visible: iAmAdmin;
role: "mute";
title: "Mute";
width: actionWidth
}
TableViewColumn {
visible: iAmAdmin;
role: "kick";
title: "Ban"
width: actionWidth
}
model: userModel;
rowDelegate: Rectangle { // The only way I know to specify a row height.
height: rowHeight;
// The rest of this is cargo-culted to restore the default styling
SystemPalette {
id: myPalette;
colorGroup: SystemPalette.Active
}
color: {
var baseColor = styleData.alternate?myPalette.alternateBase:myPalette.base
return styleData.selected?myPalette.highlight:baseColor
}
}
itemDelegate: Item {
id: itemCell;
property bool isCheckBox: typeof(styleData.value) === 'boolean';
NameCard {
id: nameCard;
visible: !isCheckBox;
width: nameWidth;
displayName: styleData.value;
userName: model.userName;
}
Rectangle {
radius: itemCell.height / 4;
visible: isCheckBox;
color: styleData.value ? "green" : "red";
anchors.fill: parent;
MouseArea {
anchors.fill: parent;
acceptedButtons: Qt.LeftButton;
hoverEnabled: true;
onClicked: {
var newValue = !model[styleData.role];
var datum = userData[model.userIndex];
datum[styleData.role] = model[styleData.role] = newValue;
Users[styleData.role](model.sessionId);
// Just for now, while we cannot undo things:
userData.splice(model.userIndex, 1);
sortModel();
}
}
}
}
height: pal.height - myCard.height;
width: pal.width;
sortIndicatorVisible: true;
onSortIndicatorColumnChanged: sortModel();
onSortIndicatorOrderChanged: sortModel();
}
}
}

View file

@ -22,7 +22,7 @@ ScrollingWindow {
objectName: "RunningScripts"
title: "Running Scripts"
resizable: true
destroyOnHidden: true
destroyOnHidden: false
implicitWidth: 424
implicitHeight: isHMD ? 695 : 728
minSize: Qt.vector2d(424, 300)

View file

@ -89,8 +89,8 @@ Item {
readonly property color transparent: "#00ffffff"
// Control specific colors
readonly property color tableRowLightOdd: "#eaeaea" // Equivalent to white50 over #e3e3e3 background
readonly property color tableRowLightEven: "#c6c6c6" // Equivavlent to "#1a575757" over #e3e3e3 background
readonly property color tableRowLightOdd: "#fafafa"
readonly property color tableRowLightEven: "#eeeeee" // Equivavlent to "#1a575757" over #e3e3e3 background
readonly property color tableRowDarkOdd: "#2e2e2e" // Equivalent to "#80393939" over #404040 background
readonly property color tableRowDarkEven: "#1c1c1c" // Equivalent to "#a6181818" over #404040 background
readonly property color tableBackgroundLight: tableRowLightEven

View file

@ -1136,7 +1136,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
connect(&_settingsThread, SIGNAL(finished()), &_settingsTimer, SLOT(stop()));
_settingsTimer.moveToThread(&_settingsThread);
_settingsTimer.setSingleShot(false);
_settingsTimer.setInterval(SAVE_SETTINGS_INTERVAL);
_settingsTimer.setInterval(SAVE_SETTINGS_INTERVAL); // 10s, Qt::CoarseTimer acceptable
_settingsThread.start();
if (Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson)) {
@ -1241,7 +1241,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// Periodically send fps as a user activity event
QTimer* sendStatsTimer = new QTimer(this);
sendStatsTimer->setInterval(SEND_STATS_INTERVAL_MS);
sendStatsTimer->setInterval(SEND_STATS_INTERVAL_MS); // 10s, Qt::CoarseTimer acceptable
connect(sendStatsTimer, &QTimer::timeout, this, [this]() {
QJsonObject properties = {};
@ -1272,6 +1272,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
properties["sim_rate"] = getAverageSimsPerSecond();
properties["avatar_sim_rate"] = getAvatarSimrate();
properties["has_async_reprojection"] = displayPlugin->hasAsyncReprojection();
properties["hardware_stats"] = displayPlugin->getHardwareStats();
auto bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
properties["packet_rate_in"] = bandwidthRecorder->getCachedTotalAverageInputPacketsPerSecond();
@ -1342,7 +1343,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// Periodically check for count of nearby avatars
static int lastCountOfNearbyAvatars = -1;
QTimer* checkNearbyAvatarsTimer = new QTimer(this);
checkNearbyAvatarsTimer->setInterval(CHECK_NEARBY_AVATARS_INTERVAL_MS);
checkNearbyAvatarsTimer->setInterval(CHECK_NEARBY_AVATARS_INTERVAL_MS); // 10 seconds, Qt::CoarseTimer ok
connect(checkNearbyAvatarsTimer, &QTimer::timeout, this, [this]() {
auto avatarManager = DependencyManager::get<AvatarManager>();
int nearbyAvatars = avatarManager->numberOfAvatarsInRange(avatarManager->getMyAvatar()->getPosition(),
@ -1500,16 +1501,16 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// Monitor model assets (e.g., from Clara.io) added to the world that may need resizing.
static const int ADD_ASSET_TO_WORLD_TIMER_INTERVAL_MS = 1000;
_addAssetToWorldResizeTimer.setInterval(ADD_ASSET_TO_WORLD_TIMER_INTERVAL_MS);
_addAssetToWorldResizeTimer.setInterval(ADD_ASSET_TO_WORLD_TIMER_INTERVAL_MS); // 1s, Qt::CoarseTimer acceptable
connect(&_addAssetToWorldResizeTimer, &QTimer::timeout, this, &Application::addAssetToWorldCheckModelSize);
// Auto-update and close adding asset to world info message box.
static const int ADD_ASSET_TO_WORLD_INFO_TIMEOUT_MS = 5000;
_addAssetToWorldInfoTimer.setInterval(ADD_ASSET_TO_WORLD_INFO_TIMEOUT_MS);
_addAssetToWorldInfoTimer.setInterval(ADD_ASSET_TO_WORLD_INFO_TIMEOUT_MS); // 5s, Qt::CoarseTimer acceptable
_addAssetToWorldInfoTimer.setSingleShot(true);
connect(&_addAssetToWorldInfoTimer, &QTimer::timeout, this, &Application::addAssetToWorldInfoTimeout);
static const int ADD_ASSET_TO_WORLD_ERROR_TIMEOUT_MS = 8000;
_addAssetToWorldErrorTimer.setInterval(ADD_ASSET_TO_WORLD_ERROR_TIMEOUT_MS);
_addAssetToWorldErrorTimer.setInterval(ADD_ASSET_TO_WORLD_ERROR_TIMEOUT_MS); // 8s, Qt::CoarseTimer acceptable
_addAssetToWorldErrorTimer.setSingleShot(true);
connect(&_addAssetToWorldErrorTimer, &QTimer::timeout, this, &Application::addAssetToWorldErrorTimeout);
@ -4310,7 +4311,7 @@ void Application::update(float deltaTime) {
// AvatarManager update
{
PerformanceTimer perfTimer("AvatarManger");
PerformanceTimer perfTimer("AvatarManager");
_avatarSimCounter.increment();
{

View file

@ -704,6 +704,15 @@ Menu::Menu() {
addActionToQMenuAndActionHash(developerMenu, MenuOption::Log, Qt::CTRL | Qt::SHIFT | Qt::Key_L,
qApp, SLOT(toggleLogDialog()));
action = addActionToQMenuAndActionHash(developerMenu, "Script Log (HMD friendly)...");
connect(action, &QAction::triggered, [] {
auto scriptEngines = DependencyManager::get<ScriptEngines>();
QUrl defaultScriptsLoc = defaultScriptsLocation();
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/debugging/debugWindow.js");
scriptEngines->loadScript(defaultScriptsLoc.toString());
});
// Developer > Stats
addCheckableActionToQMenuAndActionHash(developerMenu, MenuOption::Stats);

View file

@ -275,13 +275,16 @@ void Avatar::updateAvatarEntities() {
}
AvatarEntityIDs recentlyDettachedAvatarEntities = getAndClearRecentlyDetachedIDs();
_avatarEntitiesLock.withReadLock([&] {
foreach (auto entityID, recentlyDettachedAvatarEntities) {
if (!_avatarEntityData.contains(entityID)) {
entityTree->deleteEntity(entityID, true, true);
if (!recentlyDettachedAvatarEntities.empty()) {
// only lock this thread when absolutely necessary
_avatarEntitiesLock.withReadLock([&] {
foreach (auto entityID, recentlyDettachedAvatarEntities) {
if (!_avatarEntityData.contains(entityID)) {
entityTree->deleteEntity(entityID, true, true);
}
}
}
});
});
}
});
if (success) {
@ -299,18 +302,25 @@ void Avatar::simulate(float deltaTime) {
}
animateScaleChanges(deltaTime);
bool avatarPositionInView = false;
bool avatarMeshInView = false;
bool avatarInView = false;
{ // update the shouldAnimate flag to match whether or not we will render the avatar.
PerformanceTimer perfTimer("cull");
ViewFrustum viewFrustum;
{
PerformanceTimer perfTimer("LOD");
// simple frustum check
PerformanceTimer perfTimer("inView");
ViewFrustum viewFrustum;
qApp->copyDisplayViewFrustum(viewFrustum);
avatarInView = viewFrustum.sphereIntersectsFrustum(getPosition(), getBoundingRadius())
|| viewFrustum.boxIntersectsFrustum(_skeletonModel->getRenderableMeshBound());
}
PerformanceTimer lodPerfTimer("LOD");
if (avatarInView) {
const float MINIMUM_VISIBILITY_FOR_ON = 0.4f;
const float MAXIMUM_VISIBILITY_FOR_OFF = 0.6f;
ViewFrustum viewFrustum;
qApp->copyViewFrustum(viewFrustum);
float visibility = calculateRenderAccuracy(viewFrustum.getPosition(),
getBounds(), DependencyManager::get<LODManager>()->getOctreeSizeScale());
getBounds(), DependencyManager::get<LODManager>()->getOctreeSizeScale());
if (!_shouldAnimate) {
if (visibility > MINIMUM_VISIBILITY_FOR_ON) {
_shouldAnimate = true;
@ -321,19 +331,11 @@ void Avatar::simulate(float deltaTime) {
qCDebug(interfaceapp) << "Optimizing" << (isMyAvatar() ? "myself" : getSessionUUID()) << "for visibility" << visibility;
}
}
{
PerformanceTimer perfTimer("inView");
// simple frustum check
float boundingRadius = getBoundingRadius();
qApp->copyDisplayViewFrustum(viewFrustum);
avatarPositionInView = viewFrustum.sphereIntersectsFrustum(getPosition(), boundingRadius);
avatarMeshInView = viewFrustum.boxIntersectsFrustum(_skeletonModel->getRenderableMeshBound());
}
}
uint64_t start = usecTimestampNow();
if (_shouldAnimate && !_shouldSkipRender && (avatarPositionInView || avatarMeshInView)) {
// CRUFT? _shouldSkipRender is never set 'true'
if (_shouldAnimate && avatarInView && !_shouldSkipRender) {
{
PerformanceTimer perfTimer("skeleton");
_skeletonModel->getRig()->copyJointsFromJointData(_jointData);
@ -725,7 +727,7 @@ glm::vec3 Avatar::getDisplayNamePosition() const {
glm::vec3 bodyUpDirection = getBodyUpDirection();
DEBUG_VALUE("bodyUpDirection =", bodyUpDirection);
if (getSkeletonModel()->getNeckPosition(namePosition)) {
if (_skeletonModel->getNeckPosition(namePosition)) {
float headHeight = getHeadHeight();
DEBUG_VALUE("namePosition =", namePosition);
DEBUG_VALUE("headHeight =", headHeight);
@ -1244,8 +1246,8 @@ glm::vec3 Avatar::getUncachedLeftPalmPosition() const {
return leftPalmPosition;
}
// avatar didn't have a LeftHandMiddle1 joint, fall back on this:
getSkeletonModel()->getJointRotationInWorldFrame(getSkeletonModel()->getLeftHandJointIndex(), leftPalmRotation);
getSkeletonModel()->getLeftHandPosition(leftPalmPosition);
_skeletonModel->getJointRotationInWorldFrame(_skeletonModel->getLeftHandJointIndex(), leftPalmRotation);
_skeletonModel->getLeftHandPosition(leftPalmPosition);
leftPalmPosition += HAND_TO_PALM_OFFSET * glm::inverse(leftPalmRotation);
return leftPalmPosition;
}
@ -1253,7 +1255,7 @@ glm::vec3 Avatar::getUncachedLeftPalmPosition() const {
glm::quat Avatar::getUncachedLeftPalmRotation() const {
assert(QThread::currentThread() == thread()); // main thread access only
glm::quat leftPalmRotation;
getSkeletonModel()->getJointRotationInWorldFrame(getSkeletonModel()->getLeftHandJointIndex(), leftPalmRotation);
_skeletonModel->getJointRotationInWorldFrame(_skeletonModel->getLeftHandJointIndex(), leftPalmRotation);
return leftPalmRotation;
}
@ -1265,8 +1267,8 @@ glm::vec3 Avatar::getUncachedRightPalmPosition() const {
return rightPalmPosition;
}
// avatar didn't have a RightHandMiddle1 joint, fall back on this:
getSkeletonModel()->getJointRotationInWorldFrame(getSkeletonModel()->getRightHandJointIndex(), rightPalmRotation);
getSkeletonModel()->getRightHandPosition(rightPalmPosition);
_skeletonModel->getJointRotationInWorldFrame(_skeletonModel->getRightHandJointIndex(), rightPalmRotation);
_skeletonModel->getRightHandPosition(rightPalmPosition);
rightPalmPosition += HAND_TO_PALM_OFFSET * glm::inverse(rightPalmRotation);
return rightPalmPosition;
}
@ -1274,7 +1276,7 @@ glm::vec3 Avatar::getUncachedRightPalmPosition() const {
glm::quat Avatar::getUncachedRightPalmRotation() const {
assert(QThread::currentThread() == thread()); // main thread access only
glm::quat rightPalmRotation;
getSkeletonModel()->getJointRotationInWorldFrame(getSkeletonModel()->getRightHandJointIndex(), rightPalmRotation);
_skeletonModel->getJointRotationInWorldFrame(_skeletonModel->getRightHandJointIndex(), rightPalmRotation);
return rightPalmRotation;
}

View file

@ -170,9 +170,9 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
// simulate avatar fades
simulateAvatarFades(deltaTime);
SAMPLE_PROFILE_COUNTER(0.1f, simulation_avatar, "NumAvatarsPerSec",
PROFILE_COUNTER(simulation_avatar, "NumAvatarsPerSec",
{ { "NumAvatarsPerSec", (float)(size() * USECS_PER_SECOND) / (float)(usecTimestampNow() - start) } });
SAMPLE_PROFILE_COUNTER(0.1f, simulation_avatar, "NumJointsPerSec", { { "NumJointsPerSec", Avatar::getNumJointsProcessedPerSecond() } });
PROFILE_COUNTER(simulation_avatar, "NumJointsPerSec", { { "NumJointsPerSec", Avatar::getNumJointsProcessedPerSecond() } });
}
void AvatarManager::postUpdate(float deltaTime) {

View file

@ -5,15 +5,18 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "TestScriptingInterface.h"
#include <QtCore/QCoreApplication>
#include <QtCore/QLoggingCategory>
#include <QtCore/QThread>
#include <DependencyManager.h>
#include <Trace.h>
#include <StatTracker.h>
#include <OffscreenUi.h>
#include "Application.h"
TestScriptingInterface* TestScriptingInterface::getInstance() {
static TestScriptingInterface sharedInstance;
@ -25,12 +28,47 @@ void TestScriptingInterface::quit() {
}
void TestScriptingInterface::waitForTextureIdle() {
waitForCondition(0, []()->bool {
return (0 == gpu::Context::getTextureGPUTransferCount());
});
}
void TestScriptingInterface::waitForDownloadIdle() {
waitForCondition(0, []()->bool {
return (0 == ResourceCache::getLoadingRequestCount()) && (0 == ResourceCache::getPendingRequestCount());
});
}
void TestScriptingInterface::waitForProcessingIdle() {
auto statTracker = DependencyManager::get<StatTracker>();
waitForCondition(0, [statTracker]()->bool {
return (0 == statTracker->getStat("Processing").toInt() && 0 == statTracker->getStat("PendingProcessing").toInt());
});
}
void TestScriptingInterface::waitIdle() {
// Initial wait for some incoming work
QThread::sleep(1);
waitForDownloadIdle();
waitForProcessingIdle();
waitForTextureIdle();
}
bool TestScriptingInterface::loadTestScene(QString scene) {
static const QString TEST_ROOT = "https://raw.githubusercontent.com/highfidelity/hifi_tests/master/";
static const QString TEST_BINARY_ROOT = "https://hifi-public.s3.amazonaws.com/test_scene_data/";
static const QString TEST_SCRIPTS_ROOT = TEST_ROOT + "scripts/";
static const QString TEST_SCENES_ROOT = TEST_ROOT + "scenes/";
return DependencyManager::get<OffscreenUi>()->returnFromUiThread([scene]()->QVariant {
ResourceManager::setUrlPrefixOverride("atp:/", TEST_BINARY_ROOT + scene + ".atp/");
auto tree = qApp->getEntities()->getTree();
auto treeIsClient = tree->getIsClient();
// Force the tree to accept the load regardless of permissions
tree->setIsClient(false);
auto result = tree->readFromURL(TEST_SCENES_ROOT + scene + ".json");
tree->setIsClient(treeIsClient);
return result;
}).toBool();
}
bool TestScriptingInterface::startTracing(QString logrules) {
@ -55,4 +93,35 @@ bool TestScriptingInterface::stopTracing(QString filename) {
tracer->stopTracing();
tracer->serialize(filename);
return true;
}
}
void TestScriptingInterface::clear() {
qApp->postLambdaEvent([] {
qApp->getEntities()->clear();
});
}
bool TestScriptingInterface::waitForConnection(qint64 maxWaitMs) {
// Wait for any previous connection to die
QThread::sleep(1);
return waitForCondition(maxWaitMs, []()->bool {
return DependencyManager::get<NodeList>()->getDomainHandler().isConnected();
});
}
void TestScriptingInterface::wait(int milliseconds) {
QThread::msleep(milliseconds);
}
bool TestScriptingInterface::waitForCondition(qint64 maxWaitMs, std::function<bool()> condition) {
QElapsedTimer elapsed;
elapsed.start();
while (!condition()) {
if (maxWaitMs > 0 && elapsed.elapsed() > maxWaitMs) {
return false;
}
QThread::msleep(1);
}
return condition();
}

View file

@ -10,6 +10,7 @@
#ifndef hifi_TestScriptingInterface_h
#define hifi_TestScriptingInterface_h
#include <functional>
#include <QtCore/QObject>
class TestScriptingInterface : public QObject {
@ -34,10 +35,24 @@ public slots:
void waitForDownloadIdle();
/**jsdoc
* Waits for all pending downloads and texture transfers to be complete
* Waits for all file parsing operations to be complete
*/
void waitForProcessingIdle();
/**jsdoc
* Waits for all pending downloads, parsing and texture transfers to be complete
*/
void waitIdle();
bool waitForConnection(qint64 maxWaitMs = 10000);
void wait(int milliseconds);
bool loadTestScene(QString sceneFile);
void clear();
/**jsdoc
* Start recording Chrome compatible tracing events
* logRules can be used to specify a set of logging category rules to limit what gets captured
@ -49,6 +64,9 @@ public slots:
* Using a filename with a .gz extension will automatically compress the output file
*/
bool stopTracing(QString filename);
private:
bool waitForCondition(qint64 maxWaitMs, std::function<bool()> condition);
};
#endif // hifi_TestScriptingInterface_h

View file

@ -91,7 +91,7 @@ void DiskCacheEditor::makeDialog() {
static const int REFRESH_INTERVAL = 100; // msec
_refreshTimer = new QTimer(_dialog);
_refreshTimer->setInterval(REFRESH_INTERVAL);
_refreshTimer->setInterval(REFRESH_INTERVAL); // Qt::CoarseTimer acceptable, no need for real time accuracy
_refreshTimer->setSingleShot(false);
QObject::connect(_refreshTimer.data(), &QTimer::timeout, this, &DiskCacheEditor::refresh);
_refreshTimer->start();

View file

@ -161,6 +161,7 @@ void Rig::destroyAnimGraph() {
_internalPoseSet._absolutePoses.clear();
_internalPoseSet._overridePoses.clear();
_internalPoseSet._overrideFlags.clear();
_numOverrides = 0;
}
void Rig::initJointStates(const FBXGeometry& geometry, const glm::mat4& modelOffset) {
@ -180,6 +181,7 @@ void Rig::initJointStates(const FBXGeometry& geometry, const glm::mat4& modelOff
_internalPoseSet._overrideFlags.clear();
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints(), false);
_numOverrides = 0;
buildAbsoluteRigPoses(_animSkeleton->getRelativeDefaultPoses(), _absoluteDefaultPoses);
@ -207,6 +209,7 @@ void Rig::reset(const FBXGeometry& geometry) {
_internalPoseSet._overrideFlags.clear();
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints(), false);
_numOverrides = 0;
buildAbsoluteRigPoses(_animSkeleton->getRelativeDefaultPoses(), _absoluteDefaultPoses);
@ -276,13 +279,17 @@ void Rig::setModelOffset(const glm::mat4& modelOffsetMat) {
void Rig::clearJointState(int index) {
if (isIndexValid(index)) {
_internalPoseSet._overrideFlags[index] = false;
if (_internalPoseSet._overrideFlags[index]) {
_internalPoseSet._overrideFlags[index] = false;
--_numOverrides;
}
_internalPoseSet._overridePoses[index] = _animSkeleton->getRelativeDefaultPose(index);
}
}
void Rig::clearJointStates() {
_internalPoseSet._overrideFlags.clear();
_numOverrides = 0;
if (_animSkeleton) {
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints());
_internalPoseSet._overridePoses = _animSkeleton->getRelativeDefaultPoses();
@ -291,7 +298,10 @@ void Rig::clearJointStates() {
void Rig::clearJointAnimationPriority(int index) {
if (isIndexValid(index)) {
_internalPoseSet._overrideFlags[index] = false;
if (_internalPoseSet._overrideFlags[index]) {
_internalPoseSet._overrideFlags[index] = false;
--_numOverrides;
}
_internalPoseSet._overridePoses[index] = _animSkeleton->getRelativeDefaultPose(index);
}
}
@ -320,7 +330,10 @@ void Rig::setJointTranslation(int index, bool valid, const glm::vec3& translatio
if (isIndexValid(index)) {
if (valid) {
assert(_internalPoseSet._overrideFlags.size() == _internalPoseSet._overridePoses.size());
_internalPoseSet._overrideFlags[index] = true;
if (!_internalPoseSet._overrideFlags[index]) {
_internalPoseSet._overrideFlags[index] = true;
++_numOverrides;
}
_internalPoseSet._overridePoses[index].trans = translation;
}
}
@ -329,7 +342,10 @@ void Rig::setJointTranslation(int index, bool valid, const glm::vec3& translatio
void Rig::setJointState(int index, bool valid, const glm::quat& rotation, const glm::vec3& translation, float priority) {
if (isIndexValid(index)) {
assert(_internalPoseSet._overrideFlags.size() == _internalPoseSet._overridePoses.size());
_internalPoseSet._overrideFlags[index] = true;
if (!_internalPoseSet._overrideFlags[index]) {
_internalPoseSet._overrideFlags[index] = true;
++_numOverrides;
}
_internalPoseSet._overridePoses[index].rot = rotation;
_internalPoseSet._overridePoses[index].trans = translation;
}
@ -339,7 +355,10 @@ void Rig::setJointRotation(int index, bool valid, const glm::quat& rotation, flo
if (isIndexValid(index)) {
if (valid) {
ASSERT(_internalPoseSet._overrideFlags.size() == _internalPoseSet._overridePoses.size());
_internalPoseSet._overrideFlags[index] = true;
if (!_internalPoseSet._overrideFlags[index]) {
_internalPoseSet._overrideFlags[index] = true;
++_numOverrides;
}
_internalPoseSet._overridePoses[index].rot = rotation;
}
}
@ -518,7 +537,7 @@ void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPos
// sine wave LFO var for testing.
static float t = 0.0f;
_animVars.set("sine", 2.0f * static_cast<float>(0.5 * sin(t) + 0.5));
_animVars.set("sine", 2.0f * 0.5f * sinf(t) + 0.5f);
float moveForwardAlpha = 0.0f;
float moveBackwardAlpha = 0.0f;
@ -884,10 +903,12 @@ void Rig::updateAnimationStateHandlers() { // called on avatar update thread (wh
void Rig::updateAnimations(float deltaTime, glm::mat4 rootTransform) {
PROFILE_RANGE_EX(simulation_animation, __FUNCTION__, 0xffff00ff, 0);
PerformanceTimer perfTimer("updateAnimations");
setModelOffset(rootTransform);
if (_animNode) {
PerformanceTimer perfTimer("handleTriggers");
updateAnimationStateHandlers();
_animVars.setRigToGeometryTransform(_rigToGeometryTransform);
@ -904,7 +925,6 @@ void Rig::updateAnimations(float deltaTime, glm::mat4 rootTransform) {
_animVars.setTrigger(trigger);
}
}
applyOverridePoses();
buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
@ -1176,7 +1196,8 @@ bool Rig::getModelRegistrationPoint(glm::vec3& modelRegistrationPointOut) const
}
void Rig::applyOverridePoses() {
if (!_animSkeleton) {
PerformanceTimer perfTimer("override");
if (_numOverrides == 0 || !_animSkeleton) {
return;
}
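The hunks above pair the per-joint _overrideFlags vector with a running _numOverrides count, guarded so repeated sets or clears cannot skew it, which lets applyOverridePoses() return immediately when nothing is overridden. A minimal standalone sketch of the same idiom, with illustrative names rather than the Rig API:

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    // Illustrative only (not the Rig API): a per-joint flag vector plus a running
    // counter, so the per-frame hot path can early-out in O(1) instead of scanning
    // every flag, mirroring _overrideFlags + _numOverrides above.
    struct OverrideSet {
        std::vector<bool> flags;
        int numSet { 0 };

        explicit OverrideSet(size_t numJoints) : flags(numJoints, false) {}

        void set(size_t i) {                       // cf. Rig::setJointRotation
            if (!flags[i]) { flags[i] = true; ++numSet; }
        }
        void clear(size_t i) {                     // cf. Rig::clearJointState
            if (flags[i]) { flags[i] = false; --numSet; }
        }
        bool any() const { return numSet != 0; }   // cf. the applyOverridePoses early-out
    };

    int main() {
        OverrideSet overrides(80);
        printf("apply? %d\n", overrides.any());    // 0: skip the whole pass
        overrides.set(12);
        overrides.set(12);                         // double-set does not double-count
        printf("apply? %d, count %d\n", overrides.any(), overrides.numSet); // 1, 1
        overrides.clear(12);
        printf("apply? %d\n", overrides.any());    // 0 again
        return 0;
    }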
@ -1192,28 +1213,24 @@ void Rig::applyOverridePoses() {
}
void Rig::buildAbsoluteRigPoses(const AnimPoseVec& relativePoses, AnimPoseVec& absolutePosesOut) {
PerformanceTimer perfTimer("buildAbsolute");
if (!_animSkeleton) {
return;
}
ASSERT(_animSkeleton->getNumJoints() == (int)relativePoses.size());
// flatten all poses out so they are absolute not relative
absolutePosesOut.resize(relativePoses.size());
AnimPose geometryToRigTransform(_geometryToRigTransform);
for (int i = 0; i < (int)relativePoses.size(); i++) {
int parentIndex = _animSkeleton->getParentIndex(i);
if (parentIndex == -1) {
absolutePosesOut[i] = relativePoses[i];
// transform all root absolute poses into rig space
absolutePosesOut[i] = geometryToRigTransform * relativePoses[i];
} else {
absolutePosesOut[i] = absolutePosesOut[parentIndex] * relativePoses[i];
}
}
// transform all absolute poses into rig space.
AnimPose geometryToRigTransform(_geometryToRigTransform);
for (int i = 0; i < (int)absolutePosesOut.size(); i++) {
absolutePosesOut[i] = geometryToRigTransform * absolutePosesOut[i];
}
}
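The rewrite of buildAbsoluteRigPoses() folds the geometry-to-rig transform into the root poses instead of running a second pass over every absolute pose. This works because matrix composition is associative: children are built as absolute[parent] * relative[i], so pre-multiplying only the roots by the geometry-to-rig transform propagates it to every joint. A small self-contained check of that identity, using made-up transforms:

    #include <cstdio>
    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    // Illustrative only: G * (A_parent * R_child) == (G * A_parent) * R_child,
    // which is why transforming just the roots is equivalent to the old second pass.
    int main() {
        glm::mat4 G      = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 1.6f, 0.0f));   // made-up geometryToRigTransform
        glm::mat4 Aroot  = glm::rotate(glm::mat4(1.0f), 0.5f, glm::vec3(0.0f, 1.0f, 0.0f)); // made-up root absolute pose
        glm::mat4 Rchild = glm::translate(glm::mat4(1.0f), glm::vec3(0.2f, 0.0f, 0.0f));    // made-up child relative pose

        glm::mat4 twoPass = G * (Aroot * Rchild); // old code: build absolutes, then transform them all
        glm::mat4 onePass = (G * Aroot) * Rchild; // new code: fold G into the root, children inherit it

        float maxDiff = 0.0f;
        for (int c = 0; c < 4; ++c)
            for (int r = 0; r < 4; ++r)
                maxDiff = glm::max(maxDiff, glm::abs(twoPass[c][r] - onePass[c][r]));
        printf("max difference: %g\n", maxDiff);  // ~0, up to float rounding
        return 0;
    }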
glm::mat4 Rig::getJointTransform(int jointIndex) const {
@ -1251,62 +1268,36 @@ void Rig::copyJointsIntoJointData(QVector<JointData>& jointDataVec) const {
void Rig::copyJointsFromJointData(const QVector<JointData>& jointDataVec) {
PerformanceTimer perfTimer("copyJoints");
if (_animSkeleton && jointDataVec.size() == (int)_internalPoseSet._overrideFlags.size()) {
// transform all the default poses into rig space.
const AnimPose geometryToRigPose(_geometryToRigTransform);
std::vector<bool> overrideFlags(_internalPoseSet._overridePoses.size(), false);
// start with the default rotations in absolute rig frame
if (_animSkeleton && jointDataVec.size() == (int)_internalPoseSet._relativePoses.size()) {
// make a vector of rotations in absolute-geometry-frame
const AnimPoseVec& absoluteDefaultPoses = _animSkeleton->getAbsoluteDefaultPoses();
std::vector<glm::quat> rotations;
rotations.reserve(_animSkeleton->getAbsoluteDefaultPoses().size());
for (auto& pose : _animSkeleton->getAbsoluteDefaultPoses()) {
rotations.push_back(geometryToRigPose.rot * pose.rot);
}
// start translations in relative frame but scaled to meters.
std::vector<glm::vec3> translations;
translations.reserve(_animSkeleton->getRelativeDefaultPoses().size());
for (auto& pose : _animSkeleton->getRelativeDefaultPoses()) {
translations.push_back(_geometryOffset.scale * pose.trans);
}
ASSERT(overrideFlags.size() == rotations.size());
// copy over rotations from the jointDataVec, which is also in absolute rig frame
const glm::quat rigToGeometryRot(glmExtractRotation(_rigToGeometryTransform));
for (int i = 0; i < jointDataVec.size(); i++) {
if (isIndexValid(i)) {
const JointData& data = jointDataVec.at(i);
if (data.rotationSet) {
overrideFlags[i] = true;
rotations[i] = data.rotation;
}
if (data.translationSet) {
overrideFlags[i] = true;
translations[i] = data.translation;
}
const JointData& data = jointDataVec.at(i);
if (data.rotationSet) {
// JointData rotations are in absolute rig-frame so we rotate them to absolute geometry-frame
rotations.push_back(rigToGeometryRot * data.rotation);
} else {
rotations.push_back(absoluteDefaultPoses[i].rot);
}
}
ASSERT(_internalPoseSet._overrideFlags.size() == _internalPoseSet._overridePoses.size());
// convert resulting rotations into geometry space.
const glm::quat rigToGeometryRot(glmExtractRotation(_rigToGeometryTransform));
for (auto& rot : rotations) {
rot = rigToGeometryRot * rot;
}
// convert all rotations from absolute to parent relative.
// convert rotations from absolute to parent relative.
_animSkeleton->convertAbsoluteRotationsToRelative(rotations);
// copy the geometry space parent relative poses into _overridePoses
// store new relative poses
const AnimPoseVec& relativeDefaultPoses = _animSkeleton->getRelativeDefaultPoses();
for (int i = 0; i < jointDataVec.size(); i++) {
if (overrideFlags[i]) {
_internalPoseSet._overrideFlags[i] = true;
_internalPoseSet._overridePoses[i].scale = Vectors::ONE;
_internalPoseSet._overridePoses[i].rot = rotations[i];
// scale translations from meters back into geometry units.
_internalPoseSet._overridePoses[i].trans = _invGeometryOffset.scale * translations[i];
const JointData& data = jointDataVec.at(i);
_internalPoseSet._relativePoses[i].scale = Vectors::ONE;
_internalPoseSet._relativePoses[i].rot = rotations[i];
if (data.translationSet) {
// JointData translations are in scaled relative-frame so we scale back to regular relative-frame
_internalPoseSet._relativePoses[i].trans = _invGeometryOffset.scale * data.translation;
} else {
_internalPoseSet._relativePoses[i].trans = relativeDefaultPoses[i].trans;
}
}
}

View file

@ -311,6 +311,7 @@ protected:
std::map<QString, AnimNode::Pointer> _origRoleAnimations;
int32_t _numOverrides { 0 };
bool _lastEnableInverseKinematics { true };
bool _enableInverseKinematics { true };

View file

@ -16,6 +16,14 @@
#include "AudioHRTF.h"
#include "AudioHRTFData.h"
#if defined(_MSC_VER)
#define ALIGN32 __declspec(align(32))
#elif defined(__GNUC__)
#define ALIGN32 __attribute__((aligned(32)))
#else
#define ALIGN32
#endif
#ifndef MAX
#define MAX(a,b) (((a) > (b)) ? (a) : (b))
#endif
@ -30,7 +38,7 @@
// Transients in the time-varying Thiran allpass filter are eliminated by the initial delay.
// Valimaki, Laakso. "Elimination of Transients in Time-Varying Allpass Fractional Delay Filters"
//
static const float crossfadeTable[HRTF_BLOCK] = {
ALIGN32 static const float crossfadeTable[HRTF_BLOCK] = {
1.0000000000f, 1.0000000000f, 1.0000000000f, 1.0000000000f, 1.0000000000f, 1.0000000000f, 1.0000000000f, 1.0000000000f,
0.9999545513f, 0.9998182135f, 0.9995910114f, 0.9992729863f, 0.9988641959f, 0.9983647147f, 0.9977746334f, 0.9970940592f,
0.9963231160f, 0.9954619438f, 0.9945106993f, 0.9934695553f, 0.9923387012f, 0.9911183425f, 0.9898087010f, 0.9884100149f,
@ -192,25 +200,29 @@ static void FIR_1x4_SSE(float* src, float* dst0, float* dst1, float* dst2, float
for (int k = 0; k < HRTF_TAPS; k += 4) {
acc0 = _mm_add_ps(acc0, _mm_mul_ps(_mm_load1_ps(&coef0[-k-0]), _mm_loadu_ps(&ps[k+0])));
acc1 = _mm_add_ps(acc1, _mm_mul_ps(_mm_load1_ps(&coef1[-k-0]), _mm_loadu_ps(&ps[k+0])));
acc2 = _mm_add_ps(acc2, _mm_mul_ps(_mm_load1_ps(&coef2[-k-0]), _mm_loadu_ps(&ps[k+0])));
acc3 = _mm_add_ps(acc3, _mm_mul_ps(_mm_load1_ps(&coef3[-k-0]), _mm_loadu_ps(&ps[k+0])));
__m128 x0 = _mm_loadu_ps(&ps[k+0]);
acc0 = _mm_add_ps(acc0, _mm_mul_ps(_mm_load1_ps(&coef0[-k-0]), x0));
acc1 = _mm_add_ps(acc1, _mm_mul_ps(_mm_load1_ps(&coef1[-k-0]), x0));
acc2 = _mm_add_ps(acc2, _mm_mul_ps(_mm_load1_ps(&coef2[-k-0]), x0));
acc3 = _mm_add_ps(acc3, _mm_mul_ps(_mm_load1_ps(&coef3[-k-0]), x0));
acc0 = _mm_add_ps(acc0, _mm_mul_ps(_mm_load1_ps(&coef0[-k-1]), _mm_loadu_ps(&ps[k+1])));
acc1 = _mm_add_ps(acc1, _mm_mul_ps(_mm_load1_ps(&coef1[-k-1]), _mm_loadu_ps(&ps[k+1])));
acc2 = _mm_add_ps(acc2, _mm_mul_ps(_mm_load1_ps(&coef2[-k-1]), _mm_loadu_ps(&ps[k+1])));
acc3 = _mm_add_ps(acc3, _mm_mul_ps(_mm_load1_ps(&coef3[-k-1]), _mm_loadu_ps(&ps[k+1])));
__m128 x1 = _mm_loadu_ps(&ps[k+1]);
acc0 = _mm_add_ps(acc0, _mm_mul_ps(_mm_load1_ps(&coef0[-k-1]), x1));
acc1 = _mm_add_ps(acc1, _mm_mul_ps(_mm_load1_ps(&coef1[-k-1]), x1));
acc2 = _mm_add_ps(acc2, _mm_mul_ps(_mm_load1_ps(&coef2[-k-1]), x1));
acc3 = _mm_add_ps(acc3, _mm_mul_ps(_mm_load1_ps(&coef3[-k-1]), x1));
acc0 = _mm_add_ps(acc0, _mm_mul_ps(_mm_load1_ps(&coef0[-k-2]), _mm_loadu_ps(&ps[k+2])));
acc1 = _mm_add_ps(acc1, _mm_mul_ps(_mm_load1_ps(&coef1[-k-2]), _mm_loadu_ps(&ps[k+2])));
acc2 = _mm_add_ps(acc2, _mm_mul_ps(_mm_load1_ps(&coef2[-k-2]), _mm_loadu_ps(&ps[k+2])));
acc3 = _mm_add_ps(acc3, _mm_mul_ps(_mm_load1_ps(&coef3[-k-2]), _mm_loadu_ps(&ps[k+2])));
__m128 x2 = _mm_loadu_ps(&ps[k+2]);
acc0 = _mm_add_ps(acc0, _mm_mul_ps(_mm_load1_ps(&coef0[-k-2]), x2));
acc1 = _mm_add_ps(acc1, _mm_mul_ps(_mm_load1_ps(&coef1[-k-2]), x2));
acc2 = _mm_add_ps(acc2, _mm_mul_ps(_mm_load1_ps(&coef2[-k-2]), x2));
acc3 = _mm_add_ps(acc3, _mm_mul_ps(_mm_load1_ps(&coef3[-k-2]), x2));
acc0 = _mm_add_ps(acc0, _mm_mul_ps(_mm_load1_ps(&coef0[-k-3]), _mm_loadu_ps(&ps[k+3])));
acc1 = _mm_add_ps(acc1, _mm_mul_ps(_mm_load1_ps(&coef1[-k-3]), _mm_loadu_ps(&ps[k+3])));
acc2 = _mm_add_ps(acc2, _mm_mul_ps(_mm_load1_ps(&coef2[-k-3]), _mm_loadu_ps(&ps[k+3])));
acc3 = _mm_add_ps(acc3, _mm_mul_ps(_mm_load1_ps(&coef3[-k-3]), _mm_loadu_ps(&ps[k+3])));
__m128 x3 = _mm_loadu_ps(&ps[k+3]);
acc0 = _mm_add_ps(acc0, _mm_mul_ps(_mm_load1_ps(&coef0[-k-3]), x3));
acc1 = _mm_add_ps(acc1, _mm_mul_ps(_mm_load1_ps(&coef1[-k-3]), x3));
acc2 = _mm_add_ps(acc2, _mm_mul_ps(_mm_load1_ps(&coef2[-k-3]), x3));
acc3 = _mm_add_ps(acc3, _mm_mul_ps(_mm_load1_ps(&coef3[-k-3]), x3));
}
_mm_storeu_ps(&dst0[i], acc0);
@ -226,11 +238,11 @@ static void FIR_1x4_SSE(float* src, float* dst0, float* dst1, float* dst2, float
#include "CPUDetect.h"
void FIR_1x4_AVX(float* src, float* dst0, float* dst1, float* dst2, float* dst3, float coef[4][HRTF_TAPS], int numFrames);
void FIR_1x4_AVX2(float* src, float* dst0, float* dst1, float* dst2, float* dst3, float coef[4][HRTF_TAPS], int numFrames);
static void FIR_1x4(float* src, float* dst0, float* dst1, float* dst2, float* dst3, float coef[4][HRTF_TAPS], int numFrames) {
static auto f = cpuSupportsAVX() ? FIR_1x4_AVX : FIR_1x4_SSE;
static auto f = cpuSupportsAVX2() ? FIR_1x4_AVX2 : FIR_1x4_SSE;
(*f)(src, dst0, dst1, dst2, dst3, coef, numFrames); // dispatch
}
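The dispatch above resolves a static function pointer once, on first call, from a runtime CPU-feature probe. A toy sketch of the same idiom with stand-in functions (the real probe lives in CPUDetect.h):

    #include <cstdio>

    // Illustrative only: a static function pointer is initialized exactly once,
    // based on a runtime feature check (faked here by a constant stub).
    static bool cpuSupportsAVX2_stub() { return false; }   // stand-in for the CPUDetect.h probe

    static int kernel_scalar(int x) { return x + 1; }
    static int kernel_avx2(int x)   { return x + 2; }      // pretend this is the wide version

    static int kernel(int x) {
        static auto f = cpuSupportsAVX2_stub() ? kernel_avx2 : kernel_scalar; // resolved once
        return (*f)(x);                                                       // dispatch
    }

    int main() { printf("%d\n", kernel(41)); return 0; }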
@ -842,12 +854,12 @@ void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth,
assert(index < HRTF_TABLES);
assert(numFrames == HRTF_BLOCK);
float in[HRTF_TAPS + HRTF_BLOCK]; // mono
float firCoef[4][HRTF_TAPS]; // 4-channel
float firBuffer[4][HRTF_DELAY + HRTF_BLOCK]; // 4-channel
float bqCoef[5][8]; // 4-channel (interleaved)
float bqBuffer[4 * HRTF_BLOCK]; // 4-channel (interleaved)
int delay[4]; // 4-channel (interleaved)
ALIGN32 float in[HRTF_TAPS + HRTF_BLOCK]; // mono
ALIGN32 float firCoef[4][HRTF_TAPS]; // 4-channel
ALIGN32 float firBuffer[4][HRTF_DELAY + HRTF_BLOCK]; // 4-channel
ALIGN32 float bqCoef[5][8]; // 4-channel (interleaved)
ALIGN32 float bqBuffer[4 * HRTF_BLOCK]; // 4-channel (interleaved)
int delay[4]; // 4-channel (interleaved)
// to avoid polluting the cache, old filters are recomputed instead of stored
setFilters(firCoef, bqCoef, delay, index, _azimuthState, _distanceState, _gainState, L0);
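The ALIGN32 attribute added to the lookup tables and to these stack buffers guarantees 32-byte alignment, which is what AVX/AVX2 kernels want for aligned loads and stores. A minimal illustration of the macro (copied from the hunk above) with aligned AVX intrinsics; compile with -mavx (or /arch:AVX):

    #include <cstdio>
    #include <immintrin.h>

    #if defined(_MSC_VER)
    #define ALIGN32 __declspec(align(32))
    #elif defined(__GNUC__)
    #define ALIGN32 __attribute__((aligned(32)))
    #else
    #define ALIGN32
    #endif

    // Illustrative only: with 32-byte alignment the kernel may use aligned loads
    // (_mm256_load_ps); an unaligned buffer would need _mm256_loadu_ps instead,
    // or risk a fault with the aligned form.
    int main() {
        ALIGN32 float buf[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
        __m256 v = _mm256_load_ps(buf);            // safe: buf is 32-byte aligned
        ALIGN32 float out[8];
        _mm256_store_ps(out, _mm256_add_ps(v, v)); // aligned store
        printf("%f\n", out[7]);                    // prints 14.000000
        return 0;
    }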

View file

@ -30,6 +30,14 @@
// 6) Truncate filter length to 2.5ms using rectangular window with 8-tap Hanning taper
//
#if defined(_MSC_VER)
#define ALIGN32 __declspec(align(32))
#elif defined(__GNUC__)
#define ALIGN32 __attribute__((aligned(32)))
#else
#define ALIGN32
#endif
static const float itd_1002_table[HRTF_AZIMUTHS] = {
-0.07851f, 0.85414f, 1.77170f, 2.71137f, 3.71065f, 4.74907f, 5.79892f, 6.82396f,
7.82837f, 8.80796f, 9.75426f, 10.68332f, 11.59979f, 12.48520f, 13.36135f, 14.19234f,
@ -42,7 +50,7 @@ static const float itd_1002_table[HRTF_AZIMUTHS] = {
-8.39670f, -7.23606f, -6.09663f, -5.05593f, -4.06186f, -3.07465f, -2.06122f, -1.05417f,
};
static const float ir_1002_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1002_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
8.341559e-01f, 1.886116e-02f, 2.677664e-01f, -7.037183e-02f, -4.147236e-02f, -2.761588e-01f, 2.310035e-01f, -1.643133e-01f,
@ -1497,7 +1505,7 @@ static const float itd_1003_table[HRTF_AZIMUTHS] = {
-6.64380f, -5.73462f, -4.83364f, -3.97025f, -3.08925f, -2.16621f, -1.19364f, -0.20709f,
};
static const float ir_1003_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1003_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
9.266240e-01f, 1.260510e-01f, 5.051008e-02f, -3.536678e-01f, 2.462246e-02f, 4.465557e-02f, 6.813228e-02f, -6.063477e-02f,
@ -2952,7 +2960,7 @@ static const float itd_1004_table[HRTF_AZIMUTHS] = {
-7.55720f, -6.55578f, -5.59246f, -4.69657f, -3.80733f, -2.88567f, -1.90337f, -0.89923f,
};
static const float ir_1004_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1004_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.326633e-01f, 4.279429e-01f, -5.910516e-02f, -2.480760e-01f, -9.903029e-02f, 9.215562e-02f, -2.893536e-02f, 5.464364e-02f,
@ -4407,7 +4415,7 @@ static const float itd_1005_table[HRTF_AZIMUTHS] = {
-6.80079f, -6.03878f, -5.25100f, -4.34973f, -3.39268f, -2.41226f, -1.45444f, -0.50375f,
};
static const float ir_1005_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1005_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
8.515557e-01f, 1.208618e-01f, 3.238278e-01f, -3.605847e-01f, -3.354420e-02f, -1.829174e-01f, 2.309960e-01f, -1.744711e-01f,
@ -5862,7 +5870,7 @@ static const float itd_1007_table[HRTF_AZIMUTHS] = {
-7.68135f, -6.69801f, -5.72186f, -4.72708f, -3.74413f, -2.77373f, -1.79032f, -0.81823f,
};
static const float ir_1007_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1007_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
6.544936e-01f, 2.820574e-01f, 1.850652e-01f, -2.597811e-01f, -5.585250e-02f, -7.975905e-02f, 8.143960e-02f, -5.044548e-02f,
@ -7317,7 +7325,7 @@ static const float itd_1012_table[HRTF_AZIMUTHS] = {
-7.32159f, -6.30684f, -5.31969f, -4.40260f, -3.50567f, -2.60925f, -1.70893f, -0.80401f,
};
static const float ir_1012_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1012_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
8.505165e-01f, 9.074762e-02f, 3.296598e-01f, -5.213905e-01f, 1.348379e-01f, -1.828924e-01f, 1.400077e-01f, -4.071996e-02f,
@ -8772,7 +8780,7 @@ static const float itd_1014_table[HRTF_AZIMUTHS] = {
-7.51312f, -6.52705f, -5.56262f, -4.72113f, -3.90664f, -3.07768f, -2.22719f, -1.37514f,
};
static const float ir_1014_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1014_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
6.542071e-01f, 4.575563e-01f, 1.118072e-02f, -1.823464e-01f, -2.222339e-01f, 1.371357e-01f, 7.027919e-03f, -5.534852e-02f,
@ -10227,7 +10235,7 @@ static const float itd_1017_table[HRTF_AZIMUTHS] = {
-7.46925f, -6.49073f, -5.52501f, -4.62178f, -3.74041f, -2.86207f, -1.97362f, -1.07512f,
};
static const float ir_1017_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1017_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.470867e-01f, 2.686078e-01f, 2.097923e-01f, -2.935018e-01f, -8.687224e-02f, -4.547367e-02f, 6.920631e-03f, 3.752071e-02f,
@ -11682,7 +11690,7 @@ static const float itd_1020_table[HRTF_AZIMUTHS] = {
-8.28071f, -7.36311f, -6.43732f, -5.49298f, -4.53728f, -3.57601f, -2.59830f, -1.63297f,
};
static const float ir_1020_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1020_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
6.953847e-01f, 3.081256e-01f, 2.474324e-01f, -3.025226e-01f, -1.119181e-01f, -4.966299e-02f, 5.727889e-02f, 6.715016e-03f,
@ -13137,7 +13145,7 @@ static const float itd_1021_table[HRTF_AZIMUTHS] = {
-8.12772f, -7.17689f, -6.23068f, -5.27554f, -4.32391f, -3.38489f, -2.46445f, -1.54407f,
};
static const float ir_1021_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1021_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.807186e-01f, 3.835520e-01f, 1.208801e-01f, -4.044311e-01f, -5.188029e-02f, -7.750225e-02f, 1.739668e-01f, -6.599168e-02f,
@ -14592,7 +14600,7 @@ static const float itd_1022_table[HRTF_AZIMUTHS] = {
-7.19675f, -6.30334f, -5.39609f, -4.47018f, -3.53964f, -2.62393f, -1.75389f, -0.90222f,
};
static const float ir_1022_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1022_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.053226e-01f, 2.645844e-01f, 2.462055e-01f, -2.145682e-01f, -1.333283e-01f, -1.751403e-01f, 2.721890e-01f, -1.743790e-01f,
@ -16047,7 +16055,7 @@ static const float itd_1026_table[HRTF_AZIMUTHS] = {
-7.45209f, -6.46598f, -5.49746f, -4.54220f, -3.60610f, -2.68084f, -1.74087f, -0.80841f,
};
static const float ir_1026_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1026_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.150396e-01f, 3.144234e-01f, 9.132840e-02f, -2.128668e-01f, -1.899010e-01f, 1.362356e-01f, -4.105226e-02f, 4.896281e-02f,
@ -17502,7 +17510,7 @@ static const float itd_1028_table[HRTF_AZIMUTHS] = {
-7.80099f, -6.89255f, -5.95721f, -5.04107f, -4.11968f, -3.20233f, -2.33316f, -1.46289f,
};
static const float ir_1028_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1028_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
9.491360e-01f, 2.952796e-01f, -1.585342e-01f, -3.497386e-01f, 1.204260e-01f, -4.886012e-02f, 5.238760e-02f, -8.209077e-03f,
@ -18957,7 +18965,7 @@ static const float itd_1038_table[HRTF_AZIMUTHS] = {
-6.69661f, -5.65906f, -4.62851f, -3.63493f, -2.66802f, -1.71997f, -0.76853f, 0.18497f,
};
static const float ir_1038_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1038_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
9.325991e-01f, 1.817283e-01f, 5.397613e-02f, -4.121773e-01f, -7.921759e-03f, -4.009945e-02f, 1.499187e-01f, -1.838252e-02f,
@ -20412,7 +20420,7 @@ static const float itd_1041_table[HRTF_AZIMUTHS] = {
-7.03257f, -6.07458f, -5.13664f, -4.24453f, -3.37177f, -2.49083f, -1.55807f, -0.62014f,
};
static const float ir_1041_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1041_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.012368e-01f, 2.006662e-01f, 3.173636e-01f, -2.865733e-01f, 1.345042e-01f, -5.030394e-01f, 3.717757e-01f, -1.138039e-01f,
@ -21867,7 +21875,7 @@ static const float itd_1042_table[HRTF_AZIMUTHS] = {
-7.79822f, -6.84403f, -5.88862f, -4.94525f, -3.99704f, -3.03547f, -2.06207f, -1.07916f,
};
static const float ir_1042_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1042_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
9.114429e-01f, 2.201994e-03f, 3.703525e-01f, -4.825957e-01f, 1.210277e-01f, -2.471091e-01f, 1.766662e-01f, -5.840113e-03f,
@ -23322,7 +23330,7 @@ static const float itd_1043_table[HRTF_AZIMUTHS] = {
-6.81973f, -5.86664f, -4.92096f, -3.99232f, -3.07973f, -2.16321f, -1.20142f, -0.22538f,
};
static const float ir_1043_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1043_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.339447e-01f, 1.339343e-01f, 4.031645e-01f, -4.891909e-01f, 8.751389e-02f, -2.110783e-01f, 2.573841e-01f, -1.050324e-01f,
@ -24777,7 +24785,7 @@ static const float itd_1044_table[HRTF_AZIMUTHS] = {
-7.31965f, -6.37963f, -5.45379f, -4.54748f, -3.59370f, -2.59525f, -1.67705f, -0.73882f,
};
static const float ir_1044_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1044_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.028871e-01f, 2.381998e-01f, 4.686725e-01f, -5.412304e-01f, 1.262568e-01f, -3.198619e-01f, 1.963468e-01f, -4.016186e-02f,
@ -26232,7 +26240,7 @@ static const float itd_1047_table[HRTF_AZIMUTHS] = {
-9.01225f, -7.93667f, -6.85884f, -5.78919f, -4.72064f, -3.66640f, -2.66295f, -1.65780f,
};
static const float ir_1047_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1047_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
7.788578e-01f, 1.598904e-01f, 2.366520e-01f, -3.524184e-01f, -8.784474e-03f, -5.144472e-02f, 8.679429e-02f, -1.634258e-02f,
@ -27687,7 +27695,7 @@ static const float itd_1048_table[HRTF_AZIMUTHS] = {
-7.15985f, -6.30472f, -5.41513f, -4.54994f, -3.62385f, -2.66142f, -1.79111f, -0.94033f,
};
static const float ir_1048_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1048_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
8.865287e-01f, 2.972076e-01f, -1.305391e-01f, -1.213860e-01f, -1.948535e-01f, 1.458427e-01f, -8.912857e-02f, 9.493978e-02f,
@ -29142,7 +29150,7 @@ static const float itd_1050_table[HRTF_AZIMUTHS] = {
-6.52690f, -5.58085f, -4.64474f, -3.71658f, -2.80444f, -1.92096f, -1.07543f, -0.23450f,
};
static const float ir_1050_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1050_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
9.005889e-01f, -6.452200e-02f, 3.675525e-01f, -4.309962e-01f, 7.086621e-02f, -9.161573e-02f, -4.290351e-02f, 9.057393e-02f,
@ -30597,7 +30605,7 @@ static const float itd_1052_table[HRTF_AZIMUTHS] = {
-6.50194f, -5.61262f, -4.72534f, -3.84869f, -2.97504f, -2.10269f, -1.23783f, -0.36766f,
};
static const float ir_1052_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1052_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
6.650009e-01f, 3.507944e-01f, -3.274164e-02f, -1.830690e-01f, -7.720853e-02f, 1.030789e-01f, 3.877069e-02f, -5.674440e-02f,
@ -32052,7 +32060,7 @@ static const float itd_1054_table[HRTF_AZIMUTHS] = {
-7.35642f, -6.36606f, -5.37262f, -4.40394f, -3.44967f, -2.51333f, -1.59834f, -0.68300f,
};
static const float ir_1054_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1054_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
8.629450e-01f, 1.677356e-01f, 1.467365e-01f, -3.248726e-01f, -5.105235e-02f, -5.031096e-02f, 1.796471e-01f, -1.298094e-01f,
@ -33507,7 +33515,7 @@ static const float itd_1056_table[HRTF_AZIMUTHS] = {
-6.99437f, -5.82430f, -4.73408f, -3.76713f, -2.88870f, -2.05251f, -1.18172f, -0.32736f,
};
static const float ir_1056_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1056_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
8.031418e-01f, 2.411323e-01f, 1.417951e-01f, -2.476192e-01f, -1.076012e-01f, 1.009190e-01f, 7.761394e-02f, -1.250722e-01f,
@ -34962,7 +34970,7 @@ static const float itd_1058_table[HRTF_AZIMUTHS] = {
-7.78555f, -6.81447f, -5.85685f, -4.89466f, -3.93902f, -2.98660f, -2.01925f, -1.05758f,
};
static const float ir_1058_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
ALIGN32 static const float ir_1058_table[HRTF_AZIMUTHS][2][HRTF_TAPS] = {
// azimuth = 0
{{
9.307292e-01f, 5.592706e-02f, 2.567367e-01f, -4.525413e-01f, 1.378666e-01f, -2.503950e-01f, 1.983286e-01f, 5.925522e-03f,

View file

@ -211,6 +211,49 @@ static inline int32_t peaklog2(float* input0, float* input1) {
return (e << LOG2_FRACBITS) - (c2 >> 3);
}
//
// Peak detection and -log2(x) for float input (quad)
// x < 2^(31-LOG2_HEADROOM) returns 0x7fffffff
// x > 2^LOG2_HEADROOM undefined
//
static inline int32_t peaklog2(float* input0, float* input1, float* input2, float* input3) {
// float as integer bits
int32_t u0 = *(int32_t*)input0;
int32_t u1 = *(int32_t*)input1;
int32_t u2 = *(int32_t*)input2;
int32_t u3 = *(int32_t*)input3;
// max absolute value
u0 &= IEEE754_FABS_MASK;
u1 &= IEEE754_FABS_MASK;
u2 &= IEEE754_FABS_MASK;
u3 &= IEEE754_FABS_MASK;
int32_t peak = MAX(MAX(u0, u1), MAX(u2, u3));
// split into e and x - 1.0
int32_t e = IEEE754_EXPN_BIAS - (peak >> IEEE754_MANT_BITS) + LOG2_HEADROOM;
int32_t x = (peak << (31 - IEEE754_MANT_BITS)) & 0x7fffffff;
// saturate
if (e > 31) {
return 0x7fffffff;
}
int k = x >> (31 - LOG2_TABBITS);
// polynomial for log2(1+x) over x=[0,1]
int32_t c0 = log2Table[k][0];
int32_t c1 = log2Table[k][1];
int32_t c2 = log2Table[k][2];
c1 += MULHI(c0, x);
c2 += MULHI(c1, x);
// reconstruct result in Q26
return (e << LOG2_FRACBITS) - (c2 >> 3);
}
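//
// A rough floating-point cross-check for the fixed-point quad peaklog2() above, intended
// only for tests. It assumes LOG2_HEADROOM and LOG2_FRACBITS carry the meaning implied by
// this file (headroom bias in bits, Q26 result); the function name is illustrative and not
// part of this change. Requires <cmath> and <algorithm>.
//
static inline int32_t peaklog2Reference(float x0, float x1, float x2, float x3) {
    float peak = std::max(std::max(fabsf(x0), fabsf(x1)), std::max(fabsf(x2), fabsf(x3)));
    // result ~= (LOG2_HEADROOM - log2(peak)) scaled to Q(LOG2_FRACBITS), saturated like the fixed-point version
    double q = (LOG2_HEADROOM - log2((double)peak)) * (double)(1 << LOG2_FRACBITS);
    return (q >= 2147483647.0) ? 0x7fffffff : (int32_t)q;
}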
//
// Compute exp2(-x) for x=[0,32] in Q26, result in Q31
// x < 0 undefined
@ -258,7 +301,7 @@ class PeakFilterT {
static_assert((CIC1 - 1) + (CIC2 - 1) == (N - 1), "Total CIC delay must be N-1");
int32_t _buffer[2*N] = {}; // shared FIFO
int _index = 0;
size_t _index = 0;
int32_t _acc1 = 0; // CIC1 integrator
int32_t _acc2 = 0; // CIC2 integrator
@ -267,21 +310,21 @@ public:
PeakFilterT() {
// fill history
for (int n = 0; n < N-1; n++) {
for (size_t n = 0; n < N-1; n++) {
process(0x7fffffff);
}
}
int32_t process(int32_t x) {
const int MASK = 2*N - 1; // buffer wrap
int i = _index;
const size_t MASK = 2*N - 1; // buffer wrap
size_t i = _index;
// Fast peak-hold using a running-min filter. Finds the peak (min) value
// in the sliding window of N-1 samples, using only log2(N) comparisons.
// Hold time of N-1 samples exactly cancels the step response of FIR filter.
for (int n = 1; n < N; n <<= 1) {
for (size_t n = 1; n < N; n <<= 1) {
_buffer[i] = x;
i = (i + n) & MASK;
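// For unit tests, a brute-force counterpart of the peak-hold stage described above can be
// handy: an O(N) sliding-window minimum over the most recent N-1 inputs. This sketch covers
// only the hold stage, not the CIC smoothing that PeakFilterT applies afterwards; the class
// name and the exact window convention are assumptions for illustration.
template<int N>
class PeakHoldReference {
    int32_t _history[N - 1];
    int _pos = 0;
public:
    PeakHoldReference() {
        for (int n = 0; n < N - 1; n++) {
            _history[n] = 0x7fffffff;
        }
    }
    int32_t process(int32_t x) {
        _history[_pos] = x;                     // remember the newest sample
        _pos = (_pos + 1) % (N - 1);
        int32_t y = 0x7fffffff;
        for (int n = 0; n < N - 1; n++) {       // min over the last N-1 inputs
            y = MIN(y, _history[n]);
        }
        return y;
    }
};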
@ -329,13 +372,13 @@ class MonoDelay {
static_assert((N & (N - 1)) == 0, "N must be a power of 2");
float _buffer[N] = {};
int _index = 0;
size_t _index = 0;
public:
void process(float& x) {
const int MASK = N - 1; // buffer wrap
int i = _index;
const size_t MASK = N - 1; // buffer wrap
size_t i = _index;
_buffer[i] = x;
@ -356,13 +399,13 @@ class StereoDelay {
static_assert((N & (N - 1)) == 0, "N must be a power of 2");
float _buffer[2*N] = {};
int _index = 0;
size_t _index = 0;
public:
void process(float& x0, float& x1) {
const int MASK = 2*N - 1; // buffer wrap
int i = _index;
const size_t MASK = 2*N - 1; // buffer wrap
size_t i = _index;
_buffer[i+0] = x0;
_buffer[i+1] = x1;
@ -376,6 +419,39 @@ public:
}
};
//
// N-1 sample delay (quad)
//
template<int N>
class QuadDelay {
static_assert((N & (N - 1)) == 0, "N must be a power of 2");
float _buffer[4*N] = {};
size_t _index = 0;
public:
void process(float& x0, float& x1, float& x2, float& x3) {
const size_t MASK = 4*N - 1; // buffer wrap
size_t i = _index;
_buffer[i+0] = x0;
_buffer[i+1] = x1;
_buffer[i+2] = x2;
_buffer[i+3] = x3;
i = (i + 4*(N - 1)) & MASK;
x0 = _buffer[i+0];
x1 = _buffer[i+1];
x2 = _buffer[i+2];
x3 = _buffer[i+3];
_index = i;
}
};
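// Minimal usage sketch for the quad delay above: with N = 4 each channel comes back
// delayed by N-1 = 3 calls (values illustrative, zeros until the buffer fills).
QuadDelay<4> quadDelay;
float s0 = 1.0f, s1 = 2.0f, s2 = 3.0f, s3 = 4.0f;
quadDelay.process(s0, s1, s2, s3);   // s0..s3 now hold whatever was passed in 3 calls earlier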
//
// Limiter (common)
//
@ -428,7 +504,7 @@ LimiterImpl::LimiterImpl(int sampleRate) {
//
void LimiterImpl::setThreshold(float threshold) {
const double OUT_CEILING = -0.3;
const double OUT_CEILING = -0.3; // cannot be 0.0, due to dither
const double Q31_TO_Q15 = 32768 / 2147483648.0;
// limiter threshold = -48dB to 0dB
@ -537,12 +613,12 @@ int32_t LimiterImpl::envelope(int32_t attn) {
// arc = (attn-rms)*6/attn for attn = 1dB to 6dB
// arc = (attn-rms)*6/6 for attn > 6dB
int bits = MIN(attn >> 20, 0x3f); // saturate 1/attn at 6dB
_arc = MAX(attn - _rms, 0); // peak/rms = (attn-rms)
_arc = MULHI(_arc, invTable[bits]); // normalized peak/rms = (attn-rms)/attn
_arc = MIN(_arc, NARC - 1); // saturate at 6dB
size_t bits = MIN(attn >> 20, 0x3f); // saturate 1/attn at 6dB
_arc = MAX(attn - _rms, 0); // peak/rms = (attn-rms)
_arc = MULHI(_arc, invTable[bits]); // normalized peak/rms = (attn-rms)/attn
_arc = MIN(_arc, NARC - 1); // saturate at 6dB
_arcRelease = 0x7fffffff; // reset release
_arcRelease = 0x7fffffff; // reset release
}
_attn = attn;
@ -571,8 +647,8 @@ public:
};
template<int N>
void LimiterMono<N>::process(float* input, int16_t* output, int numFrames)
{
void LimiterMono<N>::process(float* input, int16_t* output, int numFrames) {
for (int n = 0; n < numFrames; n++) {
// peak detect and convert to log2 domain
@ -623,8 +699,8 @@ public:
};
template<int N>
void LimiterStereo<N>::process(float* input, int16_t* output, int numFrames)
{
void LimiterStereo<N>::process(float* input, int16_t* output, int numFrames) {
for (int n = 0; n < numFrames; n++) {
// peak detect and convert to log2 domain
@ -663,6 +739,71 @@ void LimiterStereo<N>::process(float* input, int16_t* output, int numFrames)
}
}
//
// Limiter (quad)
//
template<int N>
class LimiterQuad : public LimiterImpl {
PeakFilter<N> _filter;
QuadDelay<N> _delay;
public:
LimiterQuad(int sampleRate) : LimiterImpl(sampleRate) {}
// interleaved quad input/output
void process(float* input, int16_t* output, int numFrames) override;
};
template<int N>
void LimiterQuad<N>::process(float* input, int16_t* output, int numFrames) {
for (int n = 0; n < numFrames; n++) {
// peak detect and convert to log2 domain
int32_t peak = peaklog2(&input[4*n+0], &input[4*n+1], &input[4*n+2], &input[4*n+3]);
// compute limiter attenuation
int32_t attn = MAX(_threshold - peak, 0);
// apply envelope
attn = envelope(attn);
// convert from log2 domain
attn = fixexp2(attn);
// lowpass filter
attn = _filter.process(attn);
float gain = attn * _outGain;
// delay audio
float x0 = input[4*n+0];
float x1 = input[4*n+1];
float x2 = input[4*n+2];
float x3 = input[4*n+3];
_delay.process(x0, x1, x2, x3);
// apply gain
x0 *= gain;
x1 *= gain;
x2 *= gain;
x3 *= gain;
// apply dither
float d = dither();
x0 += d;
x1 += d;
x2 += d;
x3 += d;
// store 16-bit output
output[4*n+0] = (int16_t)floatToInt(x0);
output[4*n+1] = (int16_t)floatToInt(x1);
output[4*n+2] = (int16_t)floatToInt(x2);
output[4*n+3] = (int16_t)floatToInt(x3);
}
}
//
// Public API
//
@ -695,6 +836,19 @@ AudioLimiter::AudioLimiter(int sampleRate, int numChannels) {
_impl = new LimiterStereo<128>(sampleRate);
}
} else if (numChannels == 4) {
// ~1.5ms lookahead for all rates
if (sampleRate < 16000) {
_impl = new LimiterQuad<16>(sampleRate);
} else if (sampleRate < 32000) {
_impl = new LimiterQuad<32>(sampleRate);
} else if (sampleRate < 64000) {
_impl = new LimiterQuad<64>(sampleRate);
} else {
_impl = new LimiterQuad<128>(sampleRate);
}
} else {
assert(0); // unsupported
}
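// Hypothetical caller for the new 4-channel path; the wrapper method names below are
// assumed from the implementation above (see AudioLimiter.h for the actual public API):
//
//   AudioLimiter limiter(48000, 4);              // numChannels == 4 selects LimiterQuad<128> at 48 kHz
//   limiter.setThreshold(-6.0f);                 // dB, limited to the -48..0 dB range noted above
//   limiter.render(quadIn, quadOut, numFrames);  // 4*numFrames interleaved floats in, int16_t out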

View file

@ -99,18 +99,22 @@ static void cubicInterpolation(const float* input, float* output, int inputSize,
}
}
int AudioSRC::createRationalFilter(int upFactor, int downFactor, float gain) {
int numTaps = PROTOTYPE_TAPS;
int AudioSRC::createRationalFilter(int upFactor, int downFactor, float gain, Quality quality) {
int prototypeTaps = prototypeFilterTable[quality].taps;
int prototypeCoefs = prototypeFilterTable[quality].coefs;
const float* prototypeFilter = prototypeFilterTable[quality].filter;
int numTaps = prototypeTaps;
int numPhases = upFactor;
int numCoefs = numTaps * numPhases;
int oldCoefs = numCoefs;
int prototypeCoefs = PROTOTYPE_TAPS * PROTOTYPE_PHASES;
//
// When downsampling, we can lower the filter cutoff by downFactor/upFactor using the
// time-scaling property of the Fourier transform. The gain is adjusted accordingly.
//
if (downFactor > upFactor) {
int oldCoefs = numCoefs;
numCoefs = ((int64_t)oldCoefs * downFactor) / upFactor;
numTaps = (numCoefs + upFactor - 1) / upFactor;
gain *= (float)oldCoefs / numCoefs;
@ -149,18 +153,22 @@ int AudioSRC::createRationalFilter(int upFactor, int downFactor, float gain) {
return numTaps;
}
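// Worked example of the scaling above: resampling 48000 -> 44100 reduces to upFactor = 147,
// downFactor = 160 (gcd 300). Because downFactor > upFactor, numCoefs grows by 160/147 (the
// cutoff drops to 147/160 of the prototype's), numTaps is rounded up to a whole number of
// phases, and gain is scaled by oldCoefs/numCoefs (about 0.92) so the stretched filter keeps
// its overall gain.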
int AudioSRC::createIrrationalFilter(int upFactor, int downFactor, float gain) {
int numTaps = PROTOTYPE_TAPS;
int AudioSRC::createIrrationalFilter(int upFactor, int downFactor, float gain, Quality quality) {
int prototypeTaps = prototypeFilterTable[quality].taps;
int prototypeCoefs = prototypeFilterTable[quality].coefs;
const float* prototypeFilter = prototypeFilterTable[quality].filter;
int numTaps = prototypeTaps;
int numPhases = upFactor;
int numCoefs = numTaps * numPhases;
int oldCoefs = numCoefs;
int prototypeCoefs = PROTOTYPE_TAPS * PROTOTYPE_PHASES;
//
// When downsampling, we can lower the filter cutoff by downFactor/upFactor using the
// time-scaling property of the Fourier transform. The gain is adjusted accordingly.
//
if (downFactor > upFactor) {
int oldCoefs = numCoefs;
numCoefs = ((int64_t)oldCoefs * downFactor) / upFactor;
numTaps = (numCoefs + upFactor - 1) / upFactor;
gain *= (float)oldCoefs / numCoefs;
@ -1405,7 +1413,8 @@ int AudioSRC::render(float** inputs, float** outputs, int inputFrames) {
return outputFrames;
}
AudioSRC::AudioSRC(int inputSampleRate, int outputSampleRate, int numChannels) {
AudioSRC::AudioSRC(int inputSampleRate, int outputSampleRate, int numChannels, Quality quality) {
assert(inputSampleRate > 0);
assert(outputSampleRate > 0);
assert(numChannels > 0);
@ -1433,9 +1442,9 @@ AudioSRC::AudioSRC(int inputSampleRate, int outputSampleRate, int numChannels) {
// create the polyphase filter
if (_step == 0) {
_numTaps = createRationalFilter(_upFactor, _downFactor, 1.0f);
_numTaps = createRationalFilter(_upFactor, _downFactor, 1.0f, quality);
} else {
_numTaps = createIrrationalFilter(_upFactor, _downFactor, 1.0f);
_numTaps = createIrrationalFilter(_upFactor, _downFactor, 1.0f, quality);
}
//printf("up=%d down=%.3f taps=%d\n", _upFactor, _downFactor + (LO32(_step)<<SRC_PHASEBITS) * Q32_TO_FLOAT, _numTaps);

View file

@ -17,7 +17,7 @@
static const int SRC_MAX_CHANNELS = 4;
// polyphase filter
static const int SRC_PHASEBITS = 8;
static const int SRC_PHASEBITS = 9;
static const int SRC_PHASES = (1 << SRC_PHASEBITS);
static const int SRC_FRACBITS = 32 - SRC_PHASEBITS;
static const uint32_t SRC_FRACMASK = (1 << SRC_FRACBITS) - 1;
@ -31,7 +31,13 @@ static const int SRC_BLOCK = 256;
class AudioSRC {
public:
AudioSRC(int inputSampleRate, int outputSampleRate, int numChannels);
enum Quality {
LOW_QUALITY,
MEDIUM_QUALITY,
HIGH_QUALITY
};
AudioSRC(int inputSampleRate, int outputSampleRate, int numChannels, Quality quality = MEDIUM_QUALITY);
~AudioSRC();
// deinterleaved float input/output (native format)
@ -70,8 +76,8 @@ private:
int64_t _offset;
int64_t _step;
int createRationalFilter(int upFactor, int downFactor, float gain);
int createIrrationalFilter(int upFactor, int downFactor, float gain);
int createRationalFilter(int upFactor, int downFactor, float gain, Quality quality);
int createIrrationalFilter(int upFactor, int downFactor, float gain, Quality quality);
int multirateFilter1(const float* input0, float* output0, int inputFrames);
int multirateFilter2(const float* input0, const float* input1, float* output0, float* output1, int inputFrames);
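// Hypothetical usage of the new quality parameter with the deinterleaved float render()
// defined in AudioSRC.cpp (buffer names and sizes are illustrative; output buffers should
// be sized for the worst-case rate ratio):
//
//   AudioSRC src(44100, 48000, 2, AudioSRC::HIGH_QUALITY);
//   float* inputs[2]  = { inLeft, inRight };     // 512 input frames per channel
//   float* outputs[2] = { outLeft, outRight };   // sized with headroom, e.g. 1024 frames
//   int outputFrames = src.render(inputs, outputs, 512);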

File diff suppressed because it is too large

View file

@ -1,96 +0,0 @@
//
// AudioHRTF_avx.cpp
// libraries/audio/src/avx
//
// Created by Ken Cooke on 1/17/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#if defined(_M_IX86) || defined(_M_X64) || defined(__i386__) || defined(__x86_64__)
#include <assert.h>
#include <immintrin.h>
#include "../AudioHRTF.h"
#ifndef __AVX__
#error Must be compiled with /arch:AVX or -mavx.
#endif
// 1 channel input, 4 channel output
void FIR_1x4_AVX(float* src, float* dst0, float* dst1, float* dst2, float* dst3, float coef[4][HRTF_TAPS], int numFrames) {
float* coef0 = coef[0] + HRTF_TAPS - 1; // process backwards
float* coef1 = coef[1] + HRTF_TAPS - 1;
float* coef2 = coef[2] + HRTF_TAPS - 1;
float* coef3 = coef[3] + HRTF_TAPS - 1;
assert(numFrames % 8 == 0);
for (int i = 0; i < numFrames; i += 8) {
__m256 acc0 = _mm256_setzero_ps();
__m256 acc1 = _mm256_setzero_ps();
__m256 acc2 = _mm256_setzero_ps();
__m256 acc3 = _mm256_setzero_ps();
float* ps = &src[i - HRTF_TAPS + 1]; // process forwards
assert(HRTF_TAPS % 8 == 0);
for (int k = 0; k < HRTF_TAPS; k += 8) {
acc0 = _mm256_add_ps(acc0, _mm256_mul_ps(_mm256_broadcast_ss(&coef0[-k-0]), _mm256_loadu_ps(&ps[k+0])));
acc1 = _mm256_add_ps(acc1, _mm256_mul_ps(_mm256_broadcast_ss(&coef1[-k-0]), _mm256_loadu_ps(&ps[k+0])));
acc2 = _mm256_add_ps(acc2, _mm256_mul_ps(_mm256_broadcast_ss(&coef2[-k-0]), _mm256_loadu_ps(&ps[k+0])));
acc3 = _mm256_add_ps(acc3, _mm256_mul_ps(_mm256_broadcast_ss(&coef3[-k-0]), _mm256_loadu_ps(&ps[k+0])));
acc0 = _mm256_add_ps(acc0, _mm256_mul_ps(_mm256_broadcast_ss(&coef0[-k-1]), _mm256_loadu_ps(&ps[k+1])));
acc1 = _mm256_add_ps(acc1, _mm256_mul_ps(_mm256_broadcast_ss(&coef1[-k-1]), _mm256_loadu_ps(&ps[k+1])));
acc2 = _mm256_add_ps(acc2, _mm256_mul_ps(_mm256_broadcast_ss(&coef2[-k-1]), _mm256_loadu_ps(&ps[k+1])));
acc3 = _mm256_add_ps(acc3, _mm256_mul_ps(_mm256_broadcast_ss(&coef3[-k-1]), _mm256_loadu_ps(&ps[k+1])));
acc0 = _mm256_add_ps(acc0, _mm256_mul_ps(_mm256_broadcast_ss(&coef0[-k-2]), _mm256_loadu_ps(&ps[k+2])));
acc1 = _mm256_add_ps(acc1, _mm256_mul_ps(_mm256_broadcast_ss(&coef1[-k-2]), _mm256_loadu_ps(&ps[k+2])));
acc2 = _mm256_add_ps(acc2, _mm256_mul_ps(_mm256_broadcast_ss(&coef2[-k-2]), _mm256_loadu_ps(&ps[k+2])));
acc3 = _mm256_add_ps(acc3, _mm256_mul_ps(_mm256_broadcast_ss(&coef3[-k-2]), _mm256_loadu_ps(&ps[k+2])));
acc0 = _mm256_add_ps(acc0, _mm256_mul_ps(_mm256_broadcast_ss(&coef0[-k-3]), _mm256_loadu_ps(&ps[k+3])));
acc1 = _mm256_add_ps(acc1, _mm256_mul_ps(_mm256_broadcast_ss(&coef1[-k-3]), _mm256_loadu_ps(&ps[k+3])));
acc2 = _mm256_add_ps(acc2, _mm256_mul_ps(_mm256_broadcast_ss(&coef2[-k-3]), _mm256_loadu_ps(&ps[k+3])));
acc3 = _mm256_add_ps(acc3, _mm256_mul_ps(_mm256_broadcast_ss(&coef3[-k-3]), _mm256_loadu_ps(&ps[k+3])));
acc0 = _mm256_add_ps(acc0, _mm256_mul_ps(_mm256_broadcast_ss(&coef0[-k-4]), _mm256_loadu_ps(&ps[k+4])));
acc1 = _mm256_add_ps(acc1, _mm256_mul_ps(_mm256_broadcast_ss(&coef1[-k-4]), _mm256_loadu_ps(&ps[k+4])));
acc2 = _mm256_add_ps(acc2, _mm256_mul_ps(_mm256_broadcast_ss(&coef2[-k-4]), _mm256_loadu_ps(&ps[k+4])));
acc3 = _mm256_add_ps(acc3, _mm256_mul_ps(_mm256_broadcast_ss(&coef3[-k-4]), _mm256_loadu_ps(&ps[k+4])));
acc0 = _mm256_add_ps(acc0, _mm256_mul_ps(_mm256_broadcast_ss(&coef0[-k-5]), _mm256_loadu_ps(&ps[k+5])));
acc1 = _mm256_add_ps(acc1, _mm256_mul_ps(_mm256_broadcast_ss(&coef1[-k-5]), _mm256_loadu_ps(&ps[k+5])));
acc2 = _mm256_add_ps(acc2, _mm256_mul_ps(_mm256_broadcast_ss(&coef2[-k-5]), _mm256_loadu_ps(&ps[k+5])));
acc3 = _mm256_add_ps(acc3, _mm256_mul_ps(_mm256_broadcast_ss(&coef3[-k-5]), _mm256_loadu_ps(&ps[k+5])));
acc0 = _mm256_add_ps(acc0, _mm256_mul_ps(_mm256_broadcast_ss(&coef0[-k-6]), _mm256_loadu_ps(&ps[k+6])));
acc1 = _mm256_add_ps(acc1, _mm256_mul_ps(_mm256_broadcast_ss(&coef1[-k-6]), _mm256_loadu_ps(&ps[k+6])));
acc2 = _mm256_add_ps(acc2, _mm256_mul_ps(_mm256_broadcast_ss(&coef2[-k-6]), _mm256_loadu_ps(&ps[k+6])));
acc3 = _mm256_add_ps(acc3, _mm256_mul_ps(_mm256_broadcast_ss(&coef3[-k-6]), _mm256_loadu_ps(&ps[k+6])));
acc0 = _mm256_add_ps(acc0, _mm256_mul_ps(_mm256_broadcast_ss(&coef0[-k-7]), _mm256_loadu_ps(&ps[k+7])));
acc1 = _mm256_add_ps(acc1, _mm256_mul_ps(_mm256_broadcast_ss(&coef1[-k-7]), _mm256_loadu_ps(&ps[k+7])));
acc2 = _mm256_add_ps(acc2, _mm256_mul_ps(_mm256_broadcast_ss(&coef2[-k-7]), _mm256_loadu_ps(&ps[k+7])));
acc3 = _mm256_add_ps(acc3, _mm256_mul_ps(_mm256_broadcast_ss(&coef3[-k-7]), _mm256_loadu_ps(&ps[k+7])));
}
_mm256_storeu_ps(&dst0[i], acc0);
_mm256_storeu_ps(&dst1[i], acc1);
_mm256_storeu_ps(&dst2[i], acc2);
_mm256_storeu_ps(&dst3[i], acc3);
}
_mm256_zeroupper();
}
#endif

View file

@ -0,0 +1,94 @@
//
// AudioHRTF_avx2.cpp
// libraries/audio/src
//
// Created by Ken Cooke on 1/17/16.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#if defined(_M_IX86) || defined(_M_X64) || defined(__i386__) || defined(__x86_64__)
#include <assert.h>
#include <immintrin.h> // AVX2
#include "../AudioHRTF.h"
#ifndef __AVX2__
#error Must be compiled with /arch:AVX2 or -mavx2 -mfma.
#endif
#if defined(__GNUC__) && !defined(__clang__)
// for some reason, GCC -O2 results in poorly optimized code
#pragma GCC optimize("Os")
#endif
// 1 channel input, 4 channel output
void FIR_1x4_AVX2(float* src, float* dst0, float* dst1, float* dst2, float* dst3, float coef[4][HRTF_TAPS], int numFrames) {
float* coef0 = coef[0] + HRTF_TAPS - 1; // process backwards
float* coef1 = coef[1] + HRTF_TAPS - 1;
float* coef2 = coef[2] + HRTF_TAPS - 1;
float* coef3 = coef[3] + HRTF_TAPS - 1;
assert(numFrames % 8 == 0);
for (int i = 0; i < numFrames; i += 8) {
__m256 acc0 = _mm256_setzero_ps();
__m256 acc1 = _mm256_setzero_ps();
__m256 acc2 = _mm256_setzero_ps();
__m256 acc3 = _mm256_setzero_ps();
__m256 acc4 = _mm256_setzero_ps();
__m256 acc5 = _mm256_setzero_ps();
__m256 acc6 = _mm256_setzero_ps();
__m256 acc7 = _mm256_setzero_ps();
float* ps = &src[i - HRTF_TAPS + 1]; // process forwards
assert(HRTF_TAPS % 4 == 0);
for (int k = 0; k < HRTF_TAPS; k += 4) {
__m256 x0 = _mm256_loadu_ps(&ps[k+0]);
acc0 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef0[-k-0]), x0, acc0);
acc1 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef1[-k-0]), x0, acc1);
acc2 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef2[-k-0]), x0, acc2);
acc3 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef3[-k-0]), x0, acc3);
__m256 x1 = _mm256_loadu_ps(&ps[k+1]);
acc4 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef0[-k-1]), x1, acc4);
acc5 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef1[-k-1]), x1, acc5);
acc6 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef2[-k-1]), x1, acc6);
acc7 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef3[-k-1]), x1, acc7);
__m256 x2 = _mm256_loadu_ps(&ps[k+2]);
acc0 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef0[-k-2]), x2, acc0);
acc1 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef1[-k-2]), x2, acc1);
acc2 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef2[-k-2]), x2, acc2);
acc3 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef3[-k-2]), x2, acc3);
__m256 x3 = _mm256_loadu_ps(&ps[k+3]);
acc4 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef0[-k-3]), x3, acc4);
acc5 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef1[-k-3]), x3, acc5);
acc6 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef2[-k-3]), x3, acc6);
acc7 = _mm256_fmadd_ps(_mm256_broadcast_ss(&coef3[-k-3]), x3, acc7);
}
acc0 = _mm256_add_ps(acc0, acc4);
acc1 = _mm256_add_ps(acc1, acc5);
acc2 = _mm256_add_ps(acc2, acc6);
acc3 = _mm256_add_ps(acc3, acc7);
_mm256_storeu_ps(&dst0[i], acc0);
_mm256_storeu_ps(&dst1[i], acc1);
_mm256_storeu_ps(&dst2[i], acc2);
_mm256_storeu_ps(&dst3[i], acc3);
}
_mm256_zeroupper();
}
#endif
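// A plain scalar reference for the kernel above (no SIMD), useful for numerically checking
// the AVX2 path. It assumes the same layout the intrinsics imply: src[] has at least
// HRTF_TAPS-1 samples of history before index 0, and coef[ch][] is applied newest-sample
// first. The function name is illustrative and not part of this change.
static void FIR_1x4_scalar(float* src, float* dst0, float* dst1, float* dst2, float* dst3,
                           float coef[4][HRTF_TAPS], int numFrames) {
    float* dst[4] = { dst0, dst1, dst2, dst3 };
    for (int i = 0; i < numFrames; i++) {
        for (int ch = 0; ch < 4; ch++) {
            float acc = 0.0f;
            for (int k = 0; k < HRTF_TAPS; k++) {
                acc += coef[ch][k] * src[i - k];   // tap k weights the sample k frames back
            }
            dst[ch][i] = acc;
        }
    }
}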

View file

@ -50,9 +50,20 @@ const glm::vec3 DEFAULT_LOCAL_AABOX_SCALE(1.0f);
const QString AvatarData::FRAME_NAME = "com.highfidelity.recording.AvatarData";
namespace AvatarDataPacket {
// NOTE: AvatarDataPackets start with a uint16_t sequence number that is not reflected in the Header structure.
PACKED_BEGIN struct Header {
uint8_t packetStateFlags; // state flags, currently used to indicate if the packet is a minimal or fuller packet
} PACKED_END;
const size_t HEADER_SIZE = 1;
PACKED_BEGIN struct MinimalAvatarInfo {
float globalPosition[3]; // avatar's position
} PACKED_END;
const size_t MINIMAL_AVATAR_INFO_SIZE = 12;
PACKED_BEGIN struct AvatarInfo {
float position[3]; // skeletal model's position
float globalPosition[3]; // avatar's position
float globalBoundingBoxCorner[3]; // global position of the lowest corner of the avatar's bounding box
@ -65,16 +76,16 @@ namespace AvatarDataPacket {
float sensorToWorldTrans[3]; // fourth column of sensor to world matrix
uint8_t flags;
} PACKED_END;
const size_t HEADER_SIZE = 81;
const size_t AVATAR_INFO_SIZE = 81;
// only present if HAS_REFERENTIAL flag is set in header.flags
// only present if HAS_REFERENTIAL flag is set in AvatarInfo.flags
PACKED_BEGIN struct ParentInfo {
uint8_t parentUUID[16]; // rfc 4122 encoded
uint16_t parentJointIndex;
} PACKED_END;
const size_t PARENT_INFO_SIZE = 18;
// only present if IS_FACESHIFT_CONNECTED flag is set in header.flags
// only present if IS_FACESHIFT_CONNECTED flag is set in AvatarInfo.flags
PACKED_BEGIN struct FaceTrackerInfo {
float leftEyeBlink;
float rightEyeBlink;
@ -124,6 +135,8 @@ AvatarData::AvatarData() :
setBodyRoll(0.0f);
ASSERT(sizeof(AvatarDataPacket::Header) == AvatarDataPacket::HEADER_SIZE);
ASSERT(sizeof(AvatarDataPacket::MinimalAvatarInfo) == AvatarDataPacket::MINIMAL_AVATAR_INFO_SIZE);
ASSERT(sizeof(AvatarDataPacket::AvatarInfo) == AvatarDataPacket::AVATAR_INFO_SIZE);
ASSERT(sizeof(AvatarDataPacket::ParentInfo) == AvatarDataPacket::PARENT_INFO_SIZE);
ASSERT(sizeof(AvatarDataPacket::FaceTrackerInfo) == AvatarDataPacket::FACE_TRACKER_INFO_SIZE);
}
@ -132,9 +145,9 @@ AvatarData::~AvatarData() {
delete _headData;
}
// We cannot have a file-level variable (const or otherwise) in the header if it uses PathUtils, because that references Application, which will not yet be initialized.
// We cannot have a file-level variable (const or otherwise) in the AvatarInfo if it uses PathUtils, because that references Application, which will not yet be initialized.
// Thus we have a static class getter, referencing a static class var.
QUrl AvatarData::_defaultFullAvatarModelUrl = {}; // In C++, if this initialization were in the header, every file would have its own copy, even for class vars.
QUrl AvatarData::_defaultFullAvatarModelUrl = {}; // In C++, if this initialization were in the AvatarInfo, every file would have its own copy, even for class vars.
const QUrl& AvatarData::defaultFullAvatarModelUrl() {
if (_defaultFullAvatarModelUrl.isEmpty()) {
_defaultFullAvatarModelUrl = QUrl::fromLocalFile(PathUtils::resourcesPath() + "meshes/defaultAvatar_full.fst");
@ -216,56 +229,56 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail) {
memcpy(destinationBuffer, &_globalPosition, sizeof(_globalPosition));
destinationBuffer += sizeof(_globalPosition);
} else {
auto header = reinterpret_cast<AvatarDataPacket::Header*>(destinationBuffer);
header->position[0] = getLocalPosition().x;
header->position[1] = getLocalPosition().y;
header->position[2] = getLocalPosition().z;
header->globalPosition[0] = _globalPosition.x;
header->globalPosition[1] = _globalPosition.y;
header->globalPosition[2] = _globalPosition.z;
header->globalBoundingBoxCorner[0] = getPosition().x - _globalBoundingBoxCorner.x;
header->globalBoundingBoxCorner[1] = getPosition().y - _globalBoundingBoxCorner.y;
header->globalBoundingBoxCorner[2] = getPosition().z - _globalBoundingBoxCorner.z;
auto avatarInfo = reinterpret_cast<AvatarDataPacket::AvatarInfo*>(destinationBuffer);
avatarInfo->position[0] = getLocalPosition().x;
avatarInfo->position[1] = getLocalPosition().y;
avatarInfo->position[2] = getLocalPosition().z;
avatarInfo->globalPosition[0] = _globalPosition.x;
avatarInfo->globalPosition[1] = _globalPosition.y;
avatarInfo->globalPosition[2] = _globalPosition.z;
avatarInfo->globalBoundingBoxCorner[0] = getPosition().x - _globalBoundingBoxCorner.x;
avatarInfo->globalBoundingBoxCorner[1] = getPosition().y - _globalBoundingBoxCorner.y;
avatarInfo->globalBoundingBoxCorner[2] = getPosition().z - _globalBoundingBoxCorner.z;
glm::vec3 bodyEulerAngles = glm::degrees(safeEulerAngles(getLocalOrientation()));
packFloatAngleToTwoByte((uint8_t*)(header->localOrientation + 0), bodyEulerAngles.y);
packFloatAngleToTwoByte((uint8_t*)(header->localOrientation + 1), bodyEulerAngles.x);
packFloatAngleToTwoByte((uint8_t*)(header->localOrientation + 2), bodyEulerAngles.z);
packFloatRatioToTwoByte((uint8_t*)(&header->scale), getDomainLimitedScale());
header->lookAtPosition[0] = _headData->_lookAtPosition.x;
header->lookAtPosition[1] = _headData->_lookAtPosition.y;
header->lookAtPosition[2] = _headData->_lookAtPosition.z;
header->audioLoudness = _headData->_audioLoudness;
packFloatAngleToTwoByte((uint8_t*)(avatarInfo->localOrientation + 0), bodyEulerAngles.y);
packFloatAngleToTwoByte((uint8_t*)(avatarInfo->localOrientation + 1), bodyEulerAngles.x);
packFloatAngleToTwoByte((uint8_t*)(avatarInfo->localOrientation + 2), bodyEulerAngles.z);
packFloatRatioToTwoByte((uint8_t*)(&avatarInfo->scale), getDomainLimitedScale());
avatarInfo->lookAtPosition[0] = _headData->_lookAtPosition.x;
avatarInfo->lookAtPosition[1] = _headData->_lookAtPosition.y;
avatarInfo->lookAtPosition[2] = _headData->_lookAtPosition.z;
avatarInfo->audioLoudness = _headData->_audioLoudness;
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
packOrientationQuatToSixBytes(header->sensorToWorldQuat, glmExtractRotation(sensorToWorldMatrix));
packOrientationQuatToSixBytes(avatarInfo->sensorToWorldQuat, glmExtractRotation(sensorToWorldMatrix));
glm::vec3 scale = extractScale(sensorToWorldMatrix);
packFloatScalarToSignedTwoByteFixed((uint8_t*)&header->sensorToWorldScale, scale.x, SENSOR_TO_WORLD_SCALE_RADIX);
header->sensorToWorldTrans[0] = sensorToWorldMatrix[3][0];
header->sensorToWorldTrans[1] = sensorToWorldMatrix[3][1];
header->sensorToWorldTrans[2] = sensorToWorldMatrix[3][2];
packFloatScalarToSignedTwoByteFixed((uint8_t*)&avatarInfo->sensorToWorldScale, scale.x, SENSOR_TO_WORLD_SCALE_RADIX);
avatarInfo->sensorToWorldTrans[0] = sensorToWorldMatrix[3][0];
avatarInfo->sensorToWorldTrans[1] = sensorToWorldMatrix[3][1];
avatarInfo->sensorToWorldTrans[2] = sensorToWorldMatrix[3][2];
setSemiNibbleAt(header->flags, KEY_STATE_START_BIT, _keyState);
setSemiNibbleAt(avatarInfo->flags, KEY_STATE_START_BIT, _keyState);
// hand state
bool isFingerPointing = _handState & IS_FINGER_POINTING_FLAG;
setSemiNibbleAt(header->flags, HAND_STATE_START_BIT, _handState & ~IS_FINGER_POINTING_FLAG);
setSemiNibbleAt(avatarInfo->flags, HAND_STATE_START_BIT, _handState & ~IS_FINGER_POINTING_FLAG);
if (isFingerPointing) {
setAtBit(header->flags, HAND_STATE_FINGER_POINTING_BIT);
setAtBit(avatarInfo->flags, HAND_STATE_FINGER_POINTING_BIT);
}
// faceshift state
if (_headData->_isFaceTrackerConnected) {
setAtBit(header->flags, IS_FACESHIFT_CONNECTED);
setAtBit(avatarInfo->flags, IS_FACESHIFT_CONNECTED);
}
// eye tracker state
if (_headData->_isEyeTrackerConnected) {
setAtBit(header->flags, IS_EYE_TRACKER_CONNECTED);
setAtBit(avatarInfo->flags, IS_EYE_TRACKER_CONNECTED);
}
// referential state
QUuid parentID = getParentID();
if (!parentID.isNull()) {
setAtBit(header->flags, HAS_REFERENTIAL);
setAtBit(avatarInfo->flags, HAS_REFERENTIAL);
}
destinationBuffer += sizeof(AvatarDataPacket::Header);
destinationBuffer += sizeof(AvatarDataPacket::AvatarInfo);
if (!parentID.isNull()) {
auto parentInfo = reinterpret_cast<AvatarDataPacket::ParentInfo*>(destinationBuffer);
@ -510,13 +523,13 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
quint64 now = usecTimestampNow();
PACKET_READ_CHECK(Header, sizeof(AvatarDataPacket::Header));
auto header = reinterpret_cast<const AvatarDataPacket::Header*>(sourceBuffer);
sourceBuffer += sizeof(AvatarDataPacket::Header);
PACKET_READ_CHECK(AvatarInfo, sizeof(AvatarDataPacket::AvatarInfo));
auto avatarInfo = reinterpret_cast<const AvatarDataPacket::AvatarInfo*>(sourceBuffer);
sourceBuffer += sizeof(AvatarDataPacket::AvatarInfo);
glm::vec3 position = glm::vec3(header->position[0], header->position[1], header->position[2]);
_globalPosition = glm::vec3(header->globalPosition[0], header->globalPosition[1], header->globalPosition[2]);
_globalBoundingBoxCorner = glm::vec3(header->globalBoundingBoxCorner[0], header->globalBoundingBoxCorner[1], header->globalBoundingBoxCorner[2]);
glm::vec3 position = glm::vec3(avatarInfo->position[0], avatarInfo->position[1], avatarInfo->position[2]);
_globalPosition = glm::vec3(avatarInfo->globalPosition[0], avatarInfo->globalPosition[1], avatarInfo->globalPosition[2]);
_globalBoundingBoxCorner = glm::vec3(avatarInfo->globalBoundingBoxCorner[0], avatarInfo->globalBoundingBoxCorner[1], avatarInfo->globalBoundingBoxCorner[2]);
if (isNaN(position)) {
if (shouldLogError(now)) {
qCWarning(avatars) << "Discard AvatarData packet: position NaN, uuid " << getSessionUUID();
@ -526,9 +539,9 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
setLocalPosition(position);
float pitch, yaw, roll;
unpackFloatAngleFromTwoByte(header->localOrientation + 0, &yaw);
unpackFloatAngleFromTwoByte(header->localOrientation + 1, &pitch);
unpackFloatAngleFromTwoByte(header->localOrientation + 2, &roll);
unpackFloatAngleFromTwoByte(avatarInfo->localOrientation + 0, &yaw);
unpackFloatAngleFromTwoByte(avatarInfo->localOrientation + 1, &pitch);
unpackFloatAngleFromTwoByte(avatarInfo->localOrientation + 2, &roll);
if (isNaN(yaw) || isNaN(pitch) || isNaN(roll)) {
if (shouldLogError(now)) {
qCWarning(avatars) << "Discard AvatarData packet: localOrientation is NaN, uuid " << getSessionUUID();
@ -545,7 +558,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
}
float scale;
unpackFloatRatioFromTwoByte((uint8_t*)&header->scale, scale);
unpackFloatRatioFromTwoByte((uint8_t*)&avatarInfo->scale, scale);
if (isNaN(scale)) {
if (shouldLogError(now)) {
qCWarning(avatars) << "Discard AvatarData packet: scale NaN, uuid " << getSessionUUID();
@ -554,7 +567,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
}
setTargetScale(scale);
glm::vec3 lookAt = glm::vec3(header->lookAtPosition[0], header->lookAtPosition[1], header->lookAtPosition[2]);
glm::vec3 lookAt = glm::vec3(avatarInfo->lookAtPosition[0], avatarInfo->lookAtPosition[1], avatarInfo->lookAtPosition[2]);
if (isNaN(lookAt)) {
if (shouldLogError(now)) {
qCWarning(avatars) << "Discard AvatarData packet: lookAtPosition is NaN, uuid " << getSessionUUID();
@ -563,7 +576,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
}
_headData->_lookAtPosition = lookAt;
float audioLoudness = header->audioLoudness;
float audioLoudness = avatarInfo->audioLoudness;
if (isNaN(audioLoudness)) {
if (shouldLogError(now)) {
qCWarning(avatars) << "Discard AvatarData packet: audioLoudness is NaN, uuid " << getSessionUUID();
@ -573,16 +586,16 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
_headData->_audioLoudness = audioLoudness;
glm::quat sensorToWorldQuat;
unpackOrientationQuatFromSixBytes(header->sensorToWorldQuat, sensorToWorldQuat);
unpackOrientationQuatFromSixBytes(avatarInfo->sensorToWorldQuat, sensorToWorldQuat);
float sensorToWorldScale;
unpackFloatScalarFromSignedTwoByteFixed((int16_t*)&header->sensorToWorldScale, &sensorToWorldScale, SENSOR_TO_WORLD_SCALE_RADIX);
glm::vec3 sensorToWorldTrans(header->sensorToWorldTrans[0], header->sensorToWorldTrans[1], header->sensorToWorldTrans[2]);
unpackFloatScalarFromSignedTwoByteFixed((int16_t*)&avatarInfo->sensorToWorldScale, &sensorToWorldScale, SENSOR_TO_WORLD_SCALE_RADIX);
glm::vec3 sensorToWorldTrans(avatarInfo->sensorToWorldTrans[0], avatarInfo->sensorToWorldTrans[1], avatarInfo->sensorToWorldTrans[2]);
glm::mat4 sensorToWorldMatrix = createMatFromScaleQuatAndPos(glm::vec3(sensorToWorldScale), sensorToWorldQuat, sensorToWorldTrans);
_sensorToWorldMatrixCache.set(sensorToWorldMatrix);
{ // bitFlags and face data
uint8_t bitItems = header->flags;
uint8_t bitItems = avatarInfo->flags;
// key state, stored as a semi-nibble in the bitItems
_keyState = (KeyState)getSemiNibbleAt(bitItems, KEY_STATE_START_BIT);

View file

@ -426,7 +426,8 @@ void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
// a timer with a small interval is used to get better performance.
QObject::connect(&_updateTimer, &QTimer::timeout, this, &OffscreenQmlSurface::updateQuick);
QObject::connect(qApp, &QCoreApplication::aboutToQuit, this, &OffscreenQmlSurface::onAboutToQuit);
_updateTimer.setInterval(MIN_TIMER_MS);
_updateTimer.setTimerType(Qt::PreciseTimer);
_updateTimer.setInterval(MIN_TIMER_MS); // 5ms, Qt::PreciseTimer required
_updateTimer.start();
auto rootContext = getRootContext();

View file

@ -398,9 +398,13 @@ bool GL45Texture::continueTransfer() {
glTextureSubImage2D(_id, mipLevel, 0, 0, size.x, size.y, texelFormat.format, texelFormat.type, mip->readData());
} else if (GL_TEXTURE_CUBE_MAP == _target) {
// DSA ARB does not work on AMD, so use EXT
// glTextureSubImage3D(_id, mipLevel, 0, 0, face, size.x, size.y, 1, texelFormat.format, texelFormat.type, mip->readData());
auto target = CUBE_FACE_LAYOUT[face];
glTextureSubImage2DEXT(_id, target, mipLevel, 0, 0, size.x, size.y, texelFormat.format, texelFormat.type, mip->readData());
// unless the EXT entry point is not available in the driver
if (glTextureSubImage2DEXT) {
auto target = CUBE_FACE_LAYOUT[face];
glTextureSubImage2DEXT(_id, target, mipLevel, 0, 0, size.x, size.y, texelFormat.format, texelFormat.type, mip->readData());
} else {
glTextureSubImage3D(_id, mipLevel, 0, 0, face, size.x, size.y, 1, texelFormat.format, texelFormat.type, mip->readData());
}
} else {
Q_ASSERT(false);
}

View file

@ -41,12 +41,12 @@ DomainHandler::DomainHandler(QObject* parent) :
// setup a timeout for failure on settings requests
static const int DOMAIN_SETTINGS_TIMEOUT_MS = 5000;
_settingsTimer.setInterval(DOMAIN_SETTINGS_TIMEOUT_MS);
_settingsTimer.setInterval(DOMAIN_SETTINGS_TIMEOUT_MS); // 5s, Qt::CoarseTimer acceptable
connect(&_settingsTimer, &QTimer::timeout, this, &DomainHandler::settingsReceiveFail);
// setup the API refresh timer for auto connection information refresh from API when failing to connect
const int API_REFRESH_TIMEOUT_MSEC = 2500;
_apiRefreshTimer.setInterval(API_REFRESH_TIMEOUT_MSEC);
_apiRefreshTimer.setInterval(API_REFRESH_TIMEOUT_MSEC); // 2.5s, Qt::CoarseTimer acceptable
auto addressManager = DependencyManager::get<AddressManager>();
connect(&_apiRefreshTimer, &QTimer::timeout, addressManager.data(), &AddressManager::refreshPreviousLookup);

View file

@ -902,7 +902,7 @@ void LimitedNodeList::startSTUNPublicSocketUpdate() {
connect(_initialSTUNTimer.data(), &QTimer::timeout, this, &LimitedNodeList::sendSTUNRequest);
const int STUN_INITIAL_UPDATE_INTERVAL_MSECS = 250;
_initialSTUNTimer->setInterval(STUN_INITIAL_UPDATE_INTERVAL_MSECS);
_initialSTUNTimer->setInterval(STUN_INITIAL_UPDATE_INTERVAL_MSECS); // 250ms, Qt::CoarseTimer acceptable
// if we don't know the STUN IP yet we need to wait until it is known to start STUN requests
if (_stunSockAddr.getAddress().isNull()) {

View file

@ -102,7 +102,7 @@ NodeList::NodeList(char newOwnerType, int socketListenPort, int dtlsListenPort)
connect(this, &LimitedNodeList::nodeActivated, this, &NodeList::maybeSendIgnoreSetToNode);
// setup our timer to send keepalive pings (it's started and stopped on domain connect/disconnect)
_keepAlivePingTimer.setInterval(KEEPALIVE_PING_INTERVAL_MS);
_keepAlivePingTimer.setInterval(KEEPALIVE_PING_INTERVAL_MS); // 1s, Qt::CoarseTimer acceptable
connect(&_keepAlivePingTimer, &QTimer::timeout, this, &NodeList::sendKeepAlivePings);
connect(&_domainHandler, SIGNAL(connectedToDomain(QString)), &_keepAlivePingTimer, SLOT(start()));
connect(&_domainHandler, &DomainHandler::disconnectedFromDomain, &_keepAlivePingTimer, &QTimer::stop);

View file

@ -71,6 +71,11 @@ QList<QSharedPointer<Resource>> ResourceCacheSharedItems::getLoadingRequests() {
return result;
}
uint32_t ResourceCacheSharedItems::getLoadingRequestsCount() const {
Lock lock(_mutex);
return _loadingRequests.size();
}
void ResourceCacheSharedItems::removeRequest(QWeakPointer<Resource> resource) {
Lock lock(_mutex);
@ -463,6 +468,10 @@ int ResourceCache::getPendingRequestCount() {
return DependencyManager::get<ResourceCacheSharedItems>()->getPendingRequestsCount();
}
int ResourceCache::getLoadingRequestCount() {
return DependencyManager::get<ResourceCacheSharedItems>()->getLoadingRequestsCount();
}
bool ResourceCache::attemptRequest(QSharedPointer<Resource> resource) {
Q_ASSERT(!resource.isNull());
auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();

View file

@ -73,6 +73,7 @@ public:
uint32_t getPendingRequestsCount() const;
QList<QSharedPointer<Resource>> getLoadingRequests();
QSharedPointer<Resource> getHighestPendingRequest();
uint32_t getLoadingRequestsCount() const;
private:
ResourceCacheSharedItems() = default;
@ -241,6 +242,8 @@ public:
static int getPendingRequestCount();
static int getLoadingRequestCount();
ResourceCache(QObject* parent = nullptr);
virtual ~ResourceCache();

View file

@ -27,11 +27,11 @@ ThreadedAssignment::ThreadedAssignment(ReceivedMessage& message) :
_statsTimer(this)
{
static const int STATS_TIMEOUT_MS = 1000;
_statsTimer.setInterval(STATS_TIMEOUT_MS);
_statsTimer.setInterval(STATS_TIMEOUT_MS); // 1s, Qt::CoarseTimer acceptable
connect(&_statsTimer, &QTimer::timeout, this, &ThreadedAssignment::sendStatsPacket);
connect(&_domainServerTimer, &QTimer::timeout, this, &ThreadedAssignment::checkInWithDomainServerOrExit);
_domainServerTimer.setInterval(DOMAIN_SERVER_CHECK_IN_MSECS);
_domainServerTimer.setInterval(DOMAIN_SERVER_CHECK_IN_MSECS); // 1s, Qt::CoarseTimer acceptable
// if the NL tells us we got a DS response, clear our member variable of queued check-ins
auto nodeList = DependencyManager::get<NodeList>();

View file

@ -16,6 +16,7 @@
#include <QtCore/QSize>
#include <QtCore/QPoint>
#include <QtCore/QElapsedTimer>
#include <QtCore/QJsonObject>
#include <GLMHelpers.h>
#include <RegisteredMetaTypes.h>
@ -194,6 +195,9 @@ public:
virtual float newFramePresentRate() const { return -1.0f; }
// Rate at which rendered frames are being skipped
virtual float droppedFrameRate() const { return -1.0f; }
// Hardware specific stats
virtual QJsonObject getHardwareStats() const { return QJsonObject(); }
uint32_t presentCount() const { return _presentedFrameIndex; }
// Time since last call to incrementPresentCount (only valid if DEBUG_PAINT_DELAY is defined)

View file

@ -234,17 +234,19 @@ void Model::updateRenderItems() {
render::PendingChanges pendingChanges;
foreach (auto itemID, self->_modelMeshRenderItems.keys()) {
pendingChanges.updateItem<ModelMeshPartPayload>(itemID, [modelTransform, modelMeshOffset, deleteGeometryCounter](ModelMeshPartPayload& data) {
if (!data.hasStartedFade() && data._model && data._model->isLoaded() && data._model->getGeometry()->areTexturesLoaded()) {
data.startFade();
}
// Ensure the model geometry was not reset between frames
if (data._model && data._model->isLoaded() && deleteGeometryCounter == data._model->_deleteGeometryCounter) {
// lazy update of cluster matrices used for rendering. We need to update them here, so we can correctly update the bounding box.
data._model->updateClusterMatrices(modelTransform.getTranslation(), modelTransform.getRotation());
if (data._model && data._model->isLoaded()) {
if (!data.hasStartedFade() && data._model->getGeometry()->areTexturesLoaded()) {
data.startFade();
}
// Ensure the model geometry was not reset between frames
if (deleteGeometryCounter == data._model->_deleteGeometryCounter) {
// lazy update of cluster matrices used for rendering. We need to update them here, so we can correctly update the bounding box.
data._model->updateClusterMatrices(modelTransform.getTranslation(), modelTransform.getRotation());
// update the model transform and bounding box for this render item.
const Model::MeshState& state = data._model->_meshStates.at(data._meshIndex);
data.updateTransformForSkinnedMesh(modelTransform, modelMeshOffset, state.clusterMatrices);
// update the model transform and bounding box for this render item.
const Model::MeshState& state = data._model->_meshStates.at(data._meshIndex);
data.updateTransformForSkinnedMesh(modelTransform, modelMeshOffset, state.clusterMatrices);
}
}
});
}
@ -1158,7 +1160,8 @@ void Model::updateClusterMatrices(glm::vec3 modelPosition, glm::quat modelOrient
}
_needsUpdateClusterMatrices = false;
const FBXGeometry& geometry = getFBXGeometry();
glm::mat4 zeroScale(glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
static const glm::mat4 zeroScale(
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
@ -1168,11 +1171,17 @@ void Model::updateClusterMatrices(glm::vec3 modelPosition, glm::quat modelOrient
for (int i = 0; i < _meshStates.size(); i++) {
MeshState& state = _meshStates[i];
const FBXMesh& mesh = geometry.meshes.at(i);
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
auto jointMatrix = _rig->getJointTransform(cluster.jointIndex);
#if GLM_ARCH & GLM_ARCH_SSE2
glm::mat4 temp, out, inverseBindMatrix = cluster.inverseBindMatrix;
glm_mat4_mul((glm_vec4*)&modelToWorld, (glm_vec4*)&jointMatrix, (glm_vec4*)&temp);
glm_mat4_mul((glm_vec4*)&temp, (glm_vec4*)&inverseBindMatrix, (glm_vec4*)&out);
state.clusterMatrices[j] = out;
#else
state.clusterMatrices[j] = modelToWorld * jointMatrix * cluster.inverseBindMatrix;
#endif
// as an optimization, don't build cauterizedClusterMatrices if the boneSet is empty.
if (!_cauterizeBoneSet.empty()) {

View file

@ -28,14 +28,8 @@
#include <render/drawItemBounds_frag.h>
using namespace render;
extern void initOverlay3DPipelines(render::ShapePlumber& plumber);
extern void initDeferredPipelines(render::ShapePlumber& plumber);
RenderForwardTask::RenderForwardTask(RenderFetchCullSortTask::Output items) {
// Prepare the ShapePipelines
ShapePlumberPointer shapePlumber = std::make_shared<ShapePlumber>();
initDeferredPipelines(*shapePlumber);
// Extract opaques / transparents / lights / overlays
const auto opaques = items[0];
const auto transparents = items[1];
@ -46,6 +40,9 @@ RenderForwardTask::RenderForwardTask(RenderFetchCullSortTask::Output items) {
const auto framebuffer = addJob<PrepareFramebuffer>("PrepareFramebuffer");
addJob<DrawBackground>("DrawBackground", background);
// bounds do not draw on stencil buffer, so they must come last
addJob<DrawBounds>("DrawBounds", opaques);
// Blit!
@ -119,6 +116,8 @@ void DrawBounds::run(const SceneContextPointer& sceneContext, const RenderContex
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
// Setup projection
glm::mat4 projMat;
Transform viewMat;
@ -143,3 +142,26 @@ void DrawBounds::run(const SceneContextPointer& sceneContext, const RenderContex
}
});
}
void DrawBackground::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext, const Inputs& items) {
RenderArgs* args = renderContext->args;
gpu::doInBatch(args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
batch.enableSkybox(true);
batch.setViewportTransform(args->_viewport);
batch.setStateScissorRect(args->_viewport);
// Setup projection
glm::mat4 projMat;
Transform viewMat;
args->getViewFrustum().evalProjectionMatrix(projMat);
args->getViewFrustum().evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
renderItems(sceneContext, renderContext, items);
});
args->_batch = nullptr;
}

View file

@ -47,4 +47,12 @@ private:
int _scaleLocation { -1 };
};
class DrawBackground {
public:
using Inputs = render::ItemBounds;
using JobModel = render::Job::ModelI<DrawBackground, Inputs>;
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext, const Inputs& background);
};
#endif // hifi_RenderForwardTask_h

View file

@ -35,7 +35,7 @@ void PendingChanges::updateItem(ItemID id, const UpdateFunctorPointer& functor)
_updateFunctors.push_back(functor);
}
void PendingChanges::merge(PendingChanges& changes) {
void PendingChanges::merge(const PendingChanges& changes) {
_resetItems.insert(_resetItems.end(), changes._resetItems.begin(), changes._resetItems.end());
_resetPayloads.insert(_resetPayloads.end(), changes._resetPayloads.begin(), changes._resetPayloads.end());
_removedItems.insert(_removedItems.end(), changes._removedItems.begin(), changes._removedItems.end());
@ -71,7 +71,7 @@ void Scene::enqueuePendingChanges(const PendingChanges& pendingChanges) {
void consolidateChangeQueue(PendingChangesQueue& queue, PendingChanges& singleBatch) {
while (!queue.empty()) {
auto pendingChanges = queue.front();
const auto& pendingChanges = queue.front();
singleBatch.merge(pendingChanges);
queue.pop();
};

View file

@ -34,7 +34,7 @@ public:
void updateItem(ItemID id, const UpdateFunctorPointer& functor);
void updateItem(ItemID id) { updateItem(id, nullptr); }
void merge(PendingChanges& changes);
void merge(const PendingChanges& changes);
ItemIDs _resetItems;
Payloads _resetPayloads;

View file

@ -800,7 +800,7 @@ void ScriptEngine::addEventHandler(const EntityItemID& entityID, const QString&
_registeredHandlers[entityID] = RegisteredEventHandlers();
}
CallbackList& handlersForEvent = _registeredHandlers[entityID][eventName];
CallbackData handlerData = {handler, currentEntityIdentifier, currentSandboxURL};
CallbackData handlerData = { handler, currentEntityIdentifier, currentSandboxURL };
handlersForEvent << handlerData; // Note that the same handler can be added many times. See removeEntityEventHandler().
}
@ -878,23 +878,48 @@ void ScriptEngine::run() {
// Throttle to SCRIPT_FPS
// We'd like to try to keep the script at a solid SCRIPT_FPS update rate. And so we will
// calculate a sleepUntil to be the time from our start time until the original target
// sleepUntil for this frame.
const std::chrono::microseconds FRAME_DURATION(USECS_PER_SECOND / SCRIPT_FPS + 1);
clock::time_point targetSleepUntil(startTime + thisFrame++ * FRAME_DURATION);
// sleepUntil for this frame. This approach will allow us to "catch up" in the event
// that some of our script updates/frames take a little bit longer than the target average
// to execute.
// NOTE: if we go to variable SCRIPT_FPS, then we will need to reconsider this approach
const std::chrono::microseconds TARGET_SCRIPT_FRAME_DURATION(USECS_PER_SECOND / SCRIPT_FPS + 1);
clock::time_point targetSleepUntil(startTime + (thisFrame++ * TARGET_SCRIPT_FRAME_DURATION));
// However, if our sleepUntil is not at least our average update time into the future
// it means our script is taking too long in it's updates, and we want to punish the
// script a little bit. So we will force the sleepUntil to be at least our averageUpdate
// time into the future.
// However, if our sleepUntil is not at least our average update and timer execution time
// into the future it means our script is taking too long in its updates, and we want to
// punish the script a little bit. So we will force the sleepUntil to be at least our
// averageUpdate + averageTimerPerFrame time into the future.
auto averageUpdate = totalUpdates / thisFrame;
auto sleepUntil = std::max(targetSleepUntil, beforeSleep + averageUpdate);
auto averageTimerPerFrame = _totalTimerExecution / thisFrame;
auto averageTimerAndUpdate = averageUpdate + averageTimerPerFrame;
auto sleepUntil = std::max(targetSleepUntil, beforeSleep + averageTimerAndUpdate);
// We don't want to actually sleep for too long, because it causes our scripts to hang
// on shutdown and stop... so we want to loop and sleep until we've spent our time in
// purgatory, constantly checking to see if our script was asked to end
bool processedEvents = false;
while (!_isFinished && clock::now() < sleepUntil) {
QCoreApplication::processEvents(); // before we sleep again, give events a chance to process
auto thisSleepUntil = std::min(sleepUntil, clock::now() + FRAME_DURATION);
processedEvents = true;
// If after processing events, we're past due, exit asap
if (clock::now() >= sleepUntil) {
break;
}
// determine how long before the next timer should fire, we'd ideally like to sleep just
// that long, so the next processEvents() will allow the timers to fire on time.
const std::chrono::microseconds minTimerTimeRemaining(USECS_PER_MSEC * getTimersRemainingTime());
// However, if we haven't yet slept at least as long as our average timer per frame, then we will
// punish the timers to at least wait as long as the average run time of the timers.
auto untilTimer = std::max(minTimerTimeRemaining, averageTimerPerFrame);
// choose the closest time point: either the remaining sleepUntil or the time until the next timer fires
auto remainingSleepUntil = std::chrono::duration_cast<std::chrono::microseconds>(sleepUntil - clock::now());
auto closestUntil = std::min(remainingSleepUntil, untilTimer);
auto thisSleepUntil = std::min(sleepUntil, clock::now() + closestUntil);
std::this_thread::sleep_until(thisSleepUntil);
}
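// Illustrative arithmetic for the throttle above: with SCRIPT_FPS at, say, 60 the target
// frame duration is about 16,667 us. If a script has averaged 4,000 us of update work and
// 3,000 us of timer callbacks per frame, sleepUntil = max(targetSleepUntil, beforeSleep + 7,000 us),
// and each sleep slice is capped by the time to the next pending timer, so a chronically
// slow script is throttled by its own average cost without starving its timers.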
@ -919,7 +944,10 @@ void ScriptEngine::run() {
break;
}
QCoreApplication::processEvents();
// Only call this if we didn't processEvents as part of waiting for next frame
if (!processedEvents) {
QCoreApplication::processEvents();
}
if (_isFinished) {
break;
@ -982,6 +1010,21 @@ void ScriptEngine::run() {
emit doneRunning();
}
quint64 ScriptEngine::getTimersRemainingTime() {
quint64 minimumTime = USECS_PER_SECOND; // anything larger than this can be ignored
QMutableHashIterator<QTimer*, CallbackData> i(_timerFunctionMap);
while (i.hasNext()) {
i.next();
QTimer* timer = i.key();
int remainingTime = timer->remainingTime();
if (remainingTime >= 0) {
minimumTime = std::min((quint64)remainingTime, minimumTime);
}
}
return minimumTime;
}
// NOTE: This is private because it must be called on the same thread that created the timers, which is why
// we want to only call it in our own run "shutdown" processing.
void ScriptEngine::stopAllTimers() {
@ -1077,7 +1120,12 @@ void ScriptEngine::timerFired() {
// call the associated JS function, if it exists
if (timerData.function.isValid()) {
auto preTimer = p_high_resolution_clock::now();
callWithEnvironment(timerData.definingEntityIdentifier, timerData.definingSandboxURL, timerData.function, timerData.function, QScriptValueList());
auto postTimer = p_high_resolution_clock::now();
auto elapsed = (postTimer - preTimer);
_totalTimerExecution += std::chrono::duration_cast<std::chrono::microseconds>(elapsed);
}
}
@ -1087,12 +1135,18 @@ QObject* ScriptEngine::setupTimerWithInterval(const QScriptValue& function, int
QTimer* newTimer = new QTimer(this);
newTimer->setSingleShot(isSingleShot);
// The default timer type is not very accurate below about 200ms http://doc.qt.io/qt-5/qt.html#TimerType-enum
static const int MIN_TIMEOUT_FOR_COARSE_TIMER = 200;
if (intervalMS < MIN_TIMEOUT_FOR_COARSE_TIMER) {
newTimer->setTimerType(Qt::PreciseTimer);
}
connect(newTimer, &QTimer::timeout, this, &ScriptEngine::timerFired);
// make sure the timer stops when the script does
connect(this, &ScriptEngine::scriptEnding, newTimer, &QTimer::stop);
CallbackData timerData = {function, currentEntityIdentifier, currentSandboxURL};
CallbackData timerData = { function, currentEntityIdentifier, currentSandboxURL };
_timerFunctionMap.insert(newTimer, timerData);
newTimer->start(intervalMS);

View file

@ -218,6 +218,7 @@ protected:
void init();
bool evaluatePending() const { return _evaluatesPending > 0; }
quint64 getTimersRemainingTime();
void timerFired();
void stopAllTimers();
void stopAllTimersForEntityScript(const EntityItemID& entityID);
@ -252,6 +253,8 @@ protected:
std::function<bool()> _emitScriptUpdates{ [](){ return true; } };
std::recursive_mutex _lock;
std::chrono::microseconds _totalTimerExecution { 0 };
};
#endif // hifi_ScriptEngine_h

View file

@ -575,18 +575,18 @@ void AABox::transform(const Transform& transform) {
// Logic based on http://clb.demon.fi/MathGeoLib/nightly/docs/AABB.cpp_code.html#471
void AABox::transform(const glm::mat4& matrix) {
// FIXME use simd operations
auto halfSize = _scale * 0.5f;
auto center = _corner + halfSize;
halfSize = abs(halfSize);
auto newCenter = transformPoint(matrix, center);
auto mm = glm::transpose(glm::mat3(matrix));
vec3 newDir = vec3(
glm::dot(glm::abs(vec3(mm[0])), halfSize),
glm::dot(glm::abs(vec3(mm[1])), halfSize),
glm::dot(glm::abs(vec3(mm[2])), halfSize)
glm::dot(glm::abs(mm[0]), halfSize),
glm::dot(glm::abs(mm[1]), halfSize),
glm::dot(glm::abs(mm[2]), halfSize)
);
auto newCenter = transformPoint(matrix, center);
_corner = newCenter - newDir;
_scale = newDir * 2.0f;
}
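// A snippet one might drop into a unit test to validate the fast path above: transform all
// 8 corners and take the component-wise min/max, which for affine matrices should reproduce
// newCenter - newDir and newCenter + newDir (the max over the +/- sign choices of dot(row, h)
// equals dot(|row|, h)). Variable names are illustrative; requires <cfloat> for FLT_MAX.
glm::vec3 lo(FLT_MAX), hi(-FLT_MAX);
for (int i = 0; i < 8; i++) {
    glm::vec3 corner = _corner + glm::vec3((i & 1) ? _scale.x : 0.0f,
                                           (i & 2) ? _scale.y : 0.0f,
                                           (i & 4) ? _scale.z : 0.0f);
    glm::vec3 p = transformPoint(matrix, corner);
    lo = glm::min(lo, p);
    hi = glm::max(hi, p);
}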

View file

@ -370,7 +370,7 @@ glm::quat glmExtractRotation(const glm::mat4& matrix) {
glm::vec3 extractScale(const glm::mat4& matrix) {
glm::mat3 m(matrix);
float det = glm::determinant(m);
if (det < 0) {
if (det < 0.0f) {
// left handed matrix, flip sign to compensate.
return glm::vec3(-glm::length(m[0]), glm::length(m[1]), glm::length(m[2]));
} else {
@@ -502,7 +502,10 @@ glm::mat4 cancelOutRollAndPitch(const glm::mat4& m) {
glm::vec3 transformPoint(const glm::mat4& m, const glm::vec3& p) {
glm::vec4 temp = m * glm::vec4(p, 1.0f);
return glm::vec3(temp.x / temp.w, temp.y / temp.w, temp.z / temp.w);
if (temp.w != 1.0f) {
temp *= (1.0f / temp.w);
}
return glm::vec3(temp);
}
// does not handle non-uniform scale correctly, but it's faster than transformVectorFull
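The guard in transformPoint() skips the per-component divide for the common affine case (w == 1) and still normalizes results from projective matrices. A self-contained restatement with an assumed usage (illustrative only; not part of GLMHelpers):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// A point run through an affine matrix keeps w == 1, so no divide is needed;
// a perspective projection yields w != 1 and the divide normalizes the result.
glm::vec3 projectPoint(const glm::mat4& m, const glm::vec3& p) {
    glm::vec4 t = m * glm::vec4(p, 1.0f);
    if (t.w != 1.0f) {
        t *= (1.0f / t.w);
    }
    return glm::vec3(t);
}

// usage with assumed matrices (purely illustrative):
glm::vec3 translated = projectPoint(glm::translate(glm::mat4(), glm::vec3(1, 2, 3)), glm::vec3(0)); // w stays 1
glm::vec3 projected  = projectPoint(glm::perspective(glm::radians(45.0f), 1.0f, 0.1f, 100.0f), glm::vec3(0, 0, -2)); // w != 1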

View file

@@ -83,7 +83,7 @@ namespace Setting {
_saveTimer = new QTimer(this);
Q_CHECK_PTR(_saveTimer);
_saveTimer->setSingleShot(true); // We will restart it once settings are saved.
_saveTimer->setInterval(SAVE_INTERVAL_MSEC);
_saveTimer->setInterval(SAVE_INTERVAL_MSEC); // 5s, Qt::CoarseTimer acceptable
connect(_saveTimer, SIGNAL(timeout()), this, SLOT(saveAll()));
}
_saveTimer->start();

View file

@@ -101,7 +101,10 @@ private:
};
inline void traceEvent(const QLoggingCategory& category, const QString& name, EventType type, const QString& id = "", const QVariantMap& args = {}, const QVariantMap& extra = {}) {
DependencyManager::get<Tracer>()->traceEvent(category, name, type, id, args, extra);
const auto& tracer = DependencyManager::get<Tracer>();
if (tracer) {
tracer->traceEvent(category, name, type, id, args, extra);
}
}
inline void traceEvent(const QLoggingCategory& category, const QString& name, EventType type, int id, const QVariantMap& args = {}, const QVariantMap& extra = {}) {

View file

@@ -137,5 +137,4 @@ void OculusBaseDisplayPlugin::updatePresentPose() {
}
OculusBaseDisplayPlugin::~OculusBaseDisplayPlugin() {
qDebug() << "Destroying OculusBaseDisplayPlugin";
}

View file

@@ -22,6 +22,13 @@
const char* OculusDisplayPlugin::NAME { "Oculus Rift" };
static ovrPerfHudMode currentDebugMode = ovrPerfHud_Off;
OculusDisplayPlugin::OculusDisplayPlugin() {
_appDroppedFrames.store(0);
_compositorDroppedFrames.store(0);
}
bool OculusDisplayPlugin::internalActivate() {
bool result = Parent::internalActivate();
currentDebugMode = ovrPerfHud_Off;
@@ -147,19 +154,31 @@ void OculusDisplayPlugin::hmdPresent() {
logWarning("Failed to present");
}
static int droppedFrames = 0;
static int compositorDroppedFrames = 0;
static int appDroppedFrames = 0;
ovrPerfStats perfStats;
ovr_GetPerfStats(_session, &perfStats);
for (int i = 0; i < perfStats.FrameStatsCount; ++i) {
const auto& frameStats = perfStats.FrameStats[i];
int delta = frameStats.CompositorDroppedFrameCount - droppedFrames;
int delta = frameStats.CompositorDroppedFrameCount - compositorDroppedFrames;
_stutterRate.increment(delta);
droppedFrames = frameStats.CompositorDroppedFrameCount;
compositorDroppedFrames = frameStats.CompositorDroppedFrameCount;
appDroppedFrames = frameStats.AppDroppedFrameCount;
}
_appDroppedFrames.store(appDroppedFrames);
_compositorDroppedFrames.store(compositorDroppedFrames);
}
_presentRate.increment();
}
QJsonObject OculusDisplayPlugin::getHardwareStats() const {
QJsonObject hardwareStats;
hardwareStats["app_dropped_frame_count"] = _appDroppedFrames.load();
hardwareStats["compositor_dropped_frame_count"] = _compositorDroppedFrames.load();
return hardwareStats;
}
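A caller would typically just read the two counters back out of the returned object; a minimal consumption sketch (the displayPlugin pointer and the logging are assumptions, not code from this diff):

#include <QDebug>
#include <QJsonObject>

// displayPlugin is assumed to point at a plugin exposing getHardwareStats()
QJsonObject stats = displayPlugin->getHardwareStats();
qDebug() << "app dropped:" << stats["app_dropped_frame_count"].toInt()
         << "compositor dropped:" << stats["compositor_dropped_frame_count"].toInt();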
bool OculusDisplayPlugin::isHmdMounted() const {
ovrSessionStatus status;
return (OVR_SUCCESS(ovr_GetSessionStatus(_session, &status)) &&
@@ -183,5 +202,4 @@ QString OculusDisplayPlugin::getPreferredAudioOutDevice() const {
}
OculusDisplayPlugin::~OculusDisplayPlugin() {
qDebug() << "Destroying OculusDisplayPlugin";
}

View file

@@ -12,6 +12,7 @@
class OculusDisplayPlugin : public OculusBaseDisplayPlugin {
using Parent = OculusBaseDisplayPlugin;
public:
OculusDisplayPlugin();
~OculusDisplayPlugin();
const QString getName() const override { return NAME; }
@@ -19,6 +20,8 @@ public:
QString getPreferredAudioInDevice() const override;
QString getPreferredAudioOutDevice() const override;
virtual QJsonObject getHardwareStats() const;
protected:
bool internalActivate() override;
@@ -33,5 +36,8 @@ private:
ovrTextureSwapChain _textureSwapChain;
gpu::FramebufferPointer _outputFramebuffer;
bool _customized { false };
std::atomic_int _compositorDroppedFrames;
std::atomic_int _appDroppedFrames;
};

View file

@@ -363,7 +363,7 @@ void showMinSpecWarning() {
vrOverlay->ShowOverlay(minSpecFailedOverlay);
QTimer* timer = new QTimer(&miniApp);
timer->setInterval(FAILED_MIN_SPEC_UPDATE_INTERVAL_MS);
timer->setInterval(FAILED_MIN_SPEC_UPDATE_INTERVAL_MS); // Qt::CoarseTimer acceptable, we don't need this to be frame rate accurate
QObject::connect(timer, &QTimer::timeout, [&] {
vr::TrackedDevicePose_t vrPoses[vr::k_unMaxTrackedDeviceCount];
vrSystem->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseSeated, 0, vrPoses, vr::k_unMaxTrackedDeviceCount);

View file

@@ -19,19 +19,19 @@ class QUrl;
class SteamAPIPlugin : public SteamClientPlugin {
public:
bool isRunning();
bool isRunning() override;
bool init();
void shutdown();
bool init() override;
void shutdown() override;
void runCallbacks();
void runCallbacks() override;
void requestTicket(TicketRequestCallback callback);
void updateLocation(QString status, QUrl locationUrl);
void openInviteOverlay();
void joinLobby(QString lobbyId);
void requestTicket(TicketRequestCallback callback) override;
void updateLocation(QString status, QUrl locationUrl) override;
void openInviteOverlay() override;
void joinLobby(QString lobbyId) override;
int getSteamVRBuildID();
int getSteamVRBuildID() override;
};
#endif // hifi_SteamAPIPlugin_h

View file

@@ -8,6 +8,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function() { // BEGIN LOCAL_SCOPE
// Set up the qml ui
var qml = Script.resolvePath('debugWindow.qml');
@@ -19,18 +20,33 @@ var window = new OverlayWindow({
window.setPosition(25, 50);
window.closed.connect(function() { Script.stop(); });
var getFormattedDate = function() {
var date = new Date();
return (date.getMonth() + 1) + "/" + date.getDate() + " " + date.getHours() + ":" + date.getMinutes() + ":" + date.getSeconds();
};
var sendToLogWindow = function(type, message, scriptFileName) {
var typeFormatted = "";
if (type) {
typeFormatted = type + " - ";
}
window.sendToQml("[" + getFormattedDate() + "] " + "[" + scriptFileName + "] " + typeFormatted + message);
};
ScriptDiscoveryService.printedMessage.connect(function(message, scriptFileName) {
window.sendToQml("[" + scriptFileName + "] " + message);
sendToLogWindow("", message, scriptFileName);
});
ScriptDiscoveryService.warningMessage.connect(function(message, scriptFileName) {
window.sendToQml("[" + scriptFileName + "] WARNING - " + message);
sendToLogWindow("WARNING", message, scriptFileName);
});
ScriptDiscoveryService.errorMessage.connect(function(message, scriptFileName) {
window.sendToQml("[" + scriptFileName + "] ERROR - " + message);
sendToLogWindow("ERROR", message, scriptFileName);
});
ScriptDiscoveryService.infoMessage.connect(function(message, scriptFileName) {
window.sendToQml("[" + scriptFileName + "] INFO - " + message);
sendToLogWindow("INFO", message, scriptFileName);
});
}()); // END LOCAL_SCOPE

View file

@@ -64,7 +64,7 @@
Audio.playSound(bubbleActivateSound, {
position: { x: MyAvatar.position.x, y: MyAvatar.position.y, z: MyAvatar.position.z },
localOnly: true,
volume: 0.4
volume: 0.2
});
hideOverlays();
if (updateConnected === true) {

View file

@@ -79,7 +79,7 @@ ExtendedOverlay.applyPickRay = function (pickRay, cb) { // cb(overlay) on the on
var pal = new OverlayWindow({
title: 'People Action List',
source: 'hifi/Pal.qml',
width: 480,
width: 580,
height: 640,
visible: false
});
@@ -94,6 +94,10 @@ pal.fromQml.connect(function (message) { // messages are {method, params}, like
overlay.select(selected);
});
break;
case 'refresh':
removeOverlays();
populateUserList();
break;
default:
print('Unrecognized message from Pal.qml:', JSON.stringify(message));
}
@@ -119,7 +123,8 @@ function populateUserList() {
var avatarPalDatum = {
displayName: avatar.sessionDisplayName,
userName: '',
sessionId: id || ''
sessionId: id || '',
audioLevel: 0.0
};
// If the current user is an admin OR
// they're requesting their own username ("id" is blank)...
@@ -140,12 +145,13 @@ function populateUserList() {
function usernameFromIDReply(id, username, machineFingerprint) {
var data;
// If the ID we've received is our ID...
if (AvatarList.getAvatar('').sessionUUID === id) {
if (MyAvatar.sessionUUID === id) {
// Set the data to contain specific strings.
data = ['', username + ' (hidden)']
data = ['', username]
} else {
// Set the data to contain the ID and the username+ID concat string.
data = [id, username + '/' + machineFingerprint];
// Set the data to contain the ID and the username (if we have one)
// or fingerprint (if we don't have a username) string.
data = [id, username || machineFingerprint];
}
print('Username Data:', JSON.stringify(data));
// Ship the data off to QML
@@ -261,16 +267,63 @@ function onClicked() {
pal.setVisible(!pal.visible);
}
var AVERAGING_RATIO = 0.05;
var LOUDNESS_FLOOR = 11.0;
var LOUDNESS_SCALE = 2.8 / 5.0;
var LOG2 = Math.log(2.0);
var AUDIO_LEVEL_UPDATE_INTERVAL_MS = 100; // 10hz for now (change this and change the AVERAGING_RATIO too)
var accumulatedLevels = {};
function getAudioLevel(id) {
// the VU meter should work similarly to the one in AvatarInputs: log scale, exponentially averaged
// But of course it gets the data at a different rate, so we tweak the averaging ratio and frequency
// of updating (the latter for efficiency too).
var avatar = AvatarList.getAvatar(id);
var audioLevel = 0.0;
// compute an exponential moving average by blending the previous accumulated level with the new loudness sample
accumulatedLevels[id] = AVERAGING_RATIO * (accumulatedLevels[id] || 0 ) + (1 - AVERAGING_RATIO) * (avatar.audioLoudness);
// add 1 to ensure we don't take log(0) and hit -infinity. Math.log is
// natural log, so to get log base 2, just divide by ln(2).
var logLevel = Math.log(accumulatedLevels[id] + 1) / LOG2;
if (logLevel <= LOUDNESS_FLOOR) {
audioLevel = logLevel / LOUDNESS_FLOOR * LOUDNESS_SCALE;
} else {
audioLevel = (logLevel - (LOUDNESS_FLOOR - 1.0)) * LOUDNESS_SCALE;
}
if (audioLevel > 1.0) {
audioLevel = 1;
}
return audioLevel;
}
// we will update the audioLevels periodically
// TODO: tune for efficiency - especially with large numbers of avatars
Script.setInterval(function () {
if (pal.visible) {
var param = {};
AvatarList.getAvatarIdentifiers().sort().forEach(function (id) {
var level = getAudioLevel(id);
// qml didn't like an object with null/empty string for a key, so...
var userId = id || 0;
param[userId]= level;
});
pal.sendToQml({method: 'updateAudioLevel', params: param});
}
}, AUDIO_LEVEL_UPDATE_INTERVAL_MS);
//
// Button state.
//
function onVisibileChanged() {
function onVisibleChanged() {
button.writeProperty('buttonState', pal.visible ? 0 : 1);
button.writeProperty('defaultState', pal.visible ? 0 : 1);
button.writeProperty('hoverState', pal.visible ? 2 : 3);
}
button.clicked.connect(onClicked);
pal.visibleChanged.connect(onVisibileChanged);
pal.visibleChanged.connect(onVisibleChanged);
pal.closed.connect(off);
Users.usernameFromIDReply.connect(usernameFromIDReply);
@@ -280,7 +333,7 @@ Users.usernameFromIDReply.connect(usernameFromIDReply);
Script.scriptEnding.connect(function () {
button.clicked.disconnect(onClicked);
toolBar.removeButton(buttonName);
pal.visibleChanged.disconnect(onVisibileChanged);
pal.visibleChanged.disconnect(onVisibleChanged);
pal.closed.disconnect(off);
Users.usernameFromIDReply.disconnect(usernameFromIDReply);
off();

View file

@@ -8,6 +8,9 @@
// userData.range should be an integer for the max distance away from the entity where the sound will be audible.
// userData.volume is the max volume at which the clip should play. Defaults to 1.0 (full volume).
//
// The rotation of the entity is copied to the ambisonic field, so by rotating the entity you will rotate the
// direction from which a given sound arrives.
//
// Remember that the entity has to be visible to the user for the sound to play at all, so make sure the entity is
// large enough to be loaded at the range you set, particularly for large ranges.
//
@@ -27,6 +30,7 @@
var range = DEFAULT_RANGE;
var maxVolume = DEFAULT_VOLUME;
var UPDATE_INTERVAL_MSECS = 100;
var rotation;
var entity;
var ambientSound;
@@ -35,11 +39,11 @@
var checkTimer = false;
var _this;
var WANT_COLOR_CHANGE = true;
var WANT_COLOR_CHANGE = false;
var COLOR_OFF = { red: 128, green: 128, blue: 128 };
var COLOR_ON = { red: 255, green: 0, blue: 0 };
var WANT_DEBUG = true;
var WANT_DEBUG = false;
function debugPrint(string) {
if (WANT_DEBUG) {
print(string);
@@ -92,23 +96,27 @@
this.maybeUpdate = function() {
// Every UPDATE_INTERVAL_MSECS, update the volume of the ambient sound based on distance from my avatar
_this.updateSettings();
var props = Entities.getEntityProperties(entity);
var HYSTERESIS_FRACTION = 0.1;
var props = Entities.getEntityProperties(entity, [ "position" ]);
var props = Entities.getEntityProperties(entity, [ "position", "rotation" ]);
center = props.position;
rotation = props.rotation;
var distance = Vec3.length(Vec3.subtract(MyAvatar.position, center));
if (distance <= range) {
var volume = (1.0 - distance / range) * maxVolume;
if (!soundPlaying && ambientSound.downloaded) {
soundPlaying = Audio.playSound(ambientSound, { loop: true, localOnly: true, volume: volume });
soundPlaying = Audio.playSound(ambientSound, { loop: true,
localOnly: true,
orientation: rotation,
volume: volume });
debugPrint("Starting ambient sound, volume: " + volume);
if (WANT_COLOR_CHANGE) {
Entities.editEntity(entity, { color: COLOR_ON });
}
} else if (soundPlaying && soundPlaying.playing) {
soundPlaying.setOptions( { volume: volume } );
soundPlaying.setOptions( { volume: volume, orientation: rotation } );
}
} else if (soundPlaying && soundPlaying.playing && (distance > range * HYSTERESIS_FRACTION)) {
soundPlaying.stop();
soundPlaying = false;

View file

@@ -36,7 +36,7 @@ void AudioRingBufferTests::runAllTests() {
int readIndexAt;
AudioRingBuffer ringBuffer(10, false, 10); // makes buffer of 100 int16_t samples
AudioRingBuffer ringBuffer(10, 10); // makes buffer of 100 int16_t samples
for (int T = 0; T < 300; T++) {
writeIndexAt = 0;

View file

@@ -34,6 +34,7 @@ TestWindow::TestWindow() {
auto timer = new QTimer(this);
timer->setTimerType(Qt::PreciseTimer);
timer->setInterval(5);
connect(timer, &QTimer::timeout, [&] { draw(); });
timer->start();

View file

@@ -12,29 +12,29 @@
#include "PacketTests.h"
#include "../QTestExtensions.h"
#include <udt/Packet.h>
#include <NLPacket.h>
QTEST_MAIN(PacketTests)
std::unique_ptr<Packet> copyToReadPacket(std::unique_ptr<Packet>& packet) {
std::unique_ptr<NLPacket> copyToReadPacket(std::unique_ptr<NLPacket>& packet) {
auto size = packet->getDataSize();
auto data = std::unique_ptr<char[]>(new char[size]);
memcpy(data.get(), packet->getData(), size);
return Packet::fromReceivedPacket(std::move(data), size, HifiSockAddr());
return NLPacket::fromReceivedPacket(std::move(data), size, HifiSockAddr());
}
void PacketTests::emptyPacketTest() {
auto packet = Packet::create(PacketType::Unknown);
auto packet = NLPacket::create(PacketType::Unknown);
QCOMPARE(packet->getType(), PacketType::Unknown);
QCOMPARE(packet->getPayloadSize(), 0);
QCOMPARE(packet->getDataSize(), packet->totalHeadersSize());
QCOMPARE(packet->getDataSize(), NLPacket::totalHeaderSize(packet->getType()));
QCOMPARE(packet->bytesLeftToRead(), 0);
QCOMPARE(packet->bytesAvailableForWrite(), packet->getPayloadCapacity());
}
void PacketTests::packetTypeTest() {
auto packet = Packet::create(PacketType::EntityAdd);
auto packet = NLPacket::create(PacketType::EntityAdd);
QCOMPARE(packet->getType(), PacketType::EntityAdd);
@@ -46,7 +46,7 @@ void PacketTests::packetTypeTest() {
}
void PacketTests::writeTest() {
auto packet = Packet::create(PacketType::Unknown);
auto packet = NLPacket::create(PacketType::Unknown);
QCOMPARE(packet->getPayloadSize(), 0);
@@ -62,7 +62,7 @@ void PacketTests::writeTest() {
void PacketTests::readTest() {
// Test reads for several different size packets
for (int i = 1; i < 4; i++) {
auto packet = Packet::create(PacketType::Unknown);
auto packet = NLPacket::create(PacketType::Unknown);
auto size = packet->getPayloadCapacity();
size /= i;
@@ -91,7 +91,7 @@ void PacketTests::readTest() {
}
void PacketTests::writePastCapacityTest() {
auto packet = Packet::create(PacketType::Unknown);
auto packet = NLPacket::create(PacketType::Unknown);
auto size = packet->getPayloadCapacity();
char* data = new char[size];
@@ -111,20 +111,20 @@ void PacketTests::writePastCapacityTest() {
QCOMPARE(packet->bytesAvailableForWrite(), 0);
QCOMPARE(packet->getPayloadSize(), size);
QCOMPARE(Packet::PACKET_WRITE_ERROR, packet->write("data"));
// Packet::write() shouldn't allow the caller to write if no space is left
QCOMPARE(NLPacket::PACKET_WRITE_ERROR, packet->write("data")); // asserts in DEBUG
// NLPacket::write() shouldn't allow the caller to write if no space is left
QCOMPARE(packet->getPayloadSize(), size);
}
void PacketTests::primitiveTest() {
auto packet = Packet::create(PacketType::Unknown);
auto packet = NLPacket::create(PacketType::Unknown);
int value1 = 5;
char value2 = 10;
bool value3 = true;
qint64 value4 = -93404;
packet->writePrimitive(value1);
packet->writePrimitive(value2);
packet->writePrimitive(value3);
@@ -133,7 +133,7 @@ void PacketTests::primitiveTest() {
auto recvPacket = copyToReadPacket(packet);
// Peek & read first value
{
{
int peekValue = 0;
QCOMPARE(recvPacket->peekPrimitive(&peekValue), (int)sizeof(peekValue));
QCOMPARE(peekValue, value1);

View file

@@ -37,36 +37,21 @@ void ResourceTests::initTestCase() {
static QSharedPointer<Resource> resource;
static bool waitForSignal(QObject *sender, const char *signal, int timeout = 1000) {
QEventLoop loop;
QTimer timer;
timer.setInterval(timeout);
timer.setSingleShot(true);
loop.connect(sender, signal, SLOT(quit()));
loop.connect(&timer, SIGNAL(timeout()), SLOT(quit()));
timer.start();
loop.exec();
return timer.isActive();
}
void ResourceTests::downloadFirst() {
// download the Mery fst file
QUrl meryUrl = QUrl("http://hifi-public.s3.amazonaws.com/marketplace/contents/e21c0b95-e502-4d15-8c41-ea2fc40f1125/3585ddf674869a67d31d5964f7b52de1.fst");
resource = QSharedPointer<Resource>::create(meryUrl, false);
resource = QSharedPointer<Resource>::create(meryUrl);
resource->setSelf(resource);
const int timeout = 1000;
QEventLoop loop;
QTimer timer;
timer.setInterval(timeout);
timer.setInterval(timeout); // 1s, Qt::CoarseTimer acceptable
timer.setSingleShot(true);
loop.connect(resource, SIGNAL(loaded(QNetworkReply&)), SLOT(quit()));
loop.connect(resource, SIGNAL(failed(QNetworkReply::NetworkError)), SLOT(quit()));
loop.connect(&timer, SIGNAL(timeout()), SLOT(quit()));
connect(resource.data(), &Resource::loaded, &loop, &QEventLoop::quit);
connect(resource.data(), &Resource::failed, &loop, &QEventLoop::quit);
connect(&timer, &QTimer::timeout, &loop, &QEventLoop::quit);
timer.start();
resource->ensureLoading();
@@ -76,26 +61,24 @@ void ResourceTests::downloadFirst() {
}
void ResourceTests::downloadAgain() {
// download the Mery fst file
QUrl meryUrl = QUrl("http://hifi-public.s3.amazonaws.com/marketplace/contents/e21c0b95-e502-4d15-8c41-ea2fc40f1125/3585ddf674869a67d31d5964f7b52de1.fst");
resource = QSharedPointer<Resource>::create(meryUrl, false);
resource = QSharedPointer<Resource>::create(meryUrl);
resource->setSelf(resource);
const int timeout = 1000;
QEventLoop loop;
QTimer timer;
timer.setInterval(timeout);
timer.setInterval(timeout); // 1s, Qt::CoarseTimer acceptable
timer.setSingleShot(true);
loop.connect(resource, SIGNAL(loaded(QNetworkReply&)), SLOT(quit()));
loop.connect(resource, SIGNAL(failed(QNetworkReply::NetworkError)), SLOT(quit()));
loop.connect(&timer, SIGNAL(timeout()), SLOT(quit()));
connect(resource.data(), &Resource::loaded, &loop, &QEventLoop::quit);
connect(resource.data(), &Resource::failed, &loop, &QEventLoop::quit);
connect(&timer, &QTimer::timeout, &loop, &QEventLoop::quit);
timer.start();
resource->ensureLoading();
loop.exec();
QVERIFY(resource->isLoaded());
}
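The loaded/failed/timeout wiring used in both tests is the standard Qt "wait for a signal or time out" pattern; a generic helper sketch in the same spirit (illustrative only, not part of the test harness):

#include <QEventLoop>
#include <QTimer>

// Returns true if the signal fired before the timeout expired.
template <typename Sender, typename Signal>
bool waitForSignalOrTimeout(Sender* sender, Signal signal, int timeoutMs) {
    QEventLoop loop;
    QTimer timer;
    timer.setSingleShot(true);
    timer.setInterval(timeoutMs); // Qt::CoarseTimer acceptable for a test timeout
    QObject::connect(sender, signal, &loop, &QEventLoop::quit);
    QObject::connect(&timer, &QTimer::timeout, &loop, &QEventLoop::quit);
    timer.start();
    loop.exec();
    return timer.isActive(); // timer still running => the signal quit the loop
}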

View file

@@ -38,6 +38,7 @@
#include <shared/NetworkUtils.h>
#include <shared/FileLogger.h>
#include <shared/FileUtils.h>
#include <StatTracker.h>
#include <LogHandler.h>
#include <AssetClient.h>
@@ -477,6 +478,8 @@ public:
DependencyManager::registerInheritance<EntityActionFactoryInterface, TestActionFactory>();
DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
DependencyManager::registerInheritance<SpatialParentFinder, ParentFinder>();
DependencyManager::set<tracing::Tracer>();
DependencyManager::set<StatTracker>();
DependencyManager::set<AddressManager>();
DependencyManager::set<NodeList>(NodeType::Agent);
DependencyManager::set<DeferredLightingEffect>();
@@ -552,7 +555,7 @@ public:
restorePosition();
QTimer* timer = new QTimer(this);
timer->setInterval(0);
timer->setInterval(0); // Qt::CoarseTimer acceptable
connect(timer, &QTimer::timeout, this, [this] {
draw();
});

View file

@@ -355,7 +355,7 @@ public:
}
QTimer* timer = new QTimer(this);
timer->setInterval(0);
timer->setInterval(0); // Qt::CoarseTimer acceptable
connect(timer, &QTimer::timeout, this, [this] {
draw();
});

View file

@@ -201,7 +201,7 @@ int main(int argc, char** argv) {
QLoggingCategory::setFilterRules(LOG_FILTER_RULES);
QTestWindow window;
QTimer timer;
timer.setInterval(1);
timer.setInterval(1); // Qt::CoarseTimer acceptable
app.connect(&timer, &QTimer::timeout, &app, [&] {
window.draw();
});

View file

@@ -15,6 +15,8 @@
#include <StreamUtils.h>
#include <../QTestExtensions.h>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/simd/matrix.h>
QTEST_MAIN(GLMHelpersTests)
@@ -102,3 +104,39 @@ void GLMHelpersTests::testSixByteOrientationCompression() {
testQuatCompression(-(ROT_Y_180 * ROT_Z_30 * ROT_X_90));
testQuatCompression(-(ROT_Z_30 * ROT_X_90 * ROT_Y_180));
}
#define LOOPS 500000
void GLMHelpersTests::testSimd() {
glm::mat4 a = glm::translate(glm::mat4(), vec3(1, 4, 9));
glm::mat4 b = glm::rotate(glm::mat4(), PI / 3, vec3(0, 1, 0));
glm::mat4 a1, b1;
glm::mat4 a2, b2;
a1 = a * b;
b1 = b * a;
glm_mat4_mul((glm_vec4*)&a, (glm_vec4*)&b, (glm_vec4*)&a2);
glm_mat4_mul((glm_vec4*)&b, (glm_vec4*)&a, (glm_vec4*)&b2);
{
QElapsedTimer timer;
timer.start();
for (size_t i = 0; i < LOOPS; ++i) {
a1 = a * b;
b1 = b * a;
}
qDebug() << "Native " << timer.elapsed();
}
{
QElapsedTimer timer;
timer.start();
for (size_t i = 0; i < LOOPS; ++i) {
glm_mat4_mul((glm_vec4*)&a, (glm_vec4*)&b, (glm_vec4*)&a2);
glm_mat4_mul((glm_vec4*)&b, (glm_vec4*)&a, (glm_vec4*)&b2);
}
qDebug() << "SIMD " << timer.elapsed();
}
qDebug() << "Done ";
}

View file

@@ -20,6 +20,7 @@ class GLMHelpersTests : public QObject {
private slots:
void testEulerDecomposition();
void testSixByteOrientationCompression();
void testSimd();
};
float getErrorDifference(const float& a, const float& b);

View file

@@ -255,11 +255,11 @@ void GeometryUtilTests::testTwistSwingDecomposition() {
glm::quat measuredTwistRotation;
glm::quat measuredSwingRotation;
swingTwistDecomposition(totalRotation, twistAxis, measuredSwingRotation, measuredTwistRotation);
// dot each measured rotation against its expected component
float twistDot = fabsf(glm::dot(twistRotation, measuredTwistRotation));
float swingDot = fabsf(glm::dot(swingRotation, measuredSwingRotation));
// the dot products should be very close to 1.0
const float MIN_ERROR = 1.0e-6f;
QCOMPARE_WITH_ABS_ERROR(1.0f, twistDot, MIN_ERROR);
@@ -277,7 +277,7 @@ void GeometryUtilTests::testSphereCapsulePenetration() {
glm::vec3 capsuleEnd(0.0f, 10.0f, 0.0f);
float capsuleRadius = 1.0f;
glm::vec3 penetration(glm::vec3::_null);
glm::vec3 penetration(0.0f);
bool hit = findSphereCapsulePenetration(sphereCenter, sphereRadius, capsuleStart, capsuleEnd, capsuleRadius, penetration);
QCOMPARE(hit, true);
QCOMPARE_WITH_ABS_ERROR(penetration, glm::vec3(-0.5f, 0.0f, 0.0f), EPSILON);

View file

@@ -39,19 +39,21 @@ void MovingPercentileTests::testRunningMedian() {
}
float MovingPercentileTests::random() {
return rand() / (float)RAND_MAX;
int64_t MovingPercentileTests::random() {
return ((int64_t) rand() << 48) ^
((int64_t) rand() << 32) ^
((int64_t) rand() << 16) ^
((int64_t) rand());
}
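rand() is only guaranteed to produce 15 random bits (RAND_MAX is 0x7FFF on MSVC), so a single call cannot exercise the high bits of an int64_t; XOR-ing four shifted calls spreads randomness across the full width, which keeps these percentile tests from degenerating to small values. A quick standalone check of the same construction (illustrative only; the shifts assume rand() values of at most 15 bits, as on MSVC):

#include <cstdint>
#include <cstdio>
#include <cstdlib>

static int64_t random64() {
    return ((int64_t) rand() << 48) ^ ((int64_t) rand() << 32) ^
           ((int64_t) rand() << 16) ^ ((int64_t) rand());
}

int main() {
    // crude sanity check: OR many samples together and expect set bits in all four 16-bit chunks
    int64_t coverage = 0;
    for (int i = 0; i < 1000; ++i) { coverage |= random64(); }
    std::printf("bit coverage: %016llx\n", (unsigned long long) coverage);
    return 0;
}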
void MovingPercentileTests::testRunningMinForN (int n) {
// Stores the last n samples
QQueue<float> samples;
QQueue<int64_t> samples;
MovingPercentile movingMin (n, 0.0f);
for (int s = 0; s < 3 * n; ++s) {
float sample = random();
int64_t sample = random();
samples.push_back(sample);
if (samples.size() > n)
@@ -64,30 +66,32 @@ void MovingPercentileTests::testRunningMinForN (int n) {
movingMin.updatePercentile(sample);
// Calculate the minimum of the moving samples
float expectedMin = std::numeric_limits<float>::max();
int64_t expectedMin = std::numeric_limits<int64_t>::max();
int prevSize = samples.size();
for (auto val : samples)
for (auto val : samples) {
expectedMin = std::min(val, expectedMin);
}
QCOMPARE(samples.size(), prevSize);
QCOMPARE(movingMin.getValueAtPercentile(), expectedMin);
QVERIFY(movingMin.getValueAtPercentile() - expectedMin == 0L);
}
}
void MovingPercentileTests::testRunningMaxForN (int n) {
// Stores the last n samples
QQueue<float> samples;
QQueue<int64_t> samples;
MovingPercentile movingMax (n, 1.0f);
for (int s = 0; s < 10000; ++s) {
float sample = random();
int64_t sample = random();
samples.push_back(sample);
if (samples.size() > n)
if (samples.size() > n) {
samples.pop_front();
}
if (samples.size() == 0) {
QFAIL_WITH_MESSAGE("\n\n\n\tWTF\n\tsamples.size() = " << samples.size() << ", n = " << n);
@@ -96,22 +100,22 @@ void MovingPercentileTests::testRunningMaxForN (int n) {
movingMax.updatePercentile(sample);
// Calculate the maximum of the moving samples
float expectedMax = std::numeric_limits<float>::min();
int64_t expectedMax = std::numeric_limits<int64_t>::min();
for (auto val : samples)
expectedMax = std::max(val, expectedMax);
QCOMPARE(movingMax.getValueAtPercentile(), expectedMax);
QVERIFY(movingMax.getValueAtPercentile() - expectedMax == 0L);
}
}
void MovingPercentileTests::testRunningMedianForN (int n) {
// Stores the last n samples
QQueue<float> samples;
QQueue<int64_t> samples;
MovingPercentile movingMedian (n, 0.5f);
for (int s = 0; s < 10000; ++s) {
float sample = random();
int64_t sample = random();
samples.push_back(sample);
if (samples.size() > n)

View file

@@ -25,7 +25,7 @@ private slots:
private:
// Utilities and helper functions
float random();
int64_t random();
void testRunningMinForN (int n);
void testRunningMaxForN (int n);
void testRunningMedianForN (int n);

View file

@@ -28,7 +28,7 @@ void TraceTests::testTraceSerialization() {
auto start = usecTimestampNow();
PROFILE_RANGE(test, "TestEvent")
for (size_t i = 0; i < 10000; ++i) {
SAMPLE_PROFILE_COUNTER(0.1f, test, "TestCounter", { { "i", i } })
SAMPLE_PROFILE_COUNTER(0.1f, test, "TestCounter", { { "i", (int)i } })
}
auto duration = usecTimestampNow() - start;
duration /= USECS_PER_MSEC;

View file

@@ -51,6 +51,7 @@ bool vhacd::VHACDUtil::loadFBX(const QString filename, FBXGeometry& result) {
return false;
}
result = *geom;
delete geom;
reSortFBXGeometryMeshes(result);
} catch (const QString& error) {