Mirror of https://github.com/overte-org/overte.git (synced 2025-04-16 23:26:25 +02:00)

Commit 2d3eb7ba07: Merge branch 'master' of github.com:highfidelity/hifi into config-pucks

140 changed files with 2715 additions and 1361 deletions
@@ -191,7 +191,7 @@ endif()
# link required hifi libraries
link_hifi_libraries(
    shared octree ktx gpu gl gpu-gl procedural model render
    recording fbx networking model-networking entities avatars
    recording fbx networking model-networking entities avatars trackers
    audio audio-client animation script-engine physics
    render-utils entities-renderer avatars-renderer ui auto-updater
    controllers plugins image trackers
interface/resources/images/Announce-Blast.svg (new file, 21 lines)

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 576 576" style="enable-background:new 0 0 576 576;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<path class="st0" d="M359.2,249.6c16.7,43.9,33.3,87.9,50,131.8c1.3,3.4,2.5,6.7-0.2,9.9c-2.8,3.3-6.3,3-10.1,2.2
|
||||
c-42.4-8.8-84.8-17.5-127.2-26.4c-5.6-1.2-10.7-1-16,1.4c-5,2.2-10.3,3.9-15.4,5.9c-8.2,3.2-9.5,7.6-4.1,14.4
|
||||
c22.2,28.7,44.4,57.3,66.6,86c1.2,1.6,2.5,3.1,3.5,4.8c2.3,4.1,1.3,7.8-2.8,10.2c-1.6,0.9-3.3,1.6-5,2.2
|
||||
c-15.6,5.9-31.2,11.8-46.7,17.7c-8.7,3.3-10.2,2.9-15.7-4.2c-24-31-48-62.1-72-93.2c-4.8-6.3-6.5-6.7-14-3.9c-3,1.1-5.9,2.3-8.9,3.3
|
||||
c-10.9,3.5-20.5-1.1-24.6-11.7c-13-34.1-25.9-68.2-38.8-102.4c-4.5-11.7-0.2-21.5,11.6-26.2c9.8-3.9,19.6-7.5,29.4-11.2
|
||||
c28-10.6,56-21.4,84.2-31.8c5.4-2,9.4-5.1,12.8-9.7c25.7-34.6,51.6-69.1,77.3-103.7c2.3-3.1,4.7-5.8,8.9-5.3c4.4,0.5,5.7,4,7,7.5
|
||||
C325.7,161.4,342.4,205.5,359.2,249.6z"/>
|
||||
<path class="st0" d="M348.2,141.2c-2.2,0-4.5-0.6-6.5-1.8c-5.9-3.6-7.8-11.3-4.2-17.2l39.3-64.3c3.6-5.9,11.3-7.8,17.2-4.2
|
||||
c5.9,3.6,7.8,11.3,4.2,17.2l-39.3,64.3C356.5,139.1,352.4,141.2,348.2,141.2z"/>
|
||||
<path class="st0" d="M504.6,363c-0.9,0-1.8-0.1-2.7-0.3L424,345.6c-6.7-1.5-11-8.1-9.5-14.9c1.5-6.7,8.1-11,14.9-9.5l77.8,17.1
|
||||
c6.7,1.5,11,8.1,9.5,14.9C515.5,359,510.3,363,504.6,363z"/>
|
||||
<path class="st0" d="M393.2,237.7c-4.7,0-9.2-2.7-11.3-7.2c-2.9-6.3-0.2-13.7,6.1-16.6l78.4-36.4c6.3-2.9,13.7-0.2,16.6,6.1
|
||||
c2.9,6.3,0.2,13.7-6.1,16.6l-78.4,36.4C396.7,237.3,394.9,237.7,393.2,237.7z"/>
|
||||
</svg>
|
@@ -17,26 +17,26 @@ Rectangle {
    property alias pixelSize: label.font.pixelSize;
    property bool selected: false
    property bool hovered: false
    property bool enabled: false
    property int spacing: 2
    property var action: function () {}
    property string enabledColor: hifi.colors.blueHighlight
    property string disabledColor: hifi.colors.blueHighlight
    property string highlightColor: hifi.colors.blueHighlight;
    width: label.width + 64
    height: 32
    color: hifi.colors.white
    enabled: false

    HifiConstants { id: hifi }

    RalewaySemiBold {
        id: label;
        color: enabledColor
        color: enabled ? enabledColor : disabledColor
        font.pixelSize: 15;
        anchors {
            horizontalCenter: parent.horizontalCenter;
            verticalCenter: parent.verticalCenter;
        }
    }


    Rectangle {
        id: indicator
|
|
|
@ -8,6 +8,7 @@ import "../styles" as HifiStyles
|
|||
import "../styles-uit"
|
||||
import "../"
|
||||
import "."
|
||||
|
||||
Item {
|
||||
id: web
|
||||
HifiConstants { id: hifi }
|
||||
|
@ -18,21 +19,18 @@ Item {
|
|||
property string url
|
||||
property string scriptURL
|
||||
property alias eventBridge: eventBridgeWrapper.eventBridge
|
||||
property bool keyboardEnabled: HMD.active
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
property bool isDesktop: false
|
||||
property string initialPage: ""
|
||||
property bool startingUp: true
|
||||
property alias webView: webview
|
||||
property alias profile: webview.profile
|
||||
property bool remove: false
|
||||
property var urlList: []
|
||||
property var forwardList: []
|
||||
|
||||
|
||||
property int currentPage: -1 // used as a model for repeater
|
||||
property alias pagesModel: pagesModel
|
||||
// Manage own browse history because WebEngineView history is wiped when a new URL is loaded via
|
||||
// onNewViewRequested, e.g., as happens when a social media share button is clicked.
|
||||
property var history: []
|
||||
property int historyIndex: -1
|
||||
|
||||
Rectangle {
|
||||
id: buttons
|
||||
|
@ -51,21 +49,22 @@ Item {
|
|||
|
||||
TabletWebButton {
|
||||
id: back
|
||||
enabledColor: hifi.colors.baseGray
|
||||
enabled: false
|
||||
enabledColor: hifi.colors.darkGray
|
||||
disabledColor: hifi.colors.lightGrayText
|
||||
enabled: historyIndex > 0
|
||||
text: "BACK"
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
onClicked: goBack()
|
||||
hoverEnabled: true
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
TabletWebButton {
|
||||
id: close
|
||||
enabledColor: hifi.colors.darkGray
|
||||
disabledColor: hifi.colors.lightGrayText
|
||||
enabled: true
|
||||
text: "CLOSE"
|
||||
|
||||
MouseArea {
|
||||
|
@ -75,7 +74,6 @@ Item {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
RalewaySemiBold {
|
||||
id: displayUrl
|
||||
color: hifi.colors.baseGray
|
||||
|
@ -90,7 +88,6 @@ Item {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
preventStealing: true
|
||||
|
@ -98,29 +95,10 @@ Item {
|
|||
}
|
||||
}
|
||||
|
||||
ListModel {
|
||||
id: pagesModel
|
||||
onCountChanged: {
|
||||
currentPage = count - 1;
|
||||
if (currentPage > 0) {
|
||||
back.enabledColor = hifi.colors.darkGray;
|
||||
} else {
|
||||
back.enabledColor = hifi.colors.baseGray;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function goBack() {
|
||||
if (webview.canGoBack) {
|
||||
forwardList.push(webview.url);
|
||||
webview.goBack();
|
||||
} else if (web.urlList.length > 0) {
|
||||
var url = web.urlList.pop();
|
||||
loadUrl(url);
|
||||
} else if (web.forwardList.length > 0) {
|
||||
var url = web.forwardList.pop();
|
||||
loadUrl(url);
|
||||
web.forwardList = [];
|
||||
if (historyIndex > 0) {
|
||||
historyIndex--;
|
||||
loadUrl(history[historyIndex]);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -137,19 +115,12 @@ Item {
|
|||
}
|
||||
|
||||
function goForward() {
|
||||
if (currentPage < pagesModel.count - 1) {
|
||||
currentPage++;
|
||||
if (historyIndex < history.length - 1) {
|
||||
historyIndex++;
|
||||
loadUrl(history[historyIndex]);
|
||||
}
|
||||
}
|
||||
|
||||
function gotoPage(url) {
|
||||
urlAppend(url)
|
||||
}
|
||||
|
||||
function isUrlLoaded(url) {
|
||||
return (pagesModel.get(currentPage).webUrl === url);
|
||||
}
|
||||
|
||||
function reloadPage() {
|
||||
view.reloadAndBypassCache()
|
||||
view.setActiveFocusOnPress(true);
|
||||
|
@ -161,36 +132,8 @@ Item {
|
|||
web.url = webview.url;
|
||||
}
|
||||
|
||||
function onInitialPage(url) {
|
||||
return (url === webview.url);
|
||||
}
|
||||
|
||||
|
||||
function urlAppend(url) {
|
||||
var lurl = decodeURIComponent(url)
|
||||
if (lurl[lurl.length - 1] !== "/") {
|
||||
lurl = lurl + "/"
|
||||
}
|
||||
web.urlList.push(url);
|
||||
setBackButtonStatus();
|
||||
}
|
||||
|
||||
function setBackButtonStatus() {
|
||||
if (web.urlList.length > 0 || webview.canGoBack) {
|
||||
back.enabledColor = hifi.colors.darkGray;
|
||||
back.enabled = true;
|
||||
} else {
|
||||
back.enabledColor = hifi.colors.baseGray;
|
||||
back.enabled = false;
|
||||
}
|
||||
}
|
||||
|
||||
onUrlChanged: {
|
||||
loadUrl(url);
|
||||
if (startingUp) {
|
||||
web.initialPage = webview.url;
|
||||
startingUp = false;
|
||||
}
|
||||
}
|
||||
|
||||
QtObject {
|
||||
|
@ -238,7 +181,7 @@ Item {
|
|||
worldId: WebEngineScript.MainWorld
|
||||
}
|
||||
|
||||
property string urlTag: "noDownload=false";
|
||||
property string urlTag: "noDownload=false";
|
||||
userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard, userScript ]
|
||||
|
||||
property string newUrl: ""
|
||||
|
@ -258,13 +201,24 @@ Item {
|
|||
grantFeaturePermission(securityOrigin, feature, true);
|
||||
}
|
||||
|
||||
onUrlChanged: {
|
||||
// Record history, skipping null and duplicate items.
|
||||
var urlString = url + "";
|
||||
urlString = urlString.replace(/\//g, "%2F"); // Consistent representation of "/"s to avoid false differences.
|
||||
if (urlString.length > 0 && (historyIndex === -1 || urlString !== history[historyIndex])) {
|
||||
historyIndex++;
|
||||
history = history.slice(0, historyIndex);
|
||||
history.push(urlString);
|
||||
}
|
||||
}
|
||||
|
||||
onLoadingChanged: {
|
||||
keyboardRaised = false;
|
||||
punctuationMode = false;
|
||||
keyboard.resetShiftMode(false);
|
||||
// Required to support clicking on "hifi://" links
|
||||
if (WebEngineView.LoadStartedStatus == loadRequest.status) {
|
||||
var url = loadRequest.url.toString();
|
||||
var url = loadRequest.url.toString();
|
||||
if (urlHandler.canHandleUrl(url)) {
|
||||
if (urlHandler.handleUrl(url)) {
|
||||
root.stop();
|
||||
|
@ -273,20 +227,15 @@ Item {
|
|||
}
|
||||
|
||||
if (WebEngineView.LoadFailedStatus == loadRequest.status) {
|
||||
console.log(" Tablet WebEngineView failed to laod url: " + loadRequest.url.toString());
|
||||
console.log(" Tablet WebEngineView failed to load url: " + loadRequest.url.toString());
|
||||
}
|
||||
|
||||
if (WebEngineView.LoadSucceededStatus == loadRequest.status) {
|
||||
if (startingUp) {
|
||||
web.initialPage = webview.url;
|
||||
startingUp = false;
|
||||
}
|
||||
webview.forceActiveFocus();
|
||||
}
|
||||
}
|
||||
|
||||
onNewViewRequested: {
|
||||
var currentUrl = webview.url;
|
||||
urlAppend(currentUrl);
|
||||
request.openIn(webview);
|
||||
}
|
||||
}
|
||||
|
@ -305,6 +254,7 @@ Item {
|
|||
|
||||
Component.onCompleted: {
|
||||
web.isDesktop = (typeof desktop !== "undefined");
|
||||
keyboardEnabled = HMD.active;
|
||||
}
|
||||
|
||||
Keys.onPressed: {
|
||||
|
|
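The new TabletWebView.qml above keeps its own URL history because the WebEngineView history is wiped whenever a page is opened through onNewViewRequested, for example when a social media share button is clicked. The pattern is an index into a list: onUrlChanged appends the new URL and drops any entries ahead of the index, goBack decrements the index, and goForward increments it. A minimal C++ sketch of that same pattern, with hypothetical names and not part of this commit:

    #include <string>
    #include <vector>

    // Minimal back/forward history: an index into a list of visited URLs.
    class BrowseHistory {
    public:
        // Record a visit; anything "ahead" of the current entry is discarded,
        // mirroring history = history.slice(0, historyIndex); history.push(urlString);
        void visit(const std::string& url) {
            if (_index == -1 || url != _entries[_index]) {
                _entries.resize(_index + 1);   // drop the forward tail
                _entries.push_back(url);
                ++_index;
            }
        }
        bool canGoBack() const { return _index > 0; }
        bool canGoForward() const { return _index + 1 < (int)_entries.size(); }
        const std::string& back() { return _entries[--_index]; }      // caller checks canGoBack()
        const std::string& forward() { return _entries[++_index]; }   // caller checks canGoForward()

    private:
        std::vector<std::string> _entries;
        int _index { -1 };
    };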
|
@ -167,11 +167,9 @@ Item {
|
|||
Rectangle {
|
||||
id: lozenge;
|
||||
visible: isAnnouncement;
|
||||
color: hifi.colors.redHighlight;
|
||||
color: lozengeHot.containsMouse ? hifi.colors.redAccent : hifi.colors.redHighlight;
|
||||
anchors.fill: infoRow;
|
||||
radius: lozenge.height / 2.0;
|
||||
border.width: lozengeHot.containsMouse ? 4 : 0;
|
||||
border.color: "white";
|
||||
}
|
||||
Row {
|
||||
id: infoRow;
|
||||
|
|
|
@ -23,7 +23,8 @@ Item {
|
|||
property var callbackFunction;
|
||||
property int dialogWidth;
|
||||
property int dialogHeight;
|
||||
property int comboOptionTextSize: 18;
|
||||
property int comboOptionTextSize: 16;
|
||||
property int comboBodyTextSize: 16;
|
||||
FontLoader { id: ralewayRegular; source: "../../fonts/Raleway-Regular.ttf"; }
|
||||
FontLoader { id: ralewaySemiBold; source: "../../fonts/Raleway-SemiBold.ttf"; }
|
||||
visible: false;
|
||||
|
@ -63,7 +64,7 @@ Item {
|
|||
anchors.left: parent.left;
|
||||
anchors.leftMargin: 20;
|
||||
size: 24;
|
||||
color: 'black';
|
||||
color: hifi.colors.darkGray;
|
||||
horizontalAlignment: Text.AlignLeft;
|
||||
verticalAlignment: Text.AlignTop;
|
||||
}
|
||||
|
@ -141,6 +142,7 @@ Item {
|
|||
height: 30;
|
||||
size: comboOptionTextSize;
|
||||
wrapMode: Text.WordWrap;
|
||||
color: hifi.colors.darkGray;
|
||||
}
|
||||
|
||||
RalewayRegular {
|
||||
|
@ -148,11 +150,12 @@ Item {
|
|||
text: bodyText;
|
||||
anchors.top: optionTitle.bottom;
|
||||
anchors.left: comboOptionSelected.right;
|
||||
anchors.leftMargin: 25;
|
||||
anchors.leftMargin: 10;
|
||||
anchors.right: parent.right;
|
||||
anchors.rightMargin: 10;
|
||||
size: comboOptionTextSize;
|
||||
size: comboBodyTextSize;
|
||||
wrapMode: Text.WordWrap;
|
||||
color: hifi.colors.darkGray;
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
|
|
|
@ -156,10 +156,8 @@ Column {
|
|||
function makeFilteredStoryProcessor() { // answer a function(storyData) that adds it to suggestions if it matches
|
||||
var words = filter.toUpperCase().split(/\s+/).filter(identity);
|
||||
function suggestable(story) {
|
||||
if (story.action === 'snapshot') {
|
||||
return true;
|
||||
}
|
||||
return story.place_name !== AddressManager.placename; // Not our entry, but do show other entry points to current domain.
|
||||
// We could filter out places we don't want to suggest, such as those where (story.place_name === AddressManager.placename) or (story.username === Account.username).
|
||||
return true;
|
||||
}
|
||||
function matches(story) {
|
||||
if (!words.length) {
|
||||
|
|
|
@ -1013,10 +1013,10 @@ Rectangle {
|
|||
onClicked: {
|
||||
popupComboDialog("Set your availability:",
|
||||
availabilityComboBox.availabilityStrings,
|
||||
["Your username will be visible in everyone's 'Nearby' list.\nAnyone will be able to jump to your location from within the 'Nearby' list.",
|
||||
"Your location will be visible in the 'Connections' list only for those with whom you are connected or friends.\nThey will be able to jump to your location if the domain allows.",
|
||||
"Your location will be visible in the 'Connections' list only for those with whom you are friends.\nThey will be able to jump to your location if the domain allows.",
|
||||
"Your location will not be visible in the 'Connections' list of any other users. Only domain admins will be able to see your username in the 'Nearby' list."],
|
||||
["Your username will be visible in everyone's 'Nearby' list. Anyone will be able to jump to your location from within the 'Nearby' list.",
|
||||
"Your location will be visible in the 'Connections' list only for those with whom you are connected or friends. They'll be able to jump to your location if the domain allows.",
|
||||
"Your location will be visible in the 'Connections' list only for those with whom you are friends. They'll be able to jump to your location if the domain allows. You will only receive 'Happening Now' notifications in 'Go To' from friends.",
|
||||
"You will appear offline in the 'Connections' list, and you will not receive 'Happening Now' notifications in 'Go To'."],
|
||||
["all", "connections", "friends", "none"]);
|
||||
}
|
||||
onEntered: availabilityComboBox.color = hifi.colors.lightGrayText;
|
||||
|
|
|
@ -34,9 +34,6 @@ ScrollingWindow {
|
|||
property var runningScriptsModel: ListModel { }
|
||||
property bool isHMD: false
|
||||
|
||||
onVisibleChanged: console.log("Running scripts visible changed to " + visible)
|
||||
onShownChanged: console.log("Running scripts visible changed to " + visible)
|
||||
|
||||
Settings {
|
||||
category: "Overlay.RunningScripts"
|
||||
property alias x: root.x
|
||||
|
|
|
@ -65,7 +65,11 @@ Item {
|
|||
});
|
||||
|
||||
// pass a reference to the tabletRoot object to the button.
|
||||
button.tabletRoot = parent.parent;
|
||||
if (tabletRoot) {
|
||||
button.tabletRoot = tabletRoot;
|
||||
} else {
|
||||
button.tabletRoot = parent.parent;
|
||||
}
|
||||
|
||||
sortButtons();
|
||||
|
||||
|
|
|
@ -129,12 +129,12 @@
|
|||
#include <Preferences.h>
|
||||
#include <display-plugins/CompositorHelper.h>
|
||||
#include <trackers/EyeTracker.h>
|
||||
|
||||
#include <avatars-renderer/ScriptAvatar.h>
|
||||
|
||||
#include "AudioClient.h"
|
||||
#include "audio/AudioScope.h"
|
||||
#include "avatar/AvatarManager.h"
|
||||
#include "avatar/ScriptAvatar.h"
|
||||
#include "avatar/MyHead.h"
|
||||
#include "CrashHandler.h"
|
||||
#include "devices/DdeFaceTracker.h"
|
||||
#include "devices/Leapmotion.h"
|
||||
|
@ -1586,6 +1586,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
connect(&domainHandler, &DomainHandler::hostnameChanged, this, &Application::addAssetToWorldMessageClose);
|
||||
|
||||
updateSystemTabletMode();
|
||||
|
||||
connect(&_myCamera, &Camera::modeUpdated, this, &Application::cameraModeChanged);
|
||||
}
|
||||
|
||||
void Application::domainConnectionRefused(const QString& reasonMessage, int reasonCodeInt, const QString& extraInfo) {
|
||||
|
@ -2191,7 +2193,7 @@ void Application::paintGL() {
|
|||
_myCamera.setOrientation(glm::quat_cast(camMat));
|
||||
} else {
|
||||
_myCamera.setPosition(myAvatar->getDefaultEyePosition());
|
||||
_myCamera.setOrientation(myAvatar->getHead()->getCameraOrientation());
|
||||
_myCamera.setOrientation(myAvatar->getMyHead()->getCameraOrientation());
|
||||
}
|
||||
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
|
||||
if (isHMDMode()) {
|
||||
|
@ -4087,6 +4089,30 @@ void Application::cycleCamera() {
|
|||
cameraMenuChanged(); // handle the menu change
|
||||
}
|
||||
|
||||
void Application::cameraModeChanged() {
|
||||
switch (_myCamera.getMode()) {
|
||||
case CAMERA_MODE_FIRST_PERSON:
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, true);
|
||||
break;
|
||||
case CAMERA_MODE_THIRD_PERSON:
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, true);
|
||||
break;
|
||||
case CAMERA_MODE_MIRROR:
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::FullscreenMirror, true);
|
||||
break;
|
||||
case CAMERA_MODE_INDEPENDENT:
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::IndependentMode, true);
|
||||
break;
|
||||
case CAMERA_MODE_ENTITY:
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::CameraEntityMode, true);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
cameraMenuChanged();
|
||||
}
|
||||
|
||||
|
||||
void Application::cameraMenuChanged() {
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
|
||||
if (_myCamera.getMode() != CAMERA_MODE_MIRROR) {
|
||||
|
@ -5435,7 +5461,6 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
scriptEngine->registerGlobalObject("Test", TestScriptingInterface::getInstance());
|
||||
}
|
||||
|
||||
scriptEngine->registerGlobalObject("Overlays", &_overlays);
|
||||
scriptEngine->registerGlobalObject("Rates", new RatesScriptingInterface(this));
|
||||
|
||||
// hook our avatar and avatar hash map object into this script engine
|
||||
|
@ -5534,6 +5559,8 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
|
||||
auto entityScriptServerLog = DependencyManager::get<EntityScriptServerLogClient>();
|
||||
scriptEngine->registerGlobalObject("EntityScriptServerLog", entityScriptServerLog.data());
|
||||
scriptEngine->registerGlobalObject("AvatarInputs", AvatarInputs::getInstance());
|
||||
|
||||
|
||||
qScriptRegisterMetaType(scriptEngine, OverlayIDtoScriptValue, OverlayIDfromScriptValue);
|
||||
|
||||
|
|
|
@ -373,6 +373,7 @@ public slots:
|
|||
static void showHelp();
|
||||
|
||||
void cycleCamera();
|
||||
void cameraModeChanged();
|
||||
void cameraMenuChanged();
|
||||
void toggleOverlays();
|
||||
void setOverlaysVisible(bool visible);
|
||||
|
|
|
@ -12,6 +12,9 @@
|
|||
|
||||
#include "Application.h"
|
||||
|
||||
PickRay FancyCamera::computePickRay(float x, float y) const {
|
||||
return qApp->computePickRay(x, y);
|
||||
}
|
||||
|
||||
QUuid FancyCamera::getCameraEntity() const {
|
||||
if (_cameraEntity != nullptr) {
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
#ifndef hifi_FancyCamera_h
|
||||
#define hifi_FancyCamera_h
|
||||
|
||||
#include "Camera.h"
|
||||
#include <shared/Camera.h>
|
||||
|
||||
#include <EntityTypes.h>
|
||||
|
||||
|
@ -30,6 +30,8 @@ public:
|
|||
FancyCamera() : Camera() {}
|
||||
|
||||
EntityItemPointer getCameraEntityPointer() const { return _cameraEntity; }
|
||||
PickRay computePickRay(float x, float y) const override;
|
||||
|
||||
|
||||
public slots:
|
||||
QUuid getCameraEntity() const;
|
||||
|
|
|
@ -142,11 +142,6 @@ void renderWorldBox(gpu::Batch& batch) {
|
|||
geometryCache->renderSolidSphereInstance(batch, GREY);
|
||||
}
|
||||
|
||||
// Return a random vector of average length 1
|
||||
const glm::vec3 randVector() {
|
||||
return glm::vec3(randFloat() - 0.5f, randFloat() - 0.5f, randFloat() - 0.5f) * 2.0f;
|
||||
}
|
||||
|
||||
// Do some basic timing tests and report the results
|
||||
void runTimingTests() {
|
||||
// How long does it take to make a call to get the time?
|
||||
|
|
|
@ -17,9 +17,6 @@
|
|||
|
||||
#include <gpu/Batch.h>
|
||||
|
||||
float randFloat();
|
||||
const glm::vec3 randVector();
|
||||
|
||||
void renderWorldBox(gpu::Batch& batch);
|
||||
|
||||
void runTimingTests();
|
||||
|
|
|
@ -32,9 +32,9 @@
|
|||
#include <SettingHandle.h>
|
||||
#include <UsersScriptingInterface.h>
|
||||
#include <UUID.h>
|
||||
#include <avatars-renderer/OtherAvatar.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Avatar.h"
|
||||
#include "AvatarManager.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "Menu.h"
|
||||
|
@ -299,7 +299,7 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
|
|||
}
|
||||
|
||||
AvatarSharedPointer AvatarManager::newSharedAvatar() {
|
||||
return std::make_shared<Avatar>(qApp->thread(), std::make_shared<Rig>());
|
||||
return std::make_shared<OtherAvatar>(qApp->thread(), std::make_shared<Rig>());
|
||||
}
|
||||
|
||||
void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar, KillAvatarReason removalReason) {
|
||||
|
|
|
@ -21,13 +21,11 @@
|
|||
#include <PIDController.h>
|
||||
#include <SimpleMovingAverage.h>
|
||||
#include <shared/RateCounter.h>
|
||||
#include <avatars-renderer/AvatarMotionState.h>
|
||||
#include <avatars-renderer/ScriptAvatar.h>
|
||||
|
||||
#include "Avatar.h"
|
||||
#include "MyAvatar.h"
|
||||
#include "AvatarMotionState.h"
|
||||
#include "ScriptAvatar.h"
|
||||
|
||||
class MyAvatar;
|
||||
class AudioInjector;
|
||||
|
||||
class AvatarManager : public AvatarHashMap {
|
||||
|
|
|
@ -9,6 +9,8 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "MyAvatar.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
|
||||
|
@ -43,11 +45,12 @@
|
|||
#include <RecordingScriptingInterface.h>
|
||||
#include <trackers/FaceTracker.h>
|
||||
|
||||
#include "MyHead.h"
|
||||
#include "MySkeletonModel.h"
|
||||
#include "Application.h"
|
||||
#include "AvatarManager.h"
|
||||
#include "AvatarActionHold.h"
|
||||
#include "Menu.h"
|
||||
#include "MyAvatar.h"
|
||||
#include "Util.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "DebugDraw.h"
|
||||
|
@ -96,23 +99,12 @@ static const glm::quat DEFAULT_AVATAR_RIGHTFOOT_ROT { -0.4016716778278351f, 0.91
|
|||
|
||||
MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
|
||||
Avatar(thread, rig),
|
||||
_wasPushing(false),
|
||||
_isPushing(false),
|
||||
_isBeingPushed(false),
|
||||
_isBraking(false),
|
||||
_isAway(false),
|
||||
_boomLength(ZOOM_DEFAULT),
|
||||
_yawSpeed(YAW_SPEED_DEFAULT),
|
||||
_pitchSpeed(PITCH_SPEED_DEFAULT),
|
||||
_thrust(0.0f),
|
||||
_actionMotorVelocity(0.0f),
|
||||
_scriptedMotorVelocity(0.0f),
|
||||
_scriptedMotorTimescale(DEFAULT_SCRIPTED_MOTOR_TIMESCALE),
|
||||
_scriptedMotorFrame(SCRIPTED_MOTOR_CAMERA_FRAME),
|
||||
_motionBehaviors(AVATAR_MOTION_DEFAULTS),
|
||||
_characterController(this),
|
||||
_lookAtTargetAvatar(),
|
||||
_shouldRender(true),
|
||||
_eyeContactTarget(LEFT_EYE),
|
||||
_realWorldFieldOfView("realWorldFieldOfView",
|
||||
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
|
||||
|
@ -129,6 +121,14 @@ MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
|
|||
_audioListenerMode(FROM_HEAD),
|
||||
_hmdAtRestDetector(glm::vec3(0), glm::quat())
|
||||
{
|
||||
|
||||
// give the pointer to our head to inherited _headData variable from AvatarData
|
||||
_headData = new MyHead(this);
|
||||
|
||||
_skeletonModel = std::make_shared<MySkeletonModel>(this, nullptr, rig);
|
||||
connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);
|
||||
|
||||
|
||||
using namespace recording;
|
||||
_skeletonModel->flagAsCauterized();
|
||||
|
||||
|
@ -536,7 +536,7 @@ void MyAvatar::simulate(float deltaTime) {
|
|||
}
|
||||
head->setPosition(headPosition);
|
||||
head->setScale(getUniformScale());
|
||||
head->simulate(deltaTime, true);
|
||||
head->simulate(deltaTime);
|
||||
}
|
||||
|
||||
// Record avatars movements.
|
||||
|
@ -1450,12 +1450,12 @@ void MyAvatar::updateMotors() {
|
|||
glm::quat motorRotation;
|
||||
if (_motionBehaviors & AVATAR_MOTION_ACTION_MOTOR_ENABLED) {
|
||||
if (_characterController.getState() == CharacterController::State::Hover) {
|
||||
motorRotation = getHead()->getCameraOrientation();
|
||||
motorRotation = getMyHead()->getCameraOrientation();
|
||||
} else {
|
||||
// non-hovering = walking: follow camera twist about vertical but not lift
|
||||
// so we decompose camera's rotation and store the twist part in motorRotation
|
||||
glm::quat liftRotation;
|
||||
swingTwistDecomposition(getHead()->getCameraOrientation(), _worldUpDirection, liftRotation, motorRotation);
|
||||
swingTwistDecomposition(getMyHead()->getCameraOrientation(), _worldUpDirection, liftRotation, motorRotation);
|
||||
}
|
||||
const float DEFAULT_MOTOR_TIMESCALE = 0.2f;
|
||||
const float INVALID_MOTOR_TIMESCALE = 1.0e6f;
|
||||
|
@ -1469,7 +1469,7 @@ void MyAvatar::updateMotors() {
|
|||
}
|
||||
if (_motionBehaviors & AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED) {
|
||||
if (_scriptedMotorFrame == SCRIPTED_MOTOR_CAMERA_FRAME) {
|
||||
motorRotation = getHead()->getCameraOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
|
||||
motorRotation = getMyHead()->getCameraOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
|
||||
} else if (_scriptedMotorFrame == SCRIPTED_MOTOR_AVATAR_FRAME) {
|
||||
motorRotation = getOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
|
||||
} else {
|
||||
|
@ -1814,7 +1814,7 @@ void MyAvatar::updateOrientation(float deltaTime) {
|
|||
if (getCharacterController()->getState() == CharacterController::State::Hover) {
|
||||
|
||||
// This is the direction the user desires to fly in.
|
||||
glm::vec3 desiredFacing = getHead()->getCameraOrientation() * Vectors::UNIT_Z;
|
||||
glm::vec3 desiredFacing = getMyHead()->getCameraOrientation() * Vectors::UNIT_Z;
|
||||
desiredFacing.y = 0.0f;
|
||||
|
||||
// This is our reference frame, it is captured when the user begins to move.
|
||||
|
@ -1958,7 +1958,7 @@ void MyAvatar::updatePosition(float deltaTime) {
|
|||
if (!_hoverReferenceCameraFacingIsCaptured && (fabs(getDriveKey(TRANSLATE_Z)) > 0.1f || fabs(getDriveKey(TRANSLATE_X)) > 0.1f)) {
|
||||
_hoverReferenceCameraFacingIsCaptured = true;
|
||||
// transform the camera facing vector into sensor space.
|
||||
_hoverReferenceCameraFacing = transformVectorFast(glm::inverse(_sensorToWorldMatrix), getHead()->getCameraOrientation() * Vectors::UNIT_Z);
|
||||
_hoverReferenceCameraFacing = transformVectorFast(glm::inverse(_sensorToWorldMatrix), getMyHead()->getCameraOrientation() * Vectors::UNIT_Z);
|
||||
} else if (_hoverReferenceCameraFacingIsCaptured && (fabs(getDriveKey(TRANSLATE_Z)) <= 0.1f && fabs(getDriveKey(TRANSLATE_X)) <= 0.1f)) {
|
||||
_hoverReferenceCameraFacingIsCaptured = false;
|
||||
}
|
||||
|
@ -2804,3 +2804,7 @@ void MyAvatar::updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose&
|
|||
});
|
||||
}
|
||||
}
|
||||
|
||||
const MyHead* MyAvatar::getMyHead() const {
|
||||
return static_cast<const MyHead*>(getHead());
|
||||
}
|
||||
|
|
|
@ -22,14 +22,15 @@
|
|||
|
||||
#include <controllers/Pose.h>
|
||||
#include <controllers/Actions.h>
|
||||
#include <avatars-renderer/Avatar.h>
|
||||
|
||||
#include "Avatar.h"
|
||||
#include "AtRestDetector.h"
|
||||
#include "MyCharacterController.h"
|
||||
#include <ThreadSafeValueCache.h>
|
||||
|
||||
class AvatarActionHold;
|
||||
class ModelItemID;
|
||||
class MyHead;
|
||||
|
||||
enum eyeContactTarget {
|
||||
LEFT_EYE,
|
||||
|
@ -149,6 +150,7 @@ public:
|
|||
explicit MyAvatar(QThread* thread, RigPointer rig);
|
||||
~MyAvatar();
|
||||
|
||||
void instantiableAvatar() override {};
|
||||
void registerMetaTypes(QScriptEngine* engine);
|
||||
|
||||
virtual void simulateAttachments(float deltaTime) override;
|
||||
|
@ -353,6 +355,7 @@ public:
|
|||
|
||||
eyeContactTarget getEyeContactTarget();
|
||||
|
||||
const MyHead* getMyHead() const;
|
||||
Q_INVOKABLE glm::vec3 getHeadPosition() const { return getHead()->getPosition(); }
|
||||
Q_INVOKABLE float getHeadFinalYaw() const { return getHead()->getFinalYaw(); }
|
||||
Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); }
|
||||
|
@ -589,17 +592,17 @@ private:
|
|||
std::array<float, MAX_DRIVE_KEYS> _driveKeys;
|
||||
std::bitset<MAX_DRIVE_KEYS> _disabledDriveKeys;
|
||||
|
||||
bool _wasPushing;
|
||||
bool _isPushing;
|
||||
bool _isBeingPushed;
|
||||
bool _isBraking;
|
||||
bool _isAway;
|
||||
bool _wasPushing { false };
|
||||
bool _isPushing { false };
|
||||
bool _isBeingPushed { false };
|
||||
bool _isBraking { false };
|
||||
bool _isAway { false };
|
||||
|
||||
float _boomLength;
|
||||
float _boomLength { ZOOM_DEFAULT };
|
||||
float _yawSpeed; // degrees/sec
|
||||
float _pitchSpeed; // degrees/sec
|
||||
|
||||
glm::vec3 _thrust; // impulse accumulator for outside sources
|
||||
glm::vec3 _thrust { 0.0f }; // impulse accumulator for outside sources
|
||||
|
||||
glm::vec3 _actionMotorVelocity; // target local-frame velocity of avatar (default controller actions)
|
||||
glm::vec3 _scriptedMotorVelocity; // target local-frame velocity of avatar (analog script)
|
||||
|
@ -615,7 +618,7 @@ private:
|
|||
|
||||
AvatarWeakPointer _lookAtTargetAvatar;
|
||||
glm::vec3 _targetAvatarPosition;
|
||||
bool _shouldRender;
|
||||
bool _shouldRender { true };
|
||||
float _oculusYawOffset;
|
||||
|
||||
eyeContactTarget _eyeContactTarget;
|
||||
|
|
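The MyAvatar.h hunk above replaces a long constructor initializer list with in-class default member initializers, for example bool _wasPushing { false }; and float _boomLength { ZOOM_DEFAULT };. A short sketch of the idiom with made-up names rather than the real members:

    #include <glm/glm.hpp>

    // With brace initializers on the declarations, every constructor that does not
    // explicitly initialize a member still gets these defaults, so the constructor
    // body no longer has to repeat them.
    class ExampleAvatarState {
    public:
        ExampleAvatarState() = default;    // members start with the braced defaults below

    private:
        bool _wasPushing { false };
        bool _isBraking { false };
        float _boomLength { 3.0f };        // stand-in value; the real code uses ZOOM_DEFAULT
        glm::vec3 _thrust { 0.0f };        // impulse accumulator starts at zero
    };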
interface/src/avatar/MyHead.cpp (new file, 76 lines)

@@ -0,0 +1,76 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "MyHead.h"

#include <glm/gtx/quaternion.hpp>
#include <gpu/Batch.h>

#include <NodeList.h>
#include <recording/Deck.h>
#include <Rig.h>
#include <trackers/FaceTracker.h>
#include <trackers/EyeTracker.h>

#include "devices/DdeFaceTracker.h"
#include "Application.h"
#include "MyAvatar.h"

using namespace std;

MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
}

glm::quat MyHead::getCameraOrientation() const {
    // NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
    // you may wonder why this code is here. This method will be called while in Oculus mode to determine how
    // to change the driving direction while in Oculus mode. It is used to support driving toward where your
    // head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
    // always the same.
    if (qApp->isHMDMode()) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
        return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
    } else {
        Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
        return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
    }
}

void MyHead::simulate(float deltaTime) {
    auto player = DependencyManager::get<recording::Deck>();
    // Only use face trackers when not playing back a recording.
    if (!player->isPlaying()) {
        FaceTracker* faceTracker = qApp->getActiveFaceTracker();
        _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
        if (_isFaceTrackerConnected) {
            _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();

            if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {

                if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
                    calculateMouthShapes(deltaTime);

                    const int JAW_OPEN_BLENDSHAPE = 21;
                    const int MMMM_BLENDSHAPE = 34;
                    const int FUNNEL_BLENDSHAPE = 40;
                    const int SMILE_LEFT_BLENDSHAPE = 28;
                    const int SMILE_RIGHT_BLENDSHAPE = 29;
                    _blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
                    _blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
                    _blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
                    _blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
                    _blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
                }
                applyEyelidOffset(getFinalOrientationInWorldFrame());
            }
        }
        auto eyeTracker = DependencyManager::get<EyeTracker>();
        _isEyeTrackerConnected = eyeTracker->isTracking();
    }
    Parent::simulate(deltaTime);
}
|
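In the HMD branch of MyHead::getCameraOrientation() above, the head's look direction is produced by composing the avatar's sensor-to-world transform with the HMD's orientation in sensor space. A small stand-alone sketch of that composition (the free function is hypothetical; only the expression itself comes from the commit):

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Rotate an HMD orientation from sensor space into world space: take the rotation
    // part of the sensor-to-world matrix and apply it on the left, as
    // getCameraOrientation() does in HMD mode.
    glm::quat hmdOrientationInWorld(const glm::mat4& sensorToWorldMatrix,
                                    const glm::quat& hmdSensorOrientation) {
        return glm::quat_cast(sensorToWorldMatrix) * hmdSensorOrientation;
    }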
interface/src/avatar/MyHead.h (new file, 30 lines)

@@ -0,0 +1,30 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_MyHead_h
#define hifi_MyHead_h

#include <avatars-renderer/Head.h>

class MyAvatar;
class MyHead : public Head {
    using Parent = Head;
public:
    explicit MyHead(MyAvatar* owningAvatar);

    /// \return orientationBody * orientationBasePitch
    glm::quat getCameraOrientation() const;
    void simulate(float deltaTime) override;

private:
    // disallow copies of the Head, copy of owning Avatar is disallowed too
    MyHead(const Head&);
    MyHead& operator= (const MyHead&);
};

#endif // hifi_MyHead_h
interface/src/avatar/MySkeletonModel.cpp (new file, 156 lines)

@@ -0,0 +1,156 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2017/04/27
|
||||
// Copyright 2013-2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "MySkeletonModel.h"
|
||||
|
||||
#include <avatars-renderer/Avatar.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "InterfaceLogging.h"
|
||||
|
||||
MySkeletonModel::MySkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer rig) : SkeletonModel(owningAvatar, parent, rig) {
|
||||
}
|
||||
|
||||
Rig::CharacterControllerState convertCharacterControllerState(CharacterController::State state) {
|
||||
switch (state) {
|
||||
default:
|
||||
case CharacterController::State::Ground:
|
||||
return Rig::CharacterControllerState::Ground;
|
||||
case CharacterController::State::Takeoff:
|
||||
return Rig::CharacterControllerState::Takeoff;
|
||||
case CharacterController::State::InAir:
|
||||
return Rig::CharacterControllerState::InAir;
|
||||
case CharacterController::State::Hover:
|
||||
return Rig::CharacterControllerState::Hover;
|
||||
};
|
||||
}
|
||||
|
||||
// Called within Model::simulate call, below.
|
||||
void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
||||
const FBXGeometry& geometry = getFBXGeometry();
|
||||
|
||||
Head* head = _owningAvatar->getHead();
|
||||
|
||||
// make sure lookAt is not too close to face (avoid crosseyes)
|
||||
glm::vec3 lookAt = _owningAvatar->isMyAvatar() ? head->getLookAtPosition() : head->getCorrectedLookAtPosition();
|
||||
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
|
||||
|
||||
Rig::HeadParameters headParams;
|
||||
|
||||
// input action is the highest priority source for head orientation.
|
||||
auto avatarHeadPose = myAvatar->getHeadControllerPoseInAvatarFrame();
|
||||
if (avatarHeadPose.isValid()) {
|
||||
glm::mat4 rigHeadMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
|
||||
headParams.rigHeadPosition = extractTranslation(rigHeadMat);
|
||||
headParams.rigHeadOrientation = glmExtractRotation(rigHeadMat);
|
||||
headParams.headEnabled = true;
|
||||
} else {
|
||||
if (qApp->isHMDMode()) {
|
||||
// get HMD position from sensor space into world space, and back into rig space
|
||||
glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
|
||||
glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
|
||||
glm::mat4 worldToRig = glm::inverse(rigToWorld);
|
||||
glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
|
||||
_rig->computeHeadFromHMD(AnimPose(rigHMDMat), headParams.rigHeadPosition, headParams.rigHeadOrientation);
|
||||
headParams.headEnabled = true;
|
||||
} else {
|
||||
// even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and down in desktop mode.
|
||||
// preMult 180 is necessary to convert from avatar to rig coordinates.
|
||||
// postMult 180 is necessary to convert head from -z forward to z forward.
|
||||
headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
|
||||
headParams.headEnabled = false;
|
||||
}
|
||||
}
|
||||
|
||||
auto avatarHipsPose = myAvatar->getHipsControllerPoseInAvatarFrame();
|
||||
if (avatarHipsPose.isValid()) {
|
||||
glm::mat4 rigHipsMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHipsPose.getRotation(), avatarHipsPose.getTranslation());
|
||||
headParams.hipsMatrix = rigHipsMat;
|
||||
headParams.hipsEnabled = true;
|
||||
} else {
|
||||
headParams.hipsEnabled = false;
|
||||
}
|
||||
|
||||
auto avatarSpine2Pose = myAvatar->getSpine2ControllerPoseInAvatarFrame();
|
||||
if (avatarSpine2Pose.isValid()) {
|
||||
glm::mat4 rigSpine2Mat = Matrices::Y_180 * createMatFromQuatAndPos(avatarSpine2Pose.getRotation(), avatarSpine2Pose.getTranslation());
|
||||
headParams.spine2Matrix = rigSpine2Mat;
|
||||
headParams.spine2Enabled = true;
|
||||
} else {
|
||||
headParams.spine2Enabled = false;
|
||||
}
|
||||
|
||||
headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;
|
||||
|
||||
_rig->updateFromHeadParameters(headParams, deltaTime);
|
||||
|
||||
Rig::HandAndFeetParameters handAndFeetParams;
|
||||
|
||||
auto leftPose = myAvatar->getLeftHandControllerPoseInAvatarFrame();
|
||||
if (leftPose.isValid()) {
|
||||
handAndFeetParams.isLeftEnabled = true;
|
||||
handAndFeetParams.leftPosition = Quaternions::Y_180 * leftPose.getTranslation();
|
||||
handAndFeetParams.leftOrientation = Quaternions::Y_180 * leftPose.getRotation();
|
||||
} else {
|
||||
handAndFeetParams.isLeftEnabled = false;
|
||||
}
|
||||
|
||||
auto rightPose = myAvatar->getRightHandControllerPoseInAvatarFrame();
|
||||
if (rightPose.isValid()) {
|
||||
handAndFeetParams.isRightEnabled = true;
|
||||
handAndFeetParams.rightPosition = Quaternions::Y_180 * rightPose.getTranslation();
|
||||
handAndFeetParams.rightOrientation = Quaternions::Y_180 * rightPose.getRotation();
|
||||
} else {
|
||||
handAndFeetParams.isRightEnabled = false;
|
||||
}
|
||||
|
||||
auto leftFootPose = myAvatar->getLeftFootControllerPoseInAvatarFrame();
|
||||
if (leftFootPose.isValid()) {
|
||||
handAndFeetParams.isLeftFootEnabled = true;
|
||||
handAndFeetParams.leftFootPosition = Quaternions::Y_180 * leftFootPose.getTranslation();
|
||||
handAndFeetParams.leftFootOrientation = Quaternions::Y_180 * leftFootPose.getRotation();
|
||||
} else {
|
||||
handAndFeetParams.isLeftFootEnabled = false;
|
||||
}
|
||||
|
||||
auto rightFootPose = myAvatar->getRightFootControllerPoseInAvatarFrame();
|
||||
if (rightFootPose.isValid()) {
|
||||
handAndFeetParams.isRightFootEnabled = true;
|
||||
handAndFeetParams.rightFootPosition = Quaternions::Y_180 * rightFootPose.getTranslation();
|
||||
handAndFeetParams.rightFootOrientation = Quaternions::Y_180 * rightFootPose.getRotation();
|
||||
} else {
|
||||
handAndFeetParams.isRightFootEnabled = false;
|
||||
}
|
||||
|
||||
handAndFeetParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();
|
||||
handAndFeetParams.bodyCapsuleHalfHeight = myAvatar->getCharacterController()->getCapsuleHalfHeight();
|
||||
handAndFeetParams.bodyCapsuleLocalOffset = myAvatar->getCharacterController()->getCapsuleLocalOffset();
|
||||
|
||||
_rig->updateFromHandAndFeetParameters(handAndFeetParams, deltaTime);
|
||||
|
||||
Rig::CharacterControllerState ccState = convertCharacterControllerState(myAvatar->getCharacterController()->getState());
|
||||
|
||||
auto velocity = myAvatar->getLocalVelocity();
|
||||
auto position = myAvatar->getLocalPosition();
|
||||
auto orientation = myAvatar->getLocalOrientation();
|
||||
_rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);
|
||||
|
||||
Rig::EyeParameters eyeParams;
|
||||
eyeParams.eyeLookAt = lookAt;
|
||||
eyeParams.eyeSaccade = head->getSaccade();
|
||||
eyeParams.modelRotation = getRotation();
|
||||
eyeParams.modelTranslation = getTranslation();
|
||||
eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
|
||||
eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;
|
||||
|
||||
_rig->updateFromEyeParameters(eyeParams);
|
||||
|
||||
// evaluate AnimGraph animation and update jointStates.
|
||||
Parent::updateRig(deltaTime, parentTransform);
|
||||
}
|
||||
|
interface/src/avatar/MySkeletonModel.h (new file, 26 lines)

@@ -0,0 +1,26 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_MySkeletonModel_h
#define hifi_MySkeletonModel_h

#include <avatars-renderer/SkeletonModel.h>

/// A skeleton loaded from a model.
class MySkeletonModel : public SkeletonModel {
    Q_OBJECT

private:
    using Parent = SkeletonModel;

public:
    MySkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr, RigPointer rig = nullptr);
    void updateRig(float deltaTime, glm::mat4 parentTransform) override;
};

#endif // hifi_MySkeletonModel_h
|
|
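Both new classes above declare a "using Parent = ...;" alias and finish their overrides by chaining to the base implementation: Parent::simulate() in MyHead and Parent::updateRig() in MySkeletonModel. A generic sketch of the idiom, not taken from the commit:

    class Base {
    public:
        virtual ~Base() = default;
        virtual void update(float deltaTime) { /* shared behaviour */ }
    };

    class Derived : public Base {
        using Parent = Base;              // single place that names the base class
    public:
        void update(float deltaTime) override {
            // ... derived-specific work first ...
            Parent::update(deltaTime);    // then chain to the base, as the overrides above do
        }
    };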
@ -9,20 +9,21 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "DdeFaceTracker.h"
|
||||
|
||||
#include <SharedUtil.h>
|
||||
|
||||
#include <QCoreApplication>
|
||||
#include <QJsonDocument>
|
||||
#include <QJsonArray>
|
||||
#include <QJsonObject>
|
||||
#include <QTimer>
|
||||
#include <QtCore/QCoreApplication>
|
||||
#include <QtCore/QJsonDocument>
|
||||
#include <QtCore/QJsonArray>
|
||||
#include <QtCore/QJsonObject>
|
||||
#include <QtCore/QTimer>
|
||||
|
||||
#include <GLMHelpers.h>
|
||||
#include <NumericalConstants.h>
|
||||
#include <FaceshiftConstants.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "DdeFaceTracker.h"
|
||||
#include "FaceshiftConstants.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "Menu.h"
|
||||
|
||||
|
|
|
@ -12,6 +12,8 @@
|
|||
#ifndef hifi_DdeFaceTracker_h
|
||||
#define hifi_DdeFaceTracker_h
|
||||
|
||||
#include <QtCore/QtGlobal>
|
||||
|
||||
#if defined(Q_OS_WIN) || defined(Q_OS_OSX)
|
||||
#define HAVE_DDE
|
||||
#endif
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
//
|
||||
// Leapmotion.cpp
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Sam Cake on 6/2/2014
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
|
@ -10,10 +7,11 @@
|
|||
//
|
||||
|
||||
#include "Leapmotion.h"
|
||||
#include "Menu.h"
|
||||
|
||||
#include <NumericalConstants.h>
|
||||
|
||||
#include "Menu.h"
|
||||
|
||||
const int PALMROOT_NUM_JOINTS = 3;
|
||||
const int FINGER_NUM_JOINTS = 4;
|
||||
const int HAND_NUM_JOINTS = FINGER_NUM_JOINTS*5+PALMROOT_NUM_JOINTS;
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
//
|
||||
// Leapmotion.h
|
||||
// interface/src/devices
|
||||
//
|
||||
// Created by Sam Cake on 6/2/2014
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
|
|
|
@ -34,7 +34,7 @@ class AvatarInputs : public QQuickItem {
|
|||
|
||||
public:
|
||||
static AvatarInputs* getInstance();
|
||||
float loudnessToAudioLevel(float loudness);
|
||||
Q_INVOKABLE float loudnessToAudioLevel(float loudness);
|
||||
AvatarInputs(QQuickItem* parent = nullptr);
|
||||
void update();
|
||||
bool showAudioTools() const { return _showAudioTools; }
|
||||
|
|
|
@ -28,11 +28,15 @@ const int MAX_HISTORY_SIZE = 64;
|
|||
const QString COMMAND_STYLE = "color: #266a9b;";
|
||||
|
||||
const QString RESULT_SUCCESS_STYLE = "color: #677373;";
|
||||
const QString RESULT_INFO_STYLE = "color: #223bd1;";
|
||||
const QString RESULT_WARNING_STYLE = "color: #d13b22;";
|
||||
const QString RESULT_ERROR_STYLE = "color: #d13b22;";
|
||||
|
||||
const QString GUTTER_PREVIOUS_COMMAND = "<span style=\"color: #57b8bb;\"><</span>";
|
||||
const QString GUTTER_ERROR = "<span style=\"color: #d13b22;\">X</span>";
|
||||
|
||||
const QString JSConsole::_consoleFileName { "about:console" };
|
||||
|
||||
JSConsole::JSConsole(QWidget* parent, ScriptEngine* scriptEngine) :
|
||||
QWidget(parent),
|
||||
_ui(new Ui::Console),
|
||||
|
@ -77,6 +81,8 @@ void JSConsole::setScriptEngine(ScriptEngine* scriptEngine) {
|
|||
}
|
||||
if (_scriptEngine != NULL) {
|
||||
disconnect(_scriptEngine, &ScriptEngine::printedMessage, this, &JSConsole::handlePrint);
|
||||
disconnect(_scriptEngine, &ScriptEngine::infoMessage, this, &JSConsole::handleInfo);
|
||||
disconnect(_scriptEngine, &ScriptEngine::warningMessage, this, &JSConsole::handleWarning);
|
||||
disconnect(_scriptEngine, &ScriptEngine::errorMessage, this, &JSConsole::handleError);
|
||||
if (_ownScriptEngine) {
|
||||
_scriptEngine->deleteLater();
|
||||
|
@ -84,10 +90,12 @@ void JSConsole::setScriptEngine(ScriptEngine* scriptEngine) {
|
|||
}
|
||||
|
||||
// if scriptEngine is NULL then create one and keep track of it using _ownScriptEngine
|
||||
_ownScriptEngine = scriptEngine == NULL;
|
||||
_scriptEngine = _ownScriptEngine ? DependencyManager::get<ScriptEngines>()->loadScript(QString(), false) : scriptEngine;
|
||||
_ownScriptEngine = (scriptEngine == NULL);
|
||||
_scriptEngine = _ownScriptEngine ? DependencyManager::get<ScriptEngines>()->loadScript(_consoleFileName, false) : scriptEngine;
|
||||
|
||||
connect(_scriptEngine, &ScriptEngine::printedMessage, this, &JSConsole::handlePrint);
|
||||
connect(_scriptEngine, &ScriptEngine::infoMessage, this, &JSConsole::handleInfo);
|
||||
connect(_scriptEngine, &ScriptEngine::warningMessage, this, &JSConsole::handleWarning);
|
||||
connect(_scriptEngine, &ScriptEngine::errorMessage, this, &JSConsole::handleError);
|
||||
}
|
||||
|
||||
|
@ -107,11 +115,10 @@ void JSConsole::executeCommand(const QString& command) {
|
|||
|
||||
QScriptValue JSConsole::executeCommandInWatcher(const QString& command) {
|
||||
QScriptValue result;
|
||||
static const QString filename = "JSConcole";
|
||||
QMetaObject::invokeMethod(_scriptEngine, "evaluate", Qt::ConnectionType::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(QScriptValue, result),
|
||||
Q_ARG(const QString&, command),
|
||||
Q_ARG(const QString&, filename));
|
||||
Q_ARG(const QString&, _consoleFileName));
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -134,16 +141,26 @@ void JSConsole::commandFinished() {
|
|||
resetCurrentCommandHistory();
|
||||
}
|
||||
|
||||
void JSConsole::handleError(const QString& scriptName, const QString& message) {
|
||||
void JSConsole::handleError(const QString& message, const QString& scriptName) {
|
||||
Q_UNUSED(scriptName);
|
||||
appendMessage(GUTTER_ERROR, "<span style='" + RESULT_ERROR_STYLE + "'>" + message.toHtmlEscaped() + "</span>");
|
||||
}
|
||||
|
||||
void JSConsole::handlePrint(const QString& scriptName, const QString& message) {
|
||||
void JSConsole::handlePrint(const QString& message, const QString& scriptName) {
|
||||
Q_UNUSED(scriptName);
|
||||
appendMessage("", message);
|
||||
}
|
||||
|
||||
void JSConsole::handleInfo(const QString& message, const QString& scriptName) {
|
||||
Q_UNUSED(scriptName);
|
||||
appendMessage("", "<span style='" + RESULT_INFO_STYLE + "'>" + message.toHtmlEscaped() + "</span>");
|
||||
}
|
||||
|
||||
void JSConsole::handleWarning(const QString& message, const QString& scriptName) {
|
||||
Q_UNUSED(scriptName);
|
||||
appendMessage("", "<span style='" + RESULT_WARNING_STYLE + "'>" + message.toHtmlEscaped() + "</span>");
|
||||
}
|
||||
|
||||
void JSConsole::mouseReleaseEvent(QMouseEvent* event) {
|
||||
_ui->promptTextEdit->setFocus();
|
||||
}
|
||||
|
|
|
@ -47,8 +47,10 @@ protected:
|
|||
protected slots:
|
||||
void scrollToBottom();
|
||||
void resizeTextInput();
|
||||
void handlePrint(const QString& scriptName, const QString& message);
|
||||
void handleError(const QString& scriptName, const QString& message);
|
||||
void handlePrint(const QString& message, const QString& scriptName);
|
||||
void handleInfo(const QString& message, const QString& scriptName);
|
||||
void handleWarning(const QString& message, const QString& scriptName);
|
||||
void handleError(const QString& message, const QString& scriptName);
|
||||
void commandFinished();
|
||||
|
||||
private:
|
||||
|
@ -66,6 +68,7 @@ private:
|
|||
bool _ownScriptEngine;
|
||||
QString _rootCommand;
|
||||
ScriptEngine* _scriptEngine;
|
||||
static const QString _consoleFileName;
|
||||
};
|
||||
|
||||
|
||||
|
|
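The JSConsole change above reorders the handler parameters to (message, scriptName) so that they match the corresponding ScriptEngine signals used in the connect() calls; with the pointer-to-member connect() syntax, the slot's parameter list has to be compatible with the signal's. A reduced sketch of that constraint, with hypothetical class names (requires moc, as any Q_OBJECT class does):

    #include <QDebug>
    #include <QObject>
    #include <QString>

    class Emitter : public QObject {
        Q_OBJECT
    signals:
        void errorMessage(const QString& message, const QString& scriptName);
    };

    class Listener : public QObject {
        Q_OBJECT
    public slots:
        // Parameter order matches the signal, so the connect() below compiles and
        // the arguments arrive in the intended order.
        void handleError(const QString& message, const QString& scriptName) {
            qDebug() << scriptName << ":" << message;
        }
    };

    // QObject::connect(&emitter, &Emitter::errorMessage, &listener, &Listener::handleError);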
|
@ -330,6 +330,30 @@ void setupPreferences() {
|
|||
preferences->addPreference(preference);
|
||||
}
|
||||
}
|
||||
{
|
||||
auto getter = []()->bool { return image::isColorTexturesCompressionEnabled(); };
|
||||
auto setter = [](bool value) { return image::setColorTexturesCompressionEnabled(value); };
|
||||
auto preference = new CheckPreference(RENDER, "Compress Color Textures", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
{
|
||||
auto getter = []()->bool { return image::isNormalTexturesCompressionEnabled(); };
|
||||
auto setter = [](bool value) { return image::setNormalTexturesCompressionEnabled(value); };
|
||||
auto preference = new CheckPreference(RENDER, "Compress Normal Textures", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
{
|
||||
auto getter = []()->bool { return image::isGrayscaleTexturesCompressionEnabled(); };
|
||||
auto setter = [](bool value) { return image::setGrayscaleTexturesCompressionEnabled(value); };
|
||||
auto preference = new CheckPreference(RENDER, "Compress Grayscale Textures", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
{
|
||||
auto getter = []()->bool { return image::isCubeTexturesCompressionEnabled(); };
|
||||
auto setter = [](bool value) { return image::setCubeTexturesCompressionEnabled(value); };
|
||||
auto preference = new CheckPreference(RENDER, "Compress Cube Textures", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
}
|
||||
{
|
||||
static const QString RENDER("Networking");
|
||||
|
|
|
@ -81,6 +81,10 @@ QVariantMap convertOverlayLocationFromScriptSemantics(const QVariantMap& propert
|
|||
void Base3DOverlay::setProperties(const QVariantMap& originalProperties) {
|
||||
QVariantMap properties = originalProperties;
|
||||
|
||||
if (properties["name"].isValid()) {
|
||||
setName(properties["name"].toString());
|
||||
}
|
||||
|
||||
// carry over some legacy keys
|
||||
if (!properties["position"].isValid() && !properties["localPosition"].isValid()) {
|
||||
if (properties["p1"].isValid()) {
|
||||
|
@ -207,6 +211,9 @@ void Base3DOverlay::setProperties(const QVariantMap& originalProperties) {
|
|||
}
|
||||
|
||||
QVariant Base3DOverlay::getProperty(const QString& property) {
|
||||
if (property == "name") {
|
||||
return _name;
|
||||
}
|
||||
if (property == "position" || property == "start" || property == "p1" || property == "point") {
|
||||
return vec3toVariant(getPosition());
|
||||
}
|
||||
|
|
|
@ -26,6 +26,9 @@ public:
|
|||
virtual OverlayID getOverlayID() const override { return OverlayID(getID().toString()); }
|
||||
void setOverlayID(OverlayID overlayID) override { setID(overlayID); }
|
||||
|
||||
virtual QString getName() const override { return QString("Overlay:") + _name; }
|
||||
void setName(QString name) { _name = name; }
|
||||
|
||||
// getters
|
||||
virtual bool is3D() const override { return true; }
|
||||
|
||||
|
@ -74,6 +77,8 @@ protected:
|
|||
bool _drawInFront;
|
||||
bool _isAA;
|
||||
bool _isGrabbable { false };
|
||||
|
||||
QString _name;
|
||||
};
|
||||
|
||||
#endif // hifi_Base3DOverlay_h
|
||||
|
|
|
@ -22,6 +22,7 @@ ModelOverlay::ModelOverlay()
|
|||
_modelTextures(QVariantMap())
|
||||
{
|
||||
_model->init();
|
||||
_model->setLoadingPriority(_loadPriority);
|
||||
_isLoaded = false;
|
||||
}
|
||||
|
||||
|
@ -30,9 +31,11 @@ ModelOverlay::ModelOverlay(const ModelOverlay* modelOverlay) :
|
|||
_model(std::make_shared<Model>(std::make_shared<Rig>(), nullptr, this)),
|
||||
_modelTextures(QVariantMap()),
|
||||
_url(modelOverlay->_url),
|
||||
_updateModel(false)
|
||||
_updateModel(false),
|
||||
_loadPriority(modelOverlay->getLoadPriority())
|
||||
{
|
||||
_model->init();
|
||||
_model->setLoadingPriority(_loadPriority);
|
||||
if (_url.isValid()) {
|
||||
_updateModel = true;
|
||||
_isLoaded = false;
|
||||
|
@ -113,6 +116,12 @@ void ModelOverlay::setProperties(const QVariantMap& properties) {
|
|||
_updateModel = true;
|
||||
}
|
||||
|
||||
auto loadPriorityProperty = properties["loadPriority"];
|
||||
if (loadPriorityProperty.isValid()) {
|
||||
_loadPriority = loadPriorityProperty.toFloat();
|
||||
_model->setLoadingPriority(_loadPriority);
|
||||
}
|
||||
|
||||
auto urlValue = properties["url"];
|
||||
if (urlValue.isValid() && urlValue.canConvert<QString>()) {
|
||||
_url = urlValue.toString();
|
||||
|
@ -279,3 +288,10 @@ void ModelOverlay::locationChanged(bool tellPhysics) {
|
|||
_model->setTranslation(getPosition());
|
||||
}
|
||||
}
|
||||
|
||||
QString ModelOverlay::getName() const {
|
||||
if (_name != "") {
|
||||
return QString("Overlay:") + getType() + ":" + _name;
|
||||
}
|
||||
return QString("Overlay:") + getType() + ":" + _url.toString();
|
||||
}
|
||||
|
|
|
@ -22,6 +22,8 @@ public:
|
|||
static QString const TYPE;
|
||||
virtual QString getType() const override { return TYPE; }
|
||||
|
||||
virtual QString getName() const override;
|
||||
|
||||
ModelOverlay();
|
||||
ModelOverlay(const ModelOverlay* modelOverlay);
|
||||
|
||||
|
@ -41,6 +43,8 @@ public:
|
|||
|
||||
void locationChanged(bool tellPhysics) override;
|
||||
|
||||
float getLoadPriority() const { return _loadPriority; }
|
||||
|
||||
protected:
|
||||
// helper to extract metadata from our Model's rigged joints
|
||||
template <typename itemType> using mapFunction = std::function<itemType(int jointIndex)>;
|
||||
|
@ -55,6 +59,7 @@ private:
|
|||
QUrl _url;
|
||||
bool _updateModel = { false };
|
||||
bool _scaleToFit = { false };
|
||||
float _loadPriority { 0.0f };
|
||||
};
|
||||
|
||||
#endif // hifi_ModelOverlay_h
|
||||
|
|
|
@ -421,6 +421,13 @@ void Web3DOverlay::handlePointerEventAsTouch(const PointerEvent& event) {
|
|||
return;
|
||||
}
|
||||
|
||||
//do not send secondary button events to tablet
|
||||
if (event.getButton() == PointerEvent::SecondaryButton ||
|
||||
//do not block composed events
|
||||
event.getButtons() == PointerEvent::SecondaryButton) {
|
||||
return;
|
||||
}
|
||||
|
||||
QTouchEvent::TouchPoint point;
|
||||
point.setId(event.getID());
|
||||
point.setState(touchPointState);
|
||||
|
|
|
@ -1097,28 +1097,27 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
|
|||
}
|
||||
|
||||
void AudioClient::prepareLocalAudioInjectors() {
|
||||
if (_outputPeriod == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
int bufferCapacity = _localInjectorsStream.getSampleCapacity();
|
||||
if (_localToOutputResampler) {
|
||||
// avoid overwriting the buffer,
|
||||
// instead of failing on writes because the buffer is used as a lock-free pipe
|
||||
bufferCapacity -=
|
||||
_localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL) *
|
||||
AudioConstants::STEREO;
|
||||
bufferCapacity += 1;
|
||||
}
|
||||
|
||||
int samplesNeeded = std::numeric_limits<int>::max();
|
||||
while (samplesNeeded > 0) {
|
||||
// lock for every write to avoid locking out the device callback
|
||||
// this lock is intentional - the buffer is only lock-free in its use in the device callback
|
||||
RecursiveLock lock(_localAudioMutex);
|
||||
// unlock between every write to allow device switching
|
||||
Lock lock(_localAudioMutex);
|
||||
|
||||
// in case of a device switch, consider bufferCapacity volatile across iterations
|
||||
if (_outputPeriod == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
int bufferCapacity = _localInjectorsStream.getSampleCapacity();
|
||||
int maxOutputSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * AudioConstants::STEREO;
|
||||
if (_localToOutputResampler) {
|
||||
maxOutputSamples =
|
||||
_localToOutputResampler->getMaxOutput(AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL) *
|
||||
AudioConstants::STEREO;
|
||||
}
|
||||
|
||||
samplesNeeded = bufferCapacity - _localSamplesAvailable.load(std::memory_order_relaxed);
|
||||
if (samplesNeeded <= 0) {
|
||||
if (samplesNeeded < maxOutputSamples) {
|
||||
// avoid overwriting the buffer to prevent losing frames
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -1168,16 +1167,18 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
|
|||
memset(mixBuffer, 0, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO * sizeof(float));
|
||||
|
||||
for (AudioInjector* injector : _activeLocalAudioInjectors) {
|
||||
if (injector->getLocalBuffer()) {
|
||||
// the lock guarantees that injectorBuffer, if found, is invariant
|
||||
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
|
||||
if (injectorBuffer) {
|
||||
|
||||
static const int HRTF_DATASET_INDEX = 1;
|
||||
|
||||
int numChannels = injector->isAmbisonic() ? AudioConstants::AMBISONIC : (injector->isStereo() ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
qint64 bytesToRead = numChannels * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
||||
size_t bytesToRead = numChannels * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
||||
|
||||
// get one frame from the injector
|
||||
memset(_localScratchBuffer, 0, bytesToRead);
|
||||
if (0 < injector->getLocalBuffer()->readData((char*)_localScratchBuffer, bytesToRead)) {
|
||||
if (0 < injectorBuffer->readData((char*)_localScratchBuffer, bytesToRead)) {
|
||||
|
||||
if (injector->isAmbisonic()) {
|
||||
|
||||
|
@ -1317,15 +1318,17 @@ void AudioClient::setIsStereoInput(bool isStereoInput) {
|
|||
}
|
||||
|
||||
bool AudioClient::outputLocalInjector(AudioInjector* injector) {
|
||||
Lock lock(_injectorsMutex);
|
||||
if (injector->getLocalBuffer() && _audioInput ) {
|
||||
// just add it to the vector of active local injectors, if
|
||||
// not already there.
|
||||
// Since this is invoked with invokeMethod, there _should_ be
|
||||
// no reason to lock access to the vector of injectors.
|
||||
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
|
||||
if (injectorBuffer) {
|
||||
// local injectors are on the AudioInjectorsThread, so we must guard access
|
||||
Lock lock(_injectorsMutex);
|
||||
if (!_activeLocalAudioInjectors.contains(injector)) {
|
||||
qCDebug(audioclient) << "adding new injector";
|
||||
_activeLocalAudioInjectors.append(injector);
|
||||
|
||||
// move local buffer to the LocalAudioThread to avoid dataraces with AudioInjector (like stop())
|
||||
injectorBuffer->setParent(nullptr);
|
||||
injectorBuffer->moveToThread(&_localAudioThread);
|
||||
} else {
|
||||
qCDebug(audioclient) << "injector exists in active list already";
|
||||
}
|
||||
|
@ -1333,7 +1336,7 @@ bool AudioClient::outputLocalInjector(AudioInjector* injector) {
|
|||
return true;
|
||||
|
||||
} else {
|
||||
// no local buffer or audio
|
||||
// no local buffer
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -1452,7 +1455,7 @@ void AudioClient::outputNotify() {
|
|||
bool AudioClient::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo) {
|
||||
bool supportedFormat = false;
|
||||
|
||||
RecursiveLock lock(_localAudioMutex);
|
||||
Lock lock(_localAudioMutex);
|
||||
_localSamplesAvailable.exchange(0, std::memory_order_release);
|
||||
|
||||
// cleanup any previously initialized device
|
||||
|
@@ -1681,8 +1684,12 @@ qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {

    int injectorSamplesPopped = 0;
    {
        RecursiveLock lock(_audio->_localAudioMutex);
        bool append = networkSamplesPopped > 0;
        // this does not require a lock as of the only two functions adding to _localSamplesAvailable (samples count):
        // - prepareLocalAudioInjectors will only increase samples count
        // - switchOutputToAudioDevice will zero samples count
        // stop the device, so that readData will exhaust the existing buffer or see a zeroed samples count
        // and start the device, which can only see a zeroed samples count
        samplesRequested = std::min(samplesRequested, _audio->_localSamplesAvailable.load(std::memory_order_acquire));
        if ((injectorSamplesPopped = _localInjectorsStream.appendSamples(mixBuffer, samplesRequested, append)) > 0) {
            _audio->_localSamplesAvailable.fetch_sub(injectorSamplesPopped, std::memory_order_release);

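The comments above describe why _localSamplesAvailable can be read without holding _localAudioMutex: the injector side only ever adds samples and publishes the count with a release store, while readData() consumes with an acquire load and never pops more than was published. A stripped-down sketch of that acquire/release pattern, with simplified names rather than the actual AudioClient members:

#include <algorithm>
#include <atomic>

// Producer side (cf. prepareLocalAudioInjectors): publish only after writing to the buffer.
static std::atomic<int> samplesAvailable { 0 };

void publishSamples(int written) {
    samplesAvailable.fetch_add(written, std::memory_order_release);
}

// Consumer side (cf. AudioOutputIODevice::readData): never pop more than was published.
int popSamples(int requested) {
    int available = samplesAvailable.load(std::memory_order_acquire);  // pairs with the release above
    int popped = std::min(requested, available);
    // ... copy `popped` samples out of the ring buffer here ...
    samplesAvailable.fetch_sub(popped, std::memory_order_release);
    return popped;
}
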
@@ -96,8 +96,6 @@ public:
    using AudioPositionGetter = std::function<glm::vec3()>;
    using AudioOrientationGetter = std::function<glm::quat()>;

    using RecursiveMutex = std::recursive_mutex;
    using RecursiveLock = std::unique_lock<RecursiveMutex>;
    using Mutex = std::mutex;
    using Lock = std::unique_lock<Mutex>;

@@ -345,7 +343,7 @@ private:
    int16_t _localScratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_AMBISONIC];
    float* _localOutputMixBuffer { NULL };
    AudioInjectorsThread _localAudioThread;
    RecursiveMutex _localAudioMutex;
    Mutex _localAudioMutex;

    // for output audio (used by this thread)
    int _outputPeriod { 0 };

@@ -33,7 +33,11 @@ public:
                                     PacketType packetType, QString codecName = QString(""));

public slots:
    // threadsafe
    // moves injector->getLocalBuffer() to another thread (so removes its parent)
    // take care to delete it when ~AudioInjector, as parenting Qt semantics will not work
    virtual bool outputLocalInjector(AudioInjector* injector) = 0;

    virtual bool shouldLoopbackInjectors() { return false; }

    virtual void setIsStereoInput(bool stereo) = 0;

@ -51,6 +51,10 @@ AudioInjector::AudioInjector(const QByteArray& audioData, const AudioInjectorOpt
|
|||
{
|
||||
}
|
||||
|
||||
AudioInjector::~AudioInjector() {
|
||||
deleteLocalBuffer();
|
||||
}
|
||||
|
||||
bool AudioInjector::stateHas(AudioInjectorState state) const {
|
||||
return (_state & state) == state;
|
||||
}
|
||||
|
@ -87,11 +91,7 @@ void AudioInjector::finish() {
|
|||
|
||||
emit finished();
|
||||
|
||||
if (_localBuffer) {
|
||||
_localBuffer->stop();
|
||||
_localBuffer->deleteLater();
|
||||
_localBuffer = NULL;
|
||||
}
|
||||
deleteLocalBuffer();
|
||||
|
||||
if (stateHas(AudioInjectorState::PendingDelete)) {
|
||||
// we've been asked to delete after finishing, trigger a deleteLater here
|
||||
|
@ -163,7 +163,7 @@ bool AudioInjector::injectLocally() {
|
|||
if (_localAudioInterface) {
|
||||
if (_audioData.size() > 0) {
|
||||
|
||||
_localBuffer = new AudioInjectorLocalBuffer(_audioData, this);
|
||||
_localBuffer = new AudioInjectorLocalBuffer(_audioData);
|
||||
|
||||
_localBuffer->open(QIODevice::ReadOnly);
|
||||
_localBuffer->setShouldLoop(_options.loop);
|
||||
|
@ -172,7 +172,8 @@ bool AudioInjector::injectLocally() {
|
|||
_localBuffer->setCurrentOffset(_currentSendOffset);
|
||||
|
||||
// call this function on the AudioClient's thread
|
||||
success = QMetaObject::invokeMethod(_localAudioInterface, "outputLocalInjector", Q_ARG(AudioInjector*, this));
|
||||
// this will move the local buffer's thread to the LocalInjectorThread
|
||||
success = _localAudioInterface->outputLocalInjector(this);
|
||||
|
||||
if (!success) {
|
||||
qCDebug(audio) << "AudioInjector::injectLocally could not output locally via _localAudioInterface";
|
||||
|
@ -185,6 +186,14 @@ bool AudioInjector::injectLocally() {
|
|||
return success;
|
||||
}
|
||||
|
||||
void AudioInjector::deleteLocalBuffer() {
|
||||
if (_localBuffer) {
|
||||
_localBuffer->stop();
|
||||
_localBuffer->deleteLater();
|
||||
_localBuffer = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
const uchar MAX_INJECTOR_VOLUME = packFloatGainToByte(1.0f);
|
||||
static const int64_t NEXT_FRAME_DELTA_ERROR_OR_FINISHED = -1;
|
||||
static const int64_t NEXT_FRAME_DELTA_IMMEDIATELY = 0;
|
||||
|
|
|
@@ -52,6 +52,7 @@ class AudioInjector : public QObject {
public:
    AudioInjector(const Sound& sound, const AudioInjectorOptions& injectorOptions);
    AudioInjector(const QByteArray& audioData, const AudioInjectorOptions& injectorOptions);
    ~AudioInjector();

    bool isFinished() const { return (stateHas(AudioInjectorState::Finished)); }

@@ -99,6 +100,7 @@ private:
    int64_t injectNextFrame();
    bool inject(bool(AudioInjectorManager::*injection)(AudioInjector*));
    bool injectLocally();
    void deleteLocalBuffer();

    static AbstractAudioInterface* _localAudioInterface;

@@ -11,8 +11,7 @@

#include "AudioInjectorLocalBuffer.h"

AudioInjectorLocalBuffer::AudioInjectorLocalBuffer(const QByteArray& rawAudioArray, QObject* parent) :
    QIODevice(parent),
AudioInjectorLocalBuffer::AudioInjectorLocalBuffer(const QByteArray& rawAudioArray) :
    _rawAudioArray(rawAudioArray),
    _shouldLoop(false),
    _isStopped(false),

@@ -19,7 +19,7 @@
class AudioInjectorLocalBuffer : public QIODevice {
    Q_OBJECT
public:
    AudioInjectorLocalBuffer(const QByteArray& rawAudioArray, QObject* parent);
    AudioInjectorLocalBuffer(const QByteArray& rawAudioArray);

    void stop();

@@ -1,6 +1,6 @@
set(TARGET_NAME avatars-renderer)
AUTOSCRIBE_SHADER_LIB(gpu model render render-utils)
setup_hifi_library(Widgets Network Script)
link_hifi_libraries(shared gpu model animation physics model-networking script-engine render render-utils)
link_hifi_libraries(shared gpu model animation physics model-networking script-engine render image render-utils)

target_bullet()

@ -27,16 +27,13 @@
|
|||
#include <TextRenderer3D.h>
|
||||
#include <VariantMapToScriptValue.h>
|
||||
#include <DebugDraw.h>
|
||||
#include <shared/Camera.h>
|
||||
#include <SoftAttachmentModel.h>
|
||||
|
||||
#include "AvatarMotionState.h"
|
||||
#include "Camera.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "SceneScriptingInterface.h"
|
||||
#include "SoftAttachmentModel.h"
|
||||
#include "Logging.h"
|
||||
|
||||
using namespace std;
|
||||
|
||||
const glm::vec3 DEFAULT_UP_DIRECTION(0.0f, 1.0f, 0.0f);
|
||||
const int NUM_BODY_CONE_SIDES = 9;
|
||||
const float CHAT_MESSAGE_SCALE = 0.0015f;
|
||||
const float CHAT_MESSAGE_HEIGHT = 0.1f;
|
||||
|
@ -71,6 +68,11 @@ namespace render {
|
|||
}
|
||||
}
|
||||
|
||||
bool showAvatars { true };
|
||||
void Avatar::setShowAvatars(bool render) {
|
||||
showAvatars = render;
|
||||
}
|
||||
|
||||
static bool showReceiveStats = false;
|
||||
void Avatar::setShowReceiveStats(bool receiveStats) {
|
||||
showReceiveStats = receiveStats;
|
||||
|
@ -97,25 +99,6 @@ void Avatar::setShowNamesAboveHeads(bool show) {
|
|||
}
|
||||
|
||||
Avatar::Avatar(QThread* thread, RigPointer rig) :
|
||||
AvatarData(),
|
||||
_skeletonOffset(0.0f),
|
||||
_bodyYawDelta(0.0f),
|
||||
_positionDeltaAccumulator(0.0f),
|
||||
_lastVelocity(0.0f),
|
||||
_acceleration(0.0f),
|
||||
_lastAngularVelocity(0.0f),
|
||||
_lastOrientation(),
|
||||
_worldUpDirection(DEFAULT_UP_DIRECTION),
|
||||
_moving(false),
|
||||
_smoothPositionTime(SMOOTH_TIME_POSITION),
|
||||
_smoothPositionTimer(std::numeric_limits<float>::max()),
|
||||
_smoothOrientationTime(SMOOTH_TIME_ORIENTATION),
|
||||
_smoothOrientationTimer(std::numeric_limits<float>::max()),
|
||||
_smoothPositionInitial(),
|
||||
_smoothPositionTarget(),
|
||||
_smoothOrientationInitial(),
|
||||
_smoothOrientationTarget(),
|
||||
_initialized(false),
|
||||
_voiceSphereID(GeometryCache::UNKNOWN_ID)
|
||||
{
|
||||
// we may have been created in the network thread, but we live in the main thread
|
||||
|
@ -123,12 +106,6 @@ Avatar::Avatar(QThread* thread, RigPointer rig) :
|
|||
|
||||
setScale(glm::vec3(1.0f)); // avatar scale is uniform
|
||||
|
||||
// give the pointer to our head to inherited _headData variable from AvatarData
|
||||
_headData = static_cast<HeadData*>(new Head(this));
|
||||
|
||||
_skeletonModel = std::make_shared<SkeletonModel>(this, nullptr, rig);
|
||||
connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
_nameRectGeometryID = geometryCache->allocateID();
|
||||
_leftPointerGeometryID = geometryCache->allocateID();
|
||||
|
@ -283,7 +260,7 @@ void Avatar::updateAvatarEntities() {
|
|||
// and either add or update the entity.
|
||||
QJsonDocument jsonProperties = QJsonDocument::fromBinaryData(data);
|
||||
if (!jsonProperties.isObject()) {
|
||||
qCDebug(interfaceapp) << "got bad avatarEntity json" << QString(data.toHex());
|
||||
qCDebug(avatars_renderer) << "got bad avatarEntity json" << QString(data.toHex());
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -306,7 +283,7 @@ void Avatar::updateAvatarEntities() {
|
|||
// NOTE: if this avatar entity is not attached to us, strip its entity script completely...
|
||||
auto attachedScript = properties.getScript();
|
||||
if (!isMyAvatar() && !attachedScript.isEmpty()) {
|
||||
qCDebug(interfaceapp) << "removing entity script from avatar attached entity:" << entityID << "old script:" << attachedScript;
|
||||
qCDebug(avatars_renderer) << "removing entity script from avatar attached entity:" << entityID << "old script:" << attachedScript;
|
||||
QString noScript;
|
||||
properties.setScript(noScript);
|
||||
}
|
||||
|
@ -410,7 +387,7 @@ void Avatar::simulate(float deltaTime, bool inView) {
|
|||
Head* head = getHead();
|
||||
head->setPosition(headPosition);
|
||||
head->setScale(getUniformScale());
|
||||
head->simulate(deltaTime, false);
|
||||
head->simulate(deltaTime);
|
||||
} else {
|
||||
// a non-full update is still required so that the position, rotation, scale and bounds of the skeletonModel are updated.
|
||||
_skeletonModel->simulate(deltaTime, false);
|
||||
|
@ -525,13 +502,14 @@ static TextRenderer3D* textRenderer(TextRendererType type) {
|
|||
void Avatar::addToScene(AvatarSharedPointer self, const render::ScenePointer& scene, render::Transaction& transaction) {
|
||||
auto avatarPayload = new render::Payload<AvatarData>(self);
|
||||
auto avatarPayloadPointer = Avatar::PayloadPointer(avatarPayload);
|
||||
if (_skeletonModel->addToScene(scene, transaction)) {
|
||||
_renderItemID = scene->allocateID();
|
||||
transaction.resetItem(_renderItemID, avatarPayloadPointer);
|
||||
|
||||
for (auto& attachmentModel : _attachmentModels) {
|
||||
attachmentModel->addToScene(scene, transaction);
|
||||
}
|
||||
if (_renderItemID == render::Item::INVALID_ITEM_ID) {
|
||||
_renderItemID = scene->allocateID();
|
||||
}
|
||||
transaction.resetItem(_renderItemID, avatarPayloadPointer);
|
||||
_skeletonModel->addToScene(scene, transaction);
|
||||
for (auto& attachmentModel : _attachmentModels) {
|
||||
attachmentModel->addToScene(scene, transaction);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -748,12 +726,12 @@ float Avatar::getBoundingRadius() const {
|
|||
#ifdef DEBUG
|
||||
void debugValue(const QString& str, const glm::vec3& value) {
|
||||
if (glm::any(glm::isnan(value)) || glm::any(glm::isinf(value))) {
|
||||
qCWarning(interfaceapp) << "debugValue() " << str << value;
|
||||
qCWarning(avatars_renderer) << "debugValue() " << str << value;
|
||||
}
|
||||
};
|
||||
void debugValue(const QString& str, const float& value) {
|
||||
if (glm::isnan(value) || glm::isinf(value)) {
|
||||
qCWarning(interfaceapp) << "debugValue() " << str << value;
|
||||
qCWarning(avatars_renderer) << "debugValue() " << str << value;
|
||||
}
|
||||
};
|
||||
#define DEBUG_VALUE(str, value) debugValue(str, value)
|
||||
|
@ -783,7 +761,7 @@ glm::vec3 Avatar::getDisplayNamePosition() const {
|
|||
}
|
||||
|
||||
if (glm::any(glm::isnan(namePosition)) || glm::any(glm::isinf(namePosition))) {
|
||||
qCWarning(interfaceapp) << "Invalid display name position" << namePosition
|
||||
qCWarning(avatars_renderer) << "Invalid display name position" << namePosition
|
||||
<< ", setting is to (0.0f, 0.5f, 0.0f)";
|
||||
namePosition = glm::vec3(0.0f, 0.5f, 0.0f);
|
||||
}
|
||||
|
@ -1115,14 +1093,14 @@ void Avatar::setModelURLFinished(bool success) {
|
|||
const int MAX_SKELETON_DOWNLOAD_ATTEMPTS = 4; // NOTE: we don't want to be as generous as ResourceCache is, we only want 4 attempts
|
||||
if (_skeletonModel->getResourceDownloadAttemptsRemaining() <= 0 ||
|
||||
_skeletonModel->getResourceDownloadAttempts() > MAX_SKELETON_DOWNLOAD_ATTEMPTS) {
|
||||
qCWarning(interfaceapp) << "Using default after failing to load Avatar model: " << _skeletonModelURL
|
||||
qCWarning(avatars_renderer) << "Using default after failing to load Avatar model: " << _skeletonModelURL
|
||||
<< "after" << _skeletonModel->getResourceDownloadAttempts() << "attempts.";
|
||||
// call _skeletonModel.setURL, but leave our copy of _skeletonModelURL alone. This is so that
|
||||
// we don't redo this every time we receive an identity packet from the avatar with the bad url.
|
||||
QMetaObject::invokeMethod(_skeletonModel.get(), "setURL",
|
||||
Qt::QueuedConnection, Q_ARG(QUrl, AvatarData::defaultFullAvatarModelUrl()));
|
||||
} else {
|
||||
qCWarning(interfaceapp) << "Avatar model: " << _skeletonModelURL
|
||||
qCWarning(avatars_renderer) << "Avatar model: " << _skeletonModelURL
|
||||
<< "failed to load... attempts:" << _skeletonModel->getResourceDownloadAttempts()
|
||||
<< "out of:" << MAX_SKELETON_DOWNLOAD_ATTEMPTS;
|
||||
}
|
||||
|
@ -1438,7 +1416,7 @@ void Avatar::setParentID(const QUuid& parentID) {
|
|||
if (success) {
|
||||
setTransform(beforeChangeTransform, success);
|
||||
if (!success) {
|
||||
qCDebug(interfaceapp) << "Avatar::setParentID failed to reset avatar's location.";
|
||||
qCDebug(avatars_renderer) << "Avatar::setParentID failed to reset avatar's location.";
|
||||
}
|
||||
if (initialParentID != parentID) {
|
||||
_parentChanged = usecTimestampNow();
|
||||
|
@ -1456,7 +1434,7 @@ void Avatar::setParentJointIndex(quint16 parentJointIndex) {
|
|||
if (success) {
|
||||
setTransform(beforeChangeTransform, success);
|
||||
if (!success) {
|
||||
qCDebug(interfaceapp) << "Avatar::setParentJointIndex failed to reset avatar's location.";
|
||||
qCDebug(avatars_renderer) << "Avatar::setParentJointIndex failed to reset avatar's location.";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1488,7 +1466,7 @@ QList<QVariant> Avatar::getSkeleton() {
|
|||
void Avatar::addToScene(AvatarSharedPointer myHandle, const render::ScenePointer& scene) {
|
||||
if (scene) {
|
||||
auto nodelist = DependencyManager::get<NodeList>();
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()
|
||||
if (showAvatars
|
||||
&& !nodelist->isIgnoringNode(getSessionUUID())
|
||||
&& !nodelist->isRadiusIgnoringNode(getSessionUUID())) {
|
||||
render::Transaction transaction;
|
||||
|
@ -1496,7 +1474,7 @@ void Avatar::addToScene(AvatarSharedPointer myHandle, const render::ScenePointer
|
|||
scene->enqueueTransaction(transaction);
|
||||
}
|
||||
} else {
|
||||
qCWarning(interfaceapp) << "Avatar::addAvatar() : Unexpected null scene, possibly during application shutdown";
|
||||
qCWarning(avatars_renderer) << "Avatar::addAvatar() : Unexpected null scene, possibly during application shutdown";
|
||||
}
|
||||
}
|
||||
|
|
@ -20,6 +20,7 @@
|
|||
#include <AvatarData.h>
|
||||
#include <ShapeInfo.h>
|
||||
#include <render/Scene.h>
|
||||
#include <GLMHelpers.h>
|
||||
|
||||
|
||||
#include "Head.h"
|
||||
|
@ -68,6 +69,7 @@ class Avatar : public AvatarData {
|
|||
Q_PROPERTY(glm::vec3 skeletonOffset READ getSkeletonOffset WRITE setSkeletonOffset)
|
||||
|
||||
public:
|
||||
static void setShowAvatars(bool render);
|
||||
static void setShowReceiveStats(bool receiveStats);
|
||||
static void setShowMyLookAtVectors(bool showMine);
|
||||
static void setShowOtherLookAtVectors(bool showOthers);
|
||||
|
@ -77,6 +79,8 @@ public:
|
|||
explicit Avatar(QThread* thread, RigPointer rig = nullptr);
|
||||
~Avatar();
|
||||
|
||||
virtual void instantiableAvatar() = 0;
|
||||
|
||||
typedef render::Payload<AvatarData> Payload;
|
||||
typedef std::shared_ptr<render::Item::PayloadInterface> PayloadPointer;
|
||||
|
||||
|
@ -251,7 +255,6 @@ public:
|
|||
bool isInScene() const { return render::Item::isValidID(_renderItemID); }
|
||||
bool isMoving() const { return _moving; }
|
||||
|
||||
//void setMotionState(AvatarMotionState* motionState);
|
||||
void setPhysicsCallback(AvatarPhysicsCallback cb);
|
||||
void addPhysicsFlags(uint32_t flags);
|
||||
bool isInPhysicsSimulation() const { return _physicsCallback != nullptr; }
|
||||
|
@ -268,7 +271,6 @@ public slots:
|
|||
void setModelURLFinished(bool success);
|
||||
|
||||
protected:
|
||||
|
||||
const float SMOOTH_TIME_POSITION = 0.125f;
|
||||
const float SMOOTH_TIME_ORIENTATION = 0.075f;
|
||||
|
||||
|
@ -282,7 +284,7 @@ protected:
|
|||
std::vector<std::shared_ptr<Model>> _attachmentsToRemove;
|
||||
std::vector<std::shared_ptr<Model>> _attachmentsToDelete;
|
||||
|
||||
float _bodyYawDelta; // degrees/sec
|
||||
float _bodyYawDelta { 0.0f }; // degrees/sec
|
||||
|
||||
// These position histories and derivatives are in the world-frame.
|
||||
// The derivatives are the MEASURED results of all external and internal forces
|
||||
|
@ -298,9 +300,8 @@ protected:
|
|||
glm::vec3 _angularAcceleration;
|
||||
glm::quat _lastOrientation;
|
||||
|
||||
glm::vec3 _worldUpDirection;
|
||||
float _stringLength;
|
||||
bool _moving; ///< set when position is changing
|
||||
glm::vec3 _worldUpDirection { Vectors::UP };
|
||||
bool _moving { false }; ///< set when position is changing
|
||||
|
||||
// protected methods...
|
||||
bool isLookingAtMe(AvatarSharedPointer avatar) const;
|
||||
|
@ -336,10 +337,10 @@ protected:
|
|||
RateCounter<> _jointDataSimulationRate;
|
||||
|
||||
// Smoothing data for blending from one position/orientation to another on remote agents.
|
||||
float _smoothPositionTime;
|
||||
float _smoothPositionTimer;
|
||||
float _smoothOrientationTime;
|
||||
float _smoothOrientationTimer;
|
||||
float _smoothPositionTime { SMOOTH_TIME_POSITION };
|
||||
float _smoothPositionTimer { std::numeric_limits<float>::max() };
|
||||
float _smoothOrientationTime { SMOOTH_TIME_ORIENTATION };
|
||||
float _smoothOrientationTimer { std::numeric_limits<float>::max() };
|
||||
glm::vec3 _smoothPositionInitial;
|
||||
glm::vec3 _smoothPositionTarget;
|
||||
glm::quat _smoothOrientationInitial;
|
||||
|
@ -360,7 +361,7 @@ private:
|
|||
int _leftPointerGeometryID { 0 };
|
||||
int _rightPointerGeometryID { 0 };
|
||||
int _nameRectGeometryID { 0 };
|
||||
bool _initialized;
|
||||
bool _initialized { false };
|
||||
bool _isLookAtTarget { false };
|
||||
bool _isAnimatingScale { false };
|
||||
|
|
@ -9,13 +9,12 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "AvatarMotionState.h"
|
||||
|
||||
#include <PhysicsCollisionGroups.h>
|
||||
#include <PhysicsEngine.h>
|
||||
#include <PhysicsHelpers.h>
|
||||
|
||||
#include "Avatar.h"
|
||||
#include "AvatarMotionState.h"
|
||||
#include "BulletUtil.h"
|
||||
|
||||
AvatarMotionState::AvatarMotionState(AvatarSharedPointer avatar, const btCollisionShape* shape) : ObjectMotionState(shape), _avatar(avatar) {
|
||||
assert(_avatar);
|
|
@@ -15,8 +15,9 @@
#include <QSet>

#include <ObjectMotionState.h>
#include <BulletUtil.h>

class Avatar;
#include "Avatar.h"

class AvatarMotionState : public ObjectMotionState {
public:
@ -8,55 +8,28 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "Head.h"
|
||||
|
||||
#include <glm/gtx/quaternion.hpp>
|
||||
#include <gpu/Batch.h>
|
||||
|
||||
#include <NodeList.h>
|
||||
#include <recording/Deck.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <GeometryUtil.h>
|
||||
#include <trackers/FaceTracker.h>
|
||||
#include <trackers/EyeTracker.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Avatar.h"
|
||||
#include "DependencyManager.h"
|
||||
#include "GeometryUtil.h"
|
||||
#include "Head.h"
|
||||
#include "Menu.h"
|
||||
#include "Util.h"
|
||||
#include "devices/DdeFaceTracker.h"
|
||||
#include <Rig.h>
|
||||
|
||||
#include "Avatar.h"
|
||||
|
||||
using namespace std;
|
||||
|
||||
static bool fixGaze { false };
|
||||
static bool disableEyelidAdjustment { false };
|
||||
|
||||
Head::Head(Avatar* owningAvatar) :
|
||||
HeadData((AvatarData*)owningAvatar),
|
||||
_returnHeadToCenter(false),
|
||||
_position(0.0f, 0.0f, 0.0f),
|
||||
_rotation(0.0f, 0.0f, 0.0f),
|
||||
_leftEyePosition(0.0f, 0.0f, 0.0f),
|
||||
_rightEyePosition(0.0f, 0.0f, 0.0f),
|
||||
_eyePosition(0.0f, 0.0f, 0.0f),
|
||||
_scale(1.0f),
|
||||
_lastLoudness(0.0f),
|
||||
_longTermAverageLoudness(-1.0f),
|
||||
_audioAttack(0.0f),
|
||||
_audioJawOpen(0.0f),
|
||||
_trailingAudioJawOpen(0.0f),
|
||||
_mouth2(0.0f),
|
||||
_mouth3(0.0f),
|
||||
_mouth4(0.0f),
|
||||
_mouthTime(0.0f),
|
||||
_saccade(0.0f, 0.0f, 0.0f),
|
||||
_saccadeTarget(0.0f, 0.0f, 0.0f),
|
||||
_leftEyeBlinkVelocity(0.0f),
|
||||
_rightEyeBlinkVelocity(0.0f),
|
||||
_timeWithoutTalking(0.0f),
|
||||
_deltaPitch(0.0f),
|
||||
_deltaYaw(0.0f),
|
||||
_deltaRoll(0.0f),
|
||||
_isCameraMoving(false),
|
||||
_isLookingAtMe(false),
|
||||
_lookingAtMeStarted(0),
|
||||
_wasLastLookingAtMe(0),
|
||||
HeadData(owningAvatar),
|
||||
_leftEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID()),
|
||||
_rightEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID())
|
||||
{
|
||||
|
@ -69,7 +42,7 @@ void Head::reset() {
|
|||
_baseYaw = _basePitch = _baseRoll = 0.0f;
|
||||
}
|
||||
|
||||
void Head::simulate(float deltaTime, bool isMine) {
|
||||
void Head::simulate(float deltaTime) {
|
||||
const float NORMAL_HZ = 60.0f; // the update rate the constant values were tuned for
|
||||
|
||||
// grab the audio loudness from the owning avatar, if we have one
|
||||
|
@ -90,43 +63,7 @@ void Head::simulate(float deltaTime, bool isMine) {
|
|||
_longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
|
||||
}
|
||||
|
||||
if (isMine) {
|
||||
auto player = DependencyManager::get<recording::Deck>();
|
||||
// Only use face trackers when not playing back a recording.
|
||||
if (!player->isPlaying()) {
|
||||
FaceTracker* faceTracker = qApp->getActiveFaceTracker();
|
||||
_isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
|
||||
if (_isFaceTrackerConnected) {
|
||||
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
|
||||
|
||||
if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
|
||||
calculateMouthShapes(deltaTime);
|
||||
|
||||
const int JAW_OPEN_BLENDSHAPE = 21;
|
||||
const int MMMM_BLENDSHAPE = 34;
|
||||
const int FUNNEL_BLENDSHAPE = 40;
|
||||
const int SMILE_LEFT_BLENDSHAPE = 28;
|
||||
const int SMILE_RIGHT_BLENDSHAPE = 29;
|
||||
_blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
|
||||
_blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
|
||||
_blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
|
||||
_blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
|
||||
_blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
|
||||
}
|
||||
|
||||
applyEyelidOffset(getFinalOrientationInWorldFrame());
|
||||
}
|
||||
}
|
||||
|
||||
auto eyeTracker = DependencyManager::get<EyeTracker>();
|
||||
_isEyeTrackerConnected = eyeTracker->isTracking();
|
||||
}
|
||||
}
|
||||
|
||||
if (!_isFaceTrackerConnected) {
|
||||
|
||||
if (!_isEyeTrackerConnected) {
|
||||
// Update eye saccades
|
||||
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
|
||||
|
@ -222,7 +159,7 @@ void Head::simulate(float deltaTime, bool isMine) {
|
|||
} else {
|
||||
_saccade = glm::vec3();
|
||||
}
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::FixGaze)) { // if debug menu turns off, use no saccade
|
||||
if (fixGaze) { // if debug menu turns off, use no saccade
|
||||
_saccade = glm::vec3();
|
||||
}
|
||||
|
||||
|
@ -277,7 +214,7 @@ void Head::calculateMouthShapes(float deltaTime) {
|
|||
void Head::applyEyelidOffset(glm::quat headOrientation) {
|
||||
// Adjusts the eyelid blendshape coefficients so that the eyelid follows the iris as the head pitches.
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::DisableEyelidAdjustment)) {
|
||||
if (disableEyelidAdjustment) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -350,7 +287,7 @@ glm::vec3 Head::getCorrectedLookAtPosition() {
|
|||
}
|
||||
}
|
||||
|
||||
void Head::setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition) {
|
||||
void Head::setCorrectedLookAtPosition(const glm::vec3& correctedLookAtPosition) {
|
||||
if (!isLookingAtMe()) {
|
||||
_lookingAtMeStarted = usecTimestampNow();
|
||||
}
|
||||
|
@ -366,25 +303,6 @@ bool Head::isLookingAtMe() {
|
|||
return _isLookingAtMe || (now - _wasLastLookingAtMe) < LOOKING_AT_ME_GAP_ALLOWED;
|
||||
}
|
||||
|
||||
glm::quat Head::getCameraOrientation() const {
|
||||
// NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
|
||||
// you may wonder why this code is here. This method will be called while in Oculus mode to determine how
|
||||
// to change the driving direction while in Oculus mode. It is used to support driving toward where you're
|
||||
// head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
|
||||
// always the same.
|
||||
if (qApp->isHMDMode()) {
|
||||
MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
|
||||
if (myAvatar) {
|
||||
return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
|
||||
} else {
|
||||
return getOrientation();
|
||||
}
|
||||
} else {
|
||||
Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
|
||||
return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
|
||||
}
|
||||
}
|
||||
|
||||
glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
|
||||
glm::quat orientation = getOrientation();
|
||||
glm::vec3 lookAtDelta = _lookAtPosition - eyePosition;
|
|
@ -11,16 +11,10 @@
|
|||
#ifndef hifi_Head_h
|
||||
#define hifi_Head_h
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtx/quaternion.hpp>
|
||||
|
||||
#include <GLMHelpers.h>
|
||||
#include <SharedUtil.h>
|
||||
|
||||
#include <HeadData.h>
|
||||
|
||||
#include "world.h"
|
||||
|
||||
|
||||
const float EYE_EAR_GAP = 0.08f;
|
||||
|
||||
class Avatar;
|
||||
|
@ -31,9 +25,9 @@ public:
|
|||
|
||||
void init();
|
||||
void reset();
|
||||
void simulate(float deltaTime, bool isMine);
|
||||
virtual void simulate(float deltaTime);
|
||||
void setScale(float scale);
|
||||
void setPosition(glm::vec3 position) { _position = position; }
|
||||
void setPosition(const glm::vec3& position) { _position = position; }
|
||||
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
|
||||
void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
|
||||
|
||||
|
@ -43,17 +37,14 @@ public:
|
|||
/// \return orientationBody * (orientationBase+Delta)
|
||||
glm::quat getFinalOrientationInWorldFrame() const;
|
||||
|
||||
/// \return orientationBody * orientationBasePitch
|
||||
glm::quat getCameraOrientation () const;
|
||||
|
||||
void setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition);
|
||||
void setCorrectedLookAtPosition(const glm::vec3& correctedLookAtPosition);
|
||||
glm::vec3 getCorrectedLookAtPosition();
|
||||
void clearCorrectedLookAtPosition() { _isLookingAtMe = false; }
|
||||
bool isLookingAtMe();
|
||||
quint64 getLookingAtMeStarted() { return _lookingAtMeStarted; }
|
||||
|
||||
float getScale() const { return _scale; }
|
||||
glm::vec3 getPosition() const { return _position; }
|
||||
const glm::vec3& getPosition() const { return _position; }
|
||||
const glm::vec3& getEyePosition() const { return _eyePosition; }
|
||||
const glm::vec3& getSaccade() const { return _saccade; }
|
||||
glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
|
||||
|
@ -91,46 +82,46 @@ public:
|
|||
|
||||
float getTimeWithoutTalking() const { return _timeWithoutTalking; }
|
||||
|
||||
private:
|
||||
protected:
|
||||
glm::vec3 calculateAverageEyePosition() const { return _leftEyePosition + (_rightEyePosition - _leftEyePosition ) * 0.5f; }
|
||||
|
||||
// disallow copies of the Head, copy of owning Avatar is disallowed too
|
||||
Head(const Head&);
|
||||
Head& operator= (const Head&);
|
||||
|
||||
bool _returnHeadToCenter;
|
||||
bool _returnHeadToCenter { false };
|
||||
glm::vec3 _position;
|
||||
glm::vec3 _rotation;
|
||||
glm::vec3 _leftEyePosition;
|
||||
glm::vec3 _rightEyePosition;
|
||||
glm::vec3 _eyePosition;
|
||||
|
||||
float _scale;
|
||||
float _lastLoudness;
|
||||
float _longTermAverageLoudness;
|
||||
float _audioAttack;
|
||||
float _audioJawOpen;
|
||||
float _trailingAudioJawOpen;
|
||||
float _mouth2;
|
||||
float _mouth3;
|
||||
float _mouth4;
|
||||
float _mouthTime;
|
||||
float _scale { 1.0f };
|
||||
float _lastLoudness { 0.0f };
|
||||
float _longTermAverageLoudness { -1.0f };
|
||||
float _audioAttack { 0.0f };
|
||||
float _audioJawOpen { 0.0f };
|
||||
float _trailingAudioJawOpen { 0.0f };
|
||||
float _mouth2 { 0.0f };
|
||||
float _mouth3 { 0.0f };
|
||||
float _mouth4 { 0.0f };
|
||||
float _mouthTime { 0.0f };
|
||||
|
||||
glm::vec3 _saccade;
|
||||
glm::vec3 _saccadeTarget;
|
||||
float _leftEyeBlinkVelocity;
|
||||
float _rightEyeBlinkVelocity;
|
||||
float _timeWithoutTalking;
|
||||
float _leftEyeBlinkVelocity { 0.0f };
|
||||
float _rightEyeBlinkVelocity { 0.0f };
|
||||
float _timeWithoutTalking { 0.0f };
|
||||
|
||||
// delta angles for local head rotation (driven by hardware input)
|
||||
float _deltaPitch;
|
||||
float _deltaYaw;
|
||||
float _deltaRoll;
|
||||
float _deltaPitch { 0.0f };
|
||||
float _deltaYaw { 0.0f };
|
||||
float _deltaRoll { 0.0f };
|
||||
|
||||
bool _isCameraMoving;
|
||||
bool _isLookingAtMe;
|
||||
quint64 _lookingAtMeStarted;
|
||||
quint64 _wasLastLookingAtMe;
|
||||
bool _isCameraMoving { false };
|
||||
bool _isLookingAtMe { false };
|
||||
quint64 _lookingAtMeStarted { 0 };
|
||||
quint64 _wasLastLookingAtMe { 0 };
|
||||
|
||||
glm::vec3 _correctedLookAtPosition;
|
||||
|
|
@@ -6,6 +6,6 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "AvatarsRendererLogging.h"
#include "Logging.h"

Q_LOGGING_CATEGORY(avatars_renderer, "hifi.avatars.rendering")

@@ -0,0 +1,16 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "OtherAvatar.h"

OtherAvatar::OtherAvatar(QThread* thread, RigPointer rig) : Avatar(thread, rig) {
    // give the pointer to our head to inherited _headData variable from AvatarData
    _headData = new Head(this);
    _skeletonModel = std::make_shared<SkeletonModel>(this, nullptr, rig);
    connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);
}

@@ -0,0 +1,20 @@
//
// Created by Bradley Austin Davis on 2017/04/27
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_OtherAvatar_h
#define hifi_OtherAvatar_h

#include "Avatar.h"

class OtherAvatar : public Avatar {
public:
    explicit OtherAvatar(QThread* thread, RigPointer rig = nullptr);
    void instantiableAvatar() {};
};

#endif // hifi_OtherAvatar_h

@ -9,19 +9,18 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "SkeletonModel.h"
|
||||
|
||||
#include <glm/gtx/transform.hpp>
|
||||
#include <QMultiMap>
|
||||
|
||||
#include <recording/Deck.h>
|
||||
#include <DebugDraw.h>
|
||||
#include <AnimDebugDraw.h>
|
||||
#include <CharacterController.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Avatar.h"
|
||||
#include "Menu.h"
|
||||
#include "SkeletonModel.h"
|
||||
#include "Util.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "AnimDebugDraw.h"
|
||||
#include "Logging.h"
|
||||
|
||||
SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer rig) :
|
||||
CauterizedModel(rig, parent),
|
||||
|
@ -47,7 +46,7 @@ void SkeletonModel::initJointStates() {
|
|||
// Determine the default eye position for avatar scale = 1.0
|
||||
int headJointIndex = geometry.headJointIndex;
|
||||
if (0 > headJointIndex || headJointIndex >= _rig->getJointStateCount()) {
|
||||
qCWarning(interfaceapp) << "Bad head joint! Got:" << headJointIndex << "jointCount:" << _rig->getJointStateCount();
|
||||
qCWarning(avatars_renderer) << "Bad head joint! Got:" << headJointIndex << "jointCount:" << _rig->getJointStateCount();
|
||||
}
|
||||
glm::vec3 leftEyePosition, rightEyePosition;
|
||||
getEyeModelPositions(leftEyePosition, rightEyePosition);
|
||||
|
@ -72,21 +71,6 @@ void SkeletonModel::initJointStates() {
|
|||
emit skeletonLoaded();
|
||||
}
|
||||
|
||||
Rig::CharacterControllerState convertCharacterControllerState(CharacterController::State state) {
|
||||
switch (state) {
|
||||
default:
|
||||
case CharacterController::State::Ground:
|
||||
return Rig::CharacterControllerState::Ground;
|
||||
case CharacterController::State::Takeoff:
|
||||
return Rig::CharacterControllerState::Takeoff;
|
||||
case CharacterController::State::InAir:
|
||||
return Rig::CharacterControllerState::InAir;
|
||||
case CharacterController::State::Hover:
|
||||
return Rig::CharacterControllerState::Hover;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// Called within Model::simulate call, below.
|
||||
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
||||
const FBXGeometry& geometry = getFBXGeometry();
|
||||
|
@ -102,122 +86,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
|||
lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
|
||||
}
|
||||
|
||||
if (_owningAvatar->isMyAvatar()) {
|
||||
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
|
||||
|
||||
Rig::HeadParameters headParams;
|
||||
|
||||
// input action is the highest priority source for head orientation.
|
||||
auto avatarHeadPose = myAvatar->getHeadControllerPoseInAvatarFrame();
|
||||
if (avatarHeadPose.isValid()) {
|
||||
glm::mat4 rigHeadMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
|
||||
headParams.rigHeadPosition = extractTranslation(rigHeadMat);
|
||||
headParams.rigHeadOrientation = glmExtractRotation(rigHeadMat);
|
||||
headParams.headEnabled = true;
|
||||
} else {
|
||||
if (qApp->isHMDMode()) {
|
||||
// get HMD position from sensor space into world space, and back into rig space
|
||||
glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
|
||||
glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
|
||||
glm::mat4 worldToRig = glm::inverse(rigToWorld);
|
||||
glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
|
||||
_rig->computeHeadFromHMD(AnimPose(rigHMDMat), headParams.rigHeadPosition, headParams.rigHeadOrientation);
|
||||
headParams.headEnabled = true;
|
||||
} else {
|
||||
// even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and down in desktop mode.
|
||||
// preMult 180 is necessary to convert from avatar to rig coordinates.
|
||||
// postMult 180 is necessary to convert head from -z forward to z forward.
|
||||
headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
|
||||
headParams.headEnabled = false;
|
||||
}
|
||||
}
|
||||
|
||||
auto avatarHipsPose = myAvatar->getHipsControllerPoseInAvatarFrame();
|
||||
if (avatarHipsPose.isValid()) {
|
||||
glm::mat4 rigHipsMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHipsPose.getRotation(), avatarHipsPose.getTranslation());
|
||||
headParams.hipsMatrix = rigHipsMat;
|
||||
headParams.hipsEnabled = true;
|
||||
} else {
|
||||
headParams.hipsEnabled = false;
|
||||
}
|
||||
|
||||
auto avatarSpine2Pose = myAvatar->getSpine2ControllerPoseInAvatarFrame();
|
||||
if (avatarSpine2Pose.isValid()) {
|
||||
glm::mat4 rigSpine2Mat = Matrices::Y_180 * createMatFromQuatAndPos(avatarSpine2Pose.getRotation(), avatarSpine2Pose.getTranslation());
|
||||
headParams.spine2Matrix = rigSpine2Mat;
|
||||
headParams.spine2Enabled = true;
|
||||
} else {
|
||||
headParams.spine2Enabled = false;
|
||||
}
|
||||
|
||||
headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;
|
||||
|
||||
_rig->updateFromHeadParameters(headParams, deltaTime);
|
||||
|
||||
Rig::HandAndFeetParameters handAndFeetParams;
|
||||
|
||||
auto leftPose = myAvatar->getLeftHandControllerPoseInAvatarFrame();
|
||||
if (leftPose.isValid()) {
|
||||
handAndFeetParams.isLeftEnabled = true;
|
||||
handAndFeetParams.leftPosition = Quaternions::Y_180 * leftPose.getTranslation();
|
||||
handAndFeetParams.leftOrientation = Quaternions::Y_180 * leftPose.getRotation();
|
||||
} else {
|
||||
handAndFeetParams.isLeftEnabled = false;
|
||||
}
|
||||
|
||||
auto rightPose = myAvatar->getRightHandControllerPoseInAvatarFrame();
|
||||
if (rightPose.isValid()) {
|
||||
handAndFeetParams.isRightEnabled = true;
|
||||
handAndFeetParams.rightPosition = Quaternions::Y_180 * rightPose.getTranslation();
|
||||
handAndFeetParams.rightOrientation = Quaternions::Y_180 * rightPose.getRotation();
|
||||
} else {
|
||||
handAndFeetParams.isRightEnabled = false;
|
||||
}
|
||||
|
||||
auto leftFootPose = myAvatar->getLeftFootControllerPoseInAvatarFrame();
|
||||
if (leftFootPose.isValid()) {
|
||||
handAndFeetParams.isLeftFootEnabled = true;
|
||||
handAndFeetParams.leftFootPosition = Quaternions::Y_180 * leftFootPose.getTranslation();
|
||||
handAndFeetParams.leftFootOrientation = Quaternions::Y_180 * leftFootPose.getRotation();
|
||||
} else {
|
||||
handAndFeetParams.isLeftFootEnabled = false;
|
||||
}
|
||||
|
||||
auto rightFootPose = myAvatar->getRightFootControllerPoseInAvatarFrame();
|
||||
if (rightFootPose.isValid()) {
|
||||
handAndFeetParams.isRightFootEnabled = true;
|
||||
handAndFeetParams.rightFootPosition = Quaternions::Y_180 * rightFootPose.getTranslation();
|
||||
handAndFeetParams.rightFootOrientation = Quaternions::Y_180 * rightFootPose.getRotation();
|
||||
} else {
|
||||
handAndFeetParams.isRightFootEnabled = false;
|
||||
}
|
||||
|
||||
handAndFeetParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();
|
||||
handAndFeetParams.bodyCapsuleHalfHeight = myAvatar->getCharacterController()->getCapsuleHalfHeight();
|
||||
handAndFeetParams.bodyCapsuleLocalOffset = myAvatar->getCharacterController()->getCapsuleLocalOffset();
|
||||
|
||||
_rig->updateFromHandAndFeetParameters(handAndFeetParams, deltaTime);
|
||||
|
||||
Rig::CharacterControllerState ccState = convertCharacterControllerState(myAvatar->getCharacterController()->getState());
|
||||
|
||||
auto velocity = myAvatar->getLocalVelocity();
|
||||
auto position = myAvatar->getLocalPosition();
|
||||
auto orientation = myAvatar->getLocalOrientation();
|
||||
_rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);
|
||||
|
||||
// evaluate AnimGraph animation and update jointStates.
|
||||
Model::updateRig(deltaTime, parentTransform);
|
||||
|
||||
Rig::EyeParameters eyeParams;
|
||||
eyeParams.eyeLookAt = lookAt;
|
||||
eyeParams.eyeSaccade = head->getSaccade();
|
||||
eyeParams.modelRotation = getRotation();
|
||||
eyeParams.modelTranslation = getTranslation();
|
||||
eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
|
||||
eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;
|
||||
|
||||
_rig->updateFromEyeParameters(eyeParams);
|
||||
} else {
|
||||
if (!_owningAvatar->isMyAvatar()) {
|
||||
// no need to call Model::updateRig() because otherAvatars get their joint state
|
||||
// copied directly from AvtarData::_jointData (there are no Rig animations to blend)
|
||||
_needsUpdateClusterMatrices = true;
|
||||
|
@ -249,6 +118,9 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
|||
|
||||
_rig->updateFromEyeParameters(eyeParams);
|
||||
}
|
||||
|
||||
// evaluate AnimGraph animation and update jointStates.
|
||||
Parent::updateRig(deltaTime, parentTransform);
|
||||
}
|
||||
|
||||
void SkeletonModel::updateAttitude() {
|
||||
|
@ -264,7 +136,7 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
|
|||
if (fullUpdate) {
|
||||
setBlendshapeCoefficients(_owningAvatar->getHead()->getSummedBlendshapeCoefficients());
|
||||
|
||||
Model::simulate(deltaTime, fullUpdate);
|
||||
Parent::simulate(deltaTime, fullUpdate);
|
||||
|
||||
// let rig compute the model offset
|
||||
glm::vec3 registrationPoint;
|
||||
|
@ -272,7 +144,7 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
|
|||
setOffset(registrationPoint);
|
||||
}
|
||||
} else {
|
||||
Model::simulate(deltaTime, fullUpdate);
|
||||
Parent::simulate(deltaTime, fullUpdate);
|
||||
}
|
||||
|
||||
if (!isActive() || !_owningAvatar->isMyAvatar()) {
|
|
@ -23,6 +23,7 @@ using SkeletonModelWeakPointer = std::weak_ptr<SkeletonModel>;
|
|||
|
||||
/// A skeleton loaded from a model.
|
||||
class SkeletonModel : public CauterizedModel {
|
||||
using Parent = CauterizedModel;
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
|
@ -114,7 +115,7 @@ protected:
|
|||
|
||||
void computeBoundingShape();
|
||||
|
||||
private:
|
||||
protected:
|
||||
|
||||
bool getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
|
||||
|
|
@@ -94,7 +94,7 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
// +-----+-----+-+-+-+--+
// Key state - K0,K1 is found in the 1st and 2nd bits
// Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
// Faceshift - F is found in the 5th bit
// Face tracker - F is found in the 5th bit
// Eye tracker - E is found in the 6th bit
// Referential Data - R is found in the 7th bit
const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits

@@ -123,7 +123,7 @@ namespace AvatarDataPacket {
    // it might be nice to use a dictionary to compress that

    // Packet State Flags - we store the details about the existence of other records in this bitset:
    // AvatarGlobalPosition, Avatar Faceshift, eye tracking, and existence of
    // AvatarGlobalPosition, Avatar face tracker, eye tracking, and existence of
    using HasFlags = uint16_t;
    const HasFlags PACKET_HAS_AVATAR_GLOBAL_POSITION = 1U << 0;
    const HasFlags PACKET_HAS_AVATAR_BOUNDING_BOX = 1U << 1;

@@ -357,6 +357,8 @@ class AvatarData : public QObject, public SpatiallyNestable {

public:

    virtual QString getName() const override { return QString("Avatar:") + _displayName; }

    static const QString FRAME_NAME;

    static void fromFrame(const QByteArray& frameData, AvatarData& avatar, bool useFrameSkeleton = true);

@@ -23,11 +23,6 @@

#include "AvatarData.h"

/// The names of the blendshapes expected by Faceshift, terminated with an empty string.
extern const char* FACESHIFT_BLENDSHAPES[];
/// The size of FACESHIFT_BLENDSHAPES
extern const int NUM_FACESHIFT_BLENDSHAPES;

HeadData::HeadData(AvatarData* owningAvatar) :
    _baseYaw(0.0f),
    _basePitch(0.0f),

@@ -281,7 +281,7 @@
    float getAngularDamping() const;
    void setAngularDamping(float value);

    QString getName() const;
    virtual QString getName() const override;
    void setName(const QString& value);
    QString getDebugName();

@ -407,9 +407,11 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
|
|||
// return QUuid();
|
||||
// }
|
||||
|
||||
bool entityFound { false };
|
||||
_entityTree->withReadLock([&] {
|
||||
EntityItemPointer entity = _entityTree->findEntityByEntityItemID(entityID);
|
||||
if (entity) {
|
||||
entityFound = true;
|
||||
// make sure the properties has a type, so that the encode can know which properties to include
|
||||
properties.setType(entity->getType());
|
||||
bool hasTerseUpdateChanges = properties.hasTerseUpdateChanges();
|
||||
|
@ -464,6 +466,27 @@ QUuid EntityScriptingInterface::editEntity(QUuid id, const EntityItemProperties&
|
|||
});
|
||||
}
|
||||
});
|
||||
if (!entityFound) {
|
||||
// we've made an edit to an entity we don't know about, or to a non-entity. If it's a known non-entity,
|
||||
// print a warning and don't send an edit packet to the entity-server.
|
||||
QSharedPointer<SpatialParentFinder> parentFinder = DependencyManager::get<SpatialParentFinder>();
|
||||
if (parentFinder) {
|
||||
bool success;
|
||||
auto nestableWP = parentFinder->find(id, success, static_cast<SpatialParentTree*>(_entityTree.get()));
|
||||
if (success) {
|
||||
auto nestable = nestableWP.lock();
|
||||
if (nestable) {
|
||||
NestableType nestableType = nestable->getNestableType();
|
||||
if (nestableType == NestableType::Overlay || nestableType == NestableType::Avatar) {
|
||||
qCWarning(entities) << "attempted edit on non-entity: " << id << nestable->getName();
|
||||
return QUuid(); // null UUID to indicate failure
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// we queue edit packets even if we don't know about the entity. This is to allow AC agents
|
||||
// to edit entities they know only by ID.
|
||||
queueEntityMessage(PacketType::EntityEdit, entityID, properties);
|
||||
return id;
|
||||
}
|
||||
|
@@ -1515,6 +1538,24 @@ bool EntityScriptingInterface::isChildOfParent(QUuid childID, QUuid parentID) {
    return isChild;
}

QString EntityScriptingInterface::getNestableType(QUuid id) {
    QSharedPointer<SpatialParentFinder> parentFinder = DependencyManager::get<SpatialParentFinder>();
    if (!parentFinder) {
        return "unknown";
    }
    bool success;
    SpatiallyNestableWeakPointer objectWP = parentFinder->find(id, success);
    if (!success) {
        return "unknown";
    }
    SpatiallyNestablePointer object = objectWP.lock();
    if (!object) {
        return "unknown";
    }
    NestableType nestableType = object->getNestableType();
    return SpatiallyNestable::nestableTypeToString(nestableType);
}

QVector<QUuid> EntityScriptingInterface::getChildrenIDsOfJoint(const QUuid& parentID, int jointIndex) {
    QVector<QUuid> result;
    if (!_entityTree) {

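The new getNestableType() above lets callers distinguish entities from avatars and overlays before attempting an edit. A minimal, hypothetical guard built on it (the "entity" string assumes SpatiallyNestable::nestableTypeToString() maps NestableType::Entity to "entity", which this diff does not show):

// Hypothetical helper: only report an id as editable when it names an entity.
bool isEditableEntity(EntityScriptingInterface& entities, const QUuid& id) {
    return entities.getNestableType(id) == "entity";   // "entity" is an assumed return value
}
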
@@ -304,6 +304,8 @@ public slots:
    Q_INVOKABLE QVector<QUuid> getChildrenIDsOfJoint(const QUuid& parentID, int jointIndex);
    Q_INVOKABLE bool isChildOfParent(QUuid childID, QUuid parentID);

    Q_INVOKABLE QString getNestableType(QUuid id);

    Q_INVOKABLE QUuid getKeyboardFocusEntity() const;
    Q_INVOKABLE void setKeyboardFocusEntity(QUuid id);

@ -990,6 +990,17 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
|
|||
entityItemID, properties);
|
||||
endDecode = usecTimestampNow();
|
||||
|
||||
EntityItemPointer existingEntity;
|
||||
if (!isAdd) {
|
||||
// search for the entity by EntityItemID
|
||||
startLookup = usecTimestampNow();
|
||||
existingEntity = findEntityByEntityItemID(entityItemID);
|
||||
endLookup = usecTimestampNow();
|
||||
if (!existingEntity) {
|
||||
// this is not an add-entity operation, and we don't know about the identified entity.
|
||||
validEditPacket = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (validEditPacket && !_entityScriptSourceWhitelist.isEmpty() && !properties.getScript().isEmpty()) {
|
||||
bool passedWhiteList = false;
|
||||
|
@ -1036,12 +1047,6 @@ int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned c
|
|||
// If we got a valid edit packet, then it could be a new entity or it could be an update to
|
||||
// an existing entity... handle appropriately
|
||||
if (validEditPacket) {
|
||||
|
||||
// search for the entity by EntityItemID
|
||||
startLookup = usecTimestampNow();
|
||||
EntityItemPointer existingEntity = findEntityByEntityItemID(entityItemID);
|
||||
endLookup = usecTimestampNow();
|
||||
|
||||
startFilter = usecTimestampNow();
|
||||
bool wasChanged = false;
|
||||
// Having (un)lock rights bypasses the filter, unless it's a physics result.
|
||||
|
|
|
@ -24,6 +24,7 @@
|
|||
|
||||
#include <shared/NsightHelpers.h>
|
||||
#include <NetworkAccessManager.h>
|
||||
#include <ResourceManager.h>
|
||||
|
||||
#include "FBXReader.h"
|
||||
#include "ModelFormatLogging.h"
|
||||
|
@ -165,6 +166,7 @@ bool OBJFace::add(const QByteArray& vertexIndex, const QByteArray& textureIndex,
|
|||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
QVector<OBJFace> OBJFace::triangulate() {
|
||||
QVector<OBJFace> newFaces;
|
||||
const int nVerticesInATriangle = 3;
|
||||
|
@ -183,6 +185,7 @@ QVector<OBJFace> OBJFace::triangulate() {
|
|||
}
|
||||
return newFaces;
|
||||
}
|
||||
|
||||
void OBJFace::addFrom(const OBJFace* face, int index) { // add using data from f at index i
|
||||
vertexIndices.append(face->vertexIndices[index]);
|
||||
if (face->textureUVIndices.count() > 0) { // Any at all. Runtime error if not consistent.
|
||||
|
@ -193,24 +196,13 @@ void OBJFace::addFrom(const OBJFace* face, int index) { // add using data from f
|
|||
}
|
||||
}
|
||||
|
||||
static bool replyOK(QNetworkReply* netReply, QUrl url) { // This will be reworked when we make things asynchronous
|
||||
return (netReply && netReply->isFinished() &&
|
||||
(url.toString().startsWith("file", Qt::CaseInsensitive) ? // file urls don't have http status codes
|
||||
netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString().isEmpty() :
|
||||
(netReply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt() == 200)));
|
||||
}
|
||||
|
||||
bool OBJReader::isValidTexture(const QByteArray &filename) {
|
||||
if (_url.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
QUrl candidateUrl = _url.resolved(QUrl(filename));
|
||||
QNetworkReply *netReply = request(candidateUrl, true);
|
||||
bool isValid = replyOK(netReply, candidateUrl);
|
||||
if (netReply) {
|
||||
netReply->deleteLater();
|
||||
}
|
||||
return isValid;
|
||||
|
||||
return ResourceManager::resourceExists(candidateUrl);
|
||||
}
|
||||
|
||||
void OBJReader::parseMaterialLibrary(QIODevice* device) {
|
||||
|
@ -274,7 +266,28 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
|
|||
}
|
||||
}
|
||||
|
||||
QNetworkReply* OBJReader::request(QUrl& url, bool isTest) {
|
||||
std::tuple<bool, QByteArray> requestData(QUrl& url) {
|
||||
auto request = ResourceManager::createResourceRequest(nullptr, url);
|
||||
|
||||
if (!request) {
|
||||
return std::make_tuple(false, QByteArray());
|
||||
}
|
||||
|
||||
request->send();
|
||||
|
||||
QEventLoop loop;
|
||||
QObject::connect(request, &ResourceRequest::finished, &loop, &QEventLoop::quit);
|
||||
loop.exec();
|
||||
|
||||
if (request->getResult() == ResourceRequest::Success) {
|
||||
return std::make_tuple(true, request->getData());
|
||||
} else {
|
||||
return std::make_tuple(false, QByteArray());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
QNetworkReply* request(QUrl& url, bool isTest) {
|
||||
if (!qApp) {
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -293,10 +306,7 @@ QNetworkReply* OBJReader::request(QUrl& url, bool isTest) {
|
|||
QEventLoop loop; // Create an event loop that will quit when we get the finished signal
|
||||
QObject::connect(netReply, SIGNAL(finished()), &loop, SLOT(quit()));
|
||||
loop.exec(); // Nothing is going to happen on this whole run thread until we get this
|
||||
static const int WAIT_TIMEOUT_MS = 500;
|
||||
while (!aboutToQuit && qApp && !netReply->isReadable()) {
|
||||
netReply->waitForReadyRead(WAIT_TIMEOUT_MS); // so we might as well block this thread waiting for the response, rather than
|
||||
}
|
||||
|
||||
QObject::disconnect(connection);
|
||||
return netReply; // trying to sync later on.
|
||||
}
|
||||
|
@ -446,142 +456,142 @@ FBXGeometry* OBJReader::readOBJ(QByteArray& model, const QVariantHash& mapping,
|
|||
// add a new meshPart to the geometry's single mesh.
|
||||
while (parseOBJGroup(tokenizer, mapping, geometry, scaleGuess, combineParts)) {}
|
||||
|
||||
FBXMesh& mesh = geometry.meshes[0];
|
||||
mesh.meshIndex = 0;
|
||||
FBXMesh& mesh = geometry.meshes[0];
|
||||
mesh.meshIndex = 0;
|
||||
|
||||
geometry.joints.resize(1);
|
||||
geometry.joints[0].isFree = false;
|
||||
geometry.joints[0].parentIndex = -1;
|
||||
geometry.joints[0].distanceToParent = 0;
|
||||
geometry.joints[0].translation = glm::vec3(0, 0, 0);
|
||||
geometry.joints[0].rotationMin = glm::vec3(0, 0, 0);
|
||||
geometry.joints[0].rotationMax = glm::vec3(0, 0, 0);
|
||||
geometry.joints[0].name = "OBJ";
|
||||
geometry.joints[0].isSkeletonJoint = true;
|
||||
geometry.joints.resize(1);
|
||||
geometry.joints[0].isFree = false;
|
||||
geometry.joints[0].parentIndex = -1;
|
||||
geometry.joints[0].distanceToParent = 0;
|
||||
geometry.joints[0].translation = glm::vec3(0, 0, 0);
|
||||
geometry.joints[0].rotationMin = glm::vec3(0, 0, 0);
|
||||
geometry.joints[0].rotationMax = glm::vec3(0, 0, 0);
|
||||
geometry.joints[0].name = "OBJ";
|
||||
geometry.joints[0].isSkeletonJoint = true;
|
||||
|
||||
geometry.jointIndices["x"] = 1;
|
||||
geometry.jointIndices["x"] = 1;
|
||||
|
||||
FBXCluster cluster;
|
||||
cluster.jointIndex = 0;
|
||||
cluster.inverseBindMatrix = glm::mat4(1, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
0, 0, 0, 1);
|
||||
mesh.clusters.append(cluster);
|
||||
FBXCluster cluster;
|
||||
cluster.jointIndex = 0;
|
||||
cluster.inverseBindMatrix = glm::mat4(1, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
0, 0, 0, 1);
|
||||
mesh.clusters.append(cluster);
|
||||
|
||||
QMap<QString, int> materialMeshIdMap;
|
||||
QVector<FBXMeshPart> fbxMeshParts;
|
||||
for (int i = 0, meshPartCount = 0; i < mesh.parts.count(); i++, meshPartCount++) {
|
||||
FBXMeshPart& meshPart = mesh.parts[i];
|
||||
FaceGroup faceGroup = faceGroups[meshPartCount];
|
||||
QMap<QString, int> materialMeshIdMap;
|
||||
QVector<FBXMeshPart> fbxMeshParts;
|
||||
for (int i = 0, meshPartCount = 0; i < mesh.parts.count(); i++, meshPartCount++) {
|
||||
FBXMeshPart& meshPart = mesh.parts[i];
|
||||
FaceGroup faceGroup = faceGroups[meshPartCount];
|
||||
bool specifiesUV = false;
|
||||
foreach(OBJFace face, faceGroup) {
|
||||
// Go through all of the OBJ faces and determine the number of different materials necessary (each different material will be a unique mesh).
|
||||
// NOTE (trent/mittens 3/30/17): this seems hardcore wasteful and is slowed down a bit by iterating through the face group twice, but it's the best way I've thought of to hack multi-material support in an OBJ into this pipeline.
|
||||
if (!materialMeshIdMap.contains(face.materialName)) {
|
||||
// Create a new FBXMesh for this material mapping.
|
||||
materialMeshIdMap.insert(face.materialName, materialMeshIdMap.count());
|
||||
foreach(OBJFace face, faceGroup) {
|
||||
// Go through all of the OBJ faces and determine the number of different materials necessary (each different material will be a unique mesh).
|
||||
// NOTE (trent/mittens 3/30/17): this seems hardcore wasteful and is slowed down a bit by iterating through the face group twice, but it's the best way I've thought of to hack multi-material support in an OBJ into this pipeline.
|
||||
if (!materialMeshIdMap.contains(face.materialName)) {
|
||||
// Create a new FBXMesh for this material mapping.
|
||||
materialMeshIdMap.insert(face.materialName, materialMeshIdMap.count());
|
||||
|
||||
fbxMeshParts.append(FBXMeshPart());
|
||||
FBXMeshPart& meshPartNew = fbxMeshParts.last();
|
||||
meshPartNew.quadIndices = QVector<int>(meshPart.quadIndices); // Copy over quad indices [NOTE (trent/mittens, 4/3/17): Likely unnecessary since they go unused anyway].
|
||||
meshPartNew.quadTrianglesIndices = QVector<int>(meshPart.quadTrianglesIndices); // Copy over quad triangulated indices [NOTE (trent/mittens, 4/3/17): Likely unnecessary since they go unused anyway].
|
||||
meshPartNew.triangleIndices = QVector<int>(meshPart.triangleIndices); // Copy over triangle indices.
|
||||
fbxMeshParts.append(FBXMeshPart());
|
||||
FBXMeshPart& meshPartNew = fbxMeshParts.last();
|
||||
meshPartNew.quadIndices = QVector<int>(meshPart.quadIndices); // Copy over quad indices [NOTE (trent/mittens, 4/3/17): Likely unnecessary since they go unused anyway].
|
||||
meshPartNew.quadTrianglesIndices = QVector<int>(meshPart.quadTrianglesIndices); // Copy over quad triangulated indices [NOTE (trent/mittens, 4/3/17): Likely unnecessary since they go unused anyway].
|
||||
meshPartNew.triangleIndices = QVector<int>(meshPart.triangleIndices); // Copy over triangle indices.
|
||||
|
||||
// Do some of the material logic (which previously lived below) now.
|
||||
// All the faces in the same group will have the same name and material.
|
||||
QString groupMaterialName = face.materialName;
|
||||
if (groupMaterialName.isEmpty() && specifiesUV) {
|
||||
// Do some of the material logic (which previously lived below) now.
|
||||
// All the faces in the same group will have the same name and material.
|
||||
QString groupMaterialName = face.materialName;
|
||||
if (groupMaterialName.isEmpty() && specifiesUV) {
|
||||
#ifdef WANT_DEBUG
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: " << url
|
||||
<< " needs a texture that isn't specified. Using default mechanism.";
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: " << url
|
||||
<< " needs a texture that isn't specified. Using default mechanism.";
|
||||
#endif
|
||||
groupMaterialName = SMART_DEFAULT_MATERIAL_NAME;
|
||||
}
|
||||
if (!groupMaterialName.isEmpty()) {
|
||||
OBJMaterial& material = materials[groupMaterialName];
|
||||
if (specifiesUV) {
|
||||
material.userSpecifiesUV = true; // Note might not be true in a later usage.
|
||||
}
|
||||
if (specifiesUV || (groupMaterialName.compare("none", Qt::CaseInsensitive) != 0)) {
|
||||
// Blender has a convention that a material named "None" isn't really used (or defined).
|
||||
material.used = true;
|
||||
needsMaterialLibrary = groupMaterialName != SMART_DEFAULT_MATERIAL_NAME;
|
||||
}
|
||||
materials[groupMaterialName] = material;
|
||||
meshPartNew.materialID = groupMaterialName;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// clean up old mesh parts.
|
||||
int unmodifiedMeshPartCount = mesh.parts.count();
|
||||
mesh.parts.clear();
|
||||
mesh.parts = QVector<FBXMeshPart>(fbxMeshParts);
|
||||
|
||||
for (int i = 0, meshPartCount = 0; i < unmodifiedMeshPartCount; i++, meshPartCount++) {
|
||||
FaceGroup faceGroup = faceGroups[meshPartCount];
|
||||
|
||||
// Now that each mesh has been created with its own unique material mappings, fill them with data (vertex data is duplicated, face data is not).
|
||||
foreach(OBJFace face, faceGroup) {
|
||||
FBXMeshPart& meshPart = mesh.parts[materialMeshIdMap[face.materialName]];
|
||||
|
||||
glm::vec3 v0 = checked_at(vertices, face.vertexIndices[0]);
|
||||
glm::vec3 v1 = checked_at(vertices, face.vertexIndices[1]);
|
||||
glm::vec3 v2 = checked_at(vertices, face.vertexIndices[2]);
|
||||
|
||||
// Scale the vertices if the OBJ file scale is specified as non-one.
|
||||
if (scaleGuess != 1.0f) {
|
||||
v0 *= scaleGuess;
|
||||
v1 *= scaleGuess;
|
||||
v2 *= scaleGuess;
|
||||
}
|
||||
|
||||
// Add the vertices.
|
||||
meshPart.triangleIndices.append(mesh.vertices.count()); // not face.vertexIndices into vertices
|
||||
mesh.vertices << v0;
|
||||
meshPart.triangleIndices.append(mesh.vertices.count());
|
||||
mesh.vertices << v1;
|
||||
meshPart.triangleIndices.append(mesh.vertices.count());
|
||||
mesh.vertices << v2;
|
||||
|
||||
glm::vec3 n0, n1, n2;
|
||||
if (face.normalIndices.count()) {
|
||||
n0 = checked_at(normals, face.normalIndices[0]);
|
||||
n1 = checked_at(normals, face.normalIndices[1]);
|
||||
n2 = checked_at(normals, face.normalIndices[2]);
|
||||
} else {
|
||||
// generate normals from triangle plane if not provided
|
||||
n0 = n1 = n2 = glm::cross(v1 - v0, v2 - v0);
|
||||
}
|
||||
|
||||
mesh.normals.append(n0);
|
||||
mesh.normals.append(n1);
|
||||
mesh.normals.append(n2);
|
||||
|
||||
if (face.textureUVIndices.count()) {
|
||||
mesh.texCoords <<
|
||||
checked_at(textureUVs, face.textureUVIndices[0]) <<
|
||||
checked_at(textureUVs, face.textureUVIndices[1]) <<
|
||||
checked_at(textureUVs, face.textureUVIndices[2]);
|
||||
} else {
|
||||
glm::vec2 corner(0.0f, 1.0f);
|
||||
mesh.texCoords << corner << corner << corner;
|
||||
}
|
||||
}
|
||||
groupMaterialName = SMART_DEFAULT_MATERIAL_NAME;
|
||||
}
|
||||
if (!groupMaterialName.isEmpty()) {
|
||||
OBJMaterial& material = materials[groupMaterialName];
|
||||
if (specifiesUV) {
|
||||
material.userSpecifiesUV = true; // Note might not be true in a later usage.
|
||||
}
|
||||
if (specifiesUV || (groupMaterialName.compare("none", Qt::CaseInsensitive) != 0)) {
|
||||
// Blender has a convention that a material named "None" isn't really used (or defined).
|
||||
material.used = true;
|
||||
needsMaterialLibrary = groupMaterialName != SMART_DEFAULT_MATERIAL_NAME;
|
||||
}
|
||||
materials[groupMaterialName] = material;
|
||||
meshPartNew.materialID = groupMaterialName;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mesh.meshExtents.reset();
|
||||
foreach(const glm::vec3& vertex, mesh.vertices) {
|
||||
mesh.meshExtents.addPoint(vertex);
|
||||
geometry.meshExtents.addPoint(vertex);
|
||||
}
|
||||
// clean up old mesh parts.
|
||||
int unmodifiedMeshPartCount = mesh.parts.count();
|
||||
mesh.parts.clear();
|
||||
mesh.parts = QVector<FBXMeshPart>(fbxMeshParts);
|
||||
|
||||
// Build the single mesh.
|
||||
FBXReader::buildModelMesh(mesh, url.toString());
|
||||
for (int i = 0, meshPartCount = 0; i < unmodifiedMeshPartCount; i++, meshPartCount++) {
|
||||
FaceGroup faceGroup = faceGroups[meshPartCount];
|
||||
|
||||
// fbxDebugDump(geometry);
|
||||
// Now that each mesh has been created with its own unique material mappings, fill them with data (vertex data is duplicated, face data is not).
|
||||
foreach(OBJFace face, faceGroup) {
|
||||
FBXMeshPart& meshPart = mesh.parts[materialMeshIdMap[face.materialName]];
|
||||
|
||||
glm::vec3 v0 = checked_at(vertices, face.vertexIndices[0]);
|
||||
glm::vec3 v1 = checked_at(vertices, face.vertexIndices[1]);
|
||||
glm::vec3 v2 = checked_at(vertices, face.vertexIndices[2]);
|
||||
|
||||
// Scale the vertices if the OBJ file scale is specified as non-one.
|
||||
if (scaleGuess != 1.0f) {
|
||||
v0 *= scaleGuess;
|
||||
v1 *= scaleGuess;
|
||||
v2 *= scaleGuess;
|
||||
}
|
||||
|
||||
// Add the vertices.
|
||||
meshPart.triangleIndices.append(mesh.vertices.count()); // not face.vertexIndices into vertices
|
||||
mesh.vertices << v0;
|
||||
meshPart.triangleIndices.append(mesh.vertices.count());
|
||||
mesh.vertices << v1;
|
||||
meshPart.triangleIndices.append(mesh.vertices.count());
|
||||
mesh.vertices << v2;
|
||||
|
||||
glm::vec3 n0, n1, n2;
|
||||
if (face.normalIndices.count()) {
|
||||
n0 = checked_at(normals, face.normalIndices[0]);
|
||||
n1 = checked_at(normals, face.normalIndices[1]);
|
||||
n2 = checked_at(normals, face.normalIndices[2]);
|
||||
} else {
|
||||
// generate normals from triangle plane if not provided
|
||||
n0 = n1 = n2 = glm::cross(v1 - v0, v2 - v0);
|
||||
}
|
||||
|
||||
mesh.normals.append(n0);
|
||||
mesh.normals.append(n1);
|
||||
mesh.normals.append(n2);
|
||||
|
||||
if (face.textureUVIndices.count()) {
|
||||
mesh.texCoords <<
|
||||
checked_at(textureUVs, face.textureUVIndices[0]) <<
|
||||
checked_at(textureUVs, face.textureUVIndices[1]) <<
|
||||
checked_at(textureUVs, face.textureUVIndices[2]);
|
||||
} else {
|
||||
glm::vec2 corner(0.0f, 1.0f);
|
||||
mesh.texCoords << corner << corner << corner;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mesh.meshExtents.reset();
|
||||
foreach(const glm::vec3& vertex, mesh.vertices) {
|
||||
mesh.meshExtents.addPoint(vertex);
|
||||
geometry.meshExtents.addPoint(vertex);
|
||||
}
|
||||
|
||||
// Build the single mesh.
|
||||
FBXReader::buildModelMesh(mesh, url.toString());
|
||||
|
||||
// fbxDebugDump(geometry);
|
||||
} catch(const std::exception& e) {
|
||||
qCDebug(modelformat) << "OBJ reader fail: " << e.what();
|
||||
}
|
||||
|
@ -624,15 +634,15 @@ FBXGeometry* OBJReader::readOBJ(QByteArray& model, const QVariantHash& mapping,
|
|||
// Throw away any path part of libraryName, and merge against original url.
|
||||
QUrl libraryUrl = _url.resolved(QUrl(libraryName).fileName());
|
||||
qCDebug(modelformat) << "OBJ Reader material library" << libraryName << "used in" << _url;
|
||||
QNetworkReply* netReply = request(libraryUrl, false);
|
||||
if (replyOK(netReply, libraryUrl)) {
|
||||
parseMaterialLibrary(netReply);
|
||||
bool success;
|
||||
QByteArray data;
|
||||
std::tie<bool, QByteArray>(success, data) = requestData(libraryUrl);
|
||||
if (success) {
|
||||
QBuffer buffer { &data };
|
||||
buffer.open(QIODevice::ReadOnly);
|
||||
parseMaterialLibrary(&buffer);
|
||||
} else {
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING:" << libraryName << "did not answer. Got"
|
||||
<< (!netReply ? "aborted" : netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString());
|
||||
}
|
||||
if (netReply) {
|
||||
netReply->deleteLater();
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING:" << libraryName << "did not answer";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -655,9 +665,9 @@ FBXGeometry* OBJReader::readOBJ(QByteArray& model, const QVariantHash& mapping,
|
|||
if (!objMaterial.diffuseTextureFilename.isEmpty()) {
|
||||
fbxMaterial.albedoTexture.filename = objMaterial.diffuseTextureFilename;
|
||||
}
|
||||
if (!objMaterial.specularTextureFilename.isEmpty()) {
|
||||
fbxMaterial.specularTexture.filename = objMaterial.specularTextureFilename;
|
||||
}
|
||||
if (!objMaterial.specularTextureFilename.isEmpty()) {
|
||||
fbxMaterial.specularTexture.filename = objMaterial.specularTextureFilename;
|
||||
}
|
||||
|
||||
modelMaterial->setEmissive(fbxMaterial.emissiveColor);
|
||||
modelMaterial->setAlbedo(fbxMaterial.diffuseColor);
|
||||
|
|
|
@ -72,7 +72,6 @@ public:
|
|||
QString currentMaterialName;
|
||||
QHash<QString, OBJMaterial> materials;
|
||||
|
||||
QNetworkReply* request(QUrl& url, bool isTest);
|
||||
FBXGeometry* readOBJ(QByteArray& model, const QVariantHash& mapping, bool combineParts, const QUrl& url = QUrl());
|
||||
|
||||
private:
|
||||
|
|
|
@ -149,6 +149,10 @@ void GLBackend::resetUniformStage() {
|
|||
|
||||
void GLBackend::do_setUniformBuffer(const Batch& batch, size_t paramOffset) {
|
||||
GLuint slot = batch._params[paramOffset + 3]._uint;
|
||||
if (slot >(GLuint)MAX_NUM_UNIFORM_BUFFERS) {
|
||||
qCDebug(gpugllogging) << "GLBackend::do_setUniformBuffer: Trying to set a uniform Buffer at slot #" << slot << " which doesn't exist. MaxNumUniformBuffers = " << getMaxNumUniformBuffers();
|
||||
return;
|
||||
}
|
||||
BufferPointer uniformBuffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
|
||||
GLintptr rangeStart = batch._params[paramOffset + 1]._uint;
|
||||
GLsizeiptr rangeSize = batch._params[paramOffset + 0]._uint;
|
||||
|
@ -203,7 +207,7 @@ void GLBackend::resetResourceStage() {
|
|||
void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
|
||||
GLuint slot = batch._params[paramOffset + 1]._uint;
|
||||
if (slot >= (GLuint)MAX_NUM_RESOURCE_BUFFERS) {
|
||||
// "GLBackend::do_setResourceBuffer: Trying to set a resource Buffer at slot #" + slot + " which doesn't exist. MaxNumResourceBuffers = " + getMaxNumResourceBuffers());
|
||||
qCDebug(gpugllogging) << "GLBackend::do_setResourceBuffer: Trying to set a resource Buffer at slot #" << slot << " which doesn't exist. MaxNumResourceBuffers = " << getMaxNumResourceBuffers();
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -233,7 +237,7 @@ void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
|
|||
void GLBackend::do_setResourceTexture(const Batch& batch, size_t paramOffset) {
|
||||
GLuint slot = batch._params[paramOffset + 1]._uint;
|
||||
if (slot >= (GLuint) MAX_NUM_RESOURCE_TEXTURES) {
|
||||
// "GLBackend::do_setResourceTexture: Trying to set a resource Texture at slot #" + slot + " which doesn't exist. MaxNumResourceTextures = " + getMaxNumResourceTextures());
|
||||
qCDebug(gpugllogging) << "GLBackend::do_setResourceTexture: Trying to set a resource Texture at slot #" << slot << " which doesn't exist. MaxNumResourceTextures = " << getMaxNumResourceTextures();
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@ -140,6 +140,7 @@ GLenum GLTexelFormat::evalGLTexelFormatInternal(const gpu::Element& dstFormat) {
|
|||
switch (dstFormat.getSemantic()) {
|
||||
case gpu::RGB:
|
||||
case gpu::RGBA:
|
||||
case gpu::XY:
|
||||
result = GL_RG8;
|
||||
break;
|
||||
default:
|
||||
|
@ -289,6 +290,7 @@ GLTexelFormat GLTexelFormat::evalGLTexelFormat(const Element& dstFormat, const E
|
|||
switch (dstFormat.getSemantic()) {
|
||||
case gpu::RGB:
|
||||
case gpu::RGBA:
|
||||
case gpu::XY:
|
||||
texel.internalFormat = GL_RG8;
|
||||
break;
|
||||
default:
|
||||
|
@ -516,6 +518,7 @@ GLTexelFormat GLTexelFormat::evalGLTexelFormat(const Element& dstFormat, const E
|
|||
switch (dstFormat.getSemantic()) {
|
||||
case gpu::RGB:
|
||||
case gpu::RGBA:
|
||||
case gpu::XY:
|
||||
texel.internalFormat = GL_RG8;
|
||||
break;
|
||||
default:
|
||||
|
|
|
@ -525,11 +525,14 @@ void GL41VariableAllocationTexture::populateTransferQueue() {
|
|||
|
||||
// break down the transfers into chunks so that no single transfer is
|
||||
// consuming more than X bandwidth
|
||||
// For compressed format, regions must be a multiple of the 4x4 tiles, so enforce 4 lines as the minimum block
|
||||
auto mipSize = _gpuObject.getStoredMipFaceSize(sourceMip, face);
|
||||
const auto lines = mipDimensions.y;
|
||||
auto bytesPerLine = mipSize / lines;
|
||||
const uint32_t BLOCK_NUM_LINES { 4 };
|
||||
const auto numBlocks = (lines + (BLOCK_NUM_LINES - 1)) / BLOCK_NUM_LINES;
|
||||
auto bytesPerBlock = mipSize / numBlocks;
|
||||
Q_ASSERT(0 == (mipSize % lines));
|
||||
uint32_t linesPerTransfer = (uint32_t)(MAX_TRANSFER_SIZE / bytesPerLine);
|
||||
uint32_t linesPerTransfer = BLOCK_NUM_LINES * (uint32_t)(MAX_TRANSFER_SIZE / bytesPerBlock);
|
||||
uint32_t lineOffset = 0;
|
||||
while (lineOffset < lines) {
|
||||
uint32_t linesToCopy = std::min<uint32_t>(lines - lineOffset, linesPerTransfer);
|
||||
|
|
|
@ -196,11 +196,14 @@ void GL45ResourceTexture::populateTransferQueue() {
|
|||
|
||||
// break down the transfers into chunks so that no single transfer is
|
||||
// consuming more than X bandwidth
|
||||
// For compressed format, regions must be a multiple of the 4x4 tiles, so enforce 4 lines as the minimum block
|
||||
auto mipSize = _gpuObject.getStoredMipFaceSize(sourceMip, face);
|
||||
const auto lines = mipDimensions.y;
|
||||
auto bytesPerLine = mipSize / lines;
|
||||
const uint32_t BLOCK_NUM_LINES { 4 };
|
||||
const auto numBlocks = (lines + (BLOCK_NUM_LINES - 1)) / BLOCK_NUM_LINES;
|
||||
auto bytesPerBlock = mipSize / numBlocks;
|
||||
Q_ASSERT(0 == (mipSize % lines));
|
||||
uint32_t linesPerTransfer = (uint32_t)(MAX_TRANSFER_SIZE / bytesPerLine);
|
||||
uint32_t linesPerTransfer = BLOCK_NUM_LINES * (uint32_t)(MAX_TRANSFER_SIZE / bytesPerBlock);
|
||||
uint32_t lineOffset = 0;
|
||||
while (lineOffset < lines) {
|
||||
uint32_t linesToCopy = std::min<uint32_t>(lines - lineOffset, linesPerTransfer);
|
||||
|
|
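Both the GL41 and the GL45 hunk above make the same change: transfer chunks are now sized in whole 4-line blocks rather than in raw lines, so BC-compressed mips, whose data is stored as 4x4 texel tiles, are never split in the middle of a tile. A small worked example with an assumed transfer budget (the real MAX_TRANSFER_SIZE is defined elsewhere in the engine):

    // Assumed numbers for illustration only: a 256x256 BC3 mip face (1 byte per texel).
    const uint32_t MAX_TRANSFER_SIZE = 16 * 1024;   // assumed per-transfer budget
    const uint32_t BLOCK_NUM_LINES { 4 };           // BC formats encode 4x4 texel tiles
    uint32_t lines = 256;                           // mipDimensions.y
    uint32_t mipSize = 256 * 256;                   // stored byte size of the mip face

    uint32_t numBlocks = (lines + (BLOCK_NUM_LINES - 1)) / BLOCK_NUM_LINES;            // 64 blocks
    uint32_t bytesPerBlock = mipSize / numBlocks;                                      // 1024 bytes per 4-line block
    uint32_t linesPerTransfer = BLOCK_NUM_LINES * (MAX_TRANSFER_SIZE / bytesPerBlock); // 4 * 16 = 64 lines
    // Each transfer copies 64 lines (16 blocks), so every chunk starts and ends on a
    // 4-line boundary and no 4x4 tile is split across two transfers.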
|
@ -25,6 +25,8 @@ const Element Element::COLOR_COMPRESSED_SRGBA_MASK{ VEC4, NUINT8, COMPRESSED_BC1
|
|||
const Element Element::COLOR_COMPRESSED_SRGBA{ VEC4, NUINT8, COMPRESSED_BC3_SRGBA };
|
||||
const Element Element::COLOR_COMPRESSED_XY{ VEC4, NUINT8, COMPRESSED_BC5_XY };
|
||||
|
||||
const Element Element::VEC2NU8_XY{ VEC2, NUINT8, XY };
|
||||
|
||||
const Element Element::COLOR_R11G11B10{ SCALAR, FLOAT, R11G11B10 };
|
||||
const Element Element::VEC4F_COLOR_RGBA{ VEC4, FLOAT, RGBA };
|
||||
const Element Element::VEC2F_UV{ VEC2, FLOAT, UV };
|
||||
|
|
|
@ -234,6 +234,7 @@ public:
|
|||
static const Element COLOR_COMPRESSED_SRGBA_MASK;
|
||||
static const Element COLOR_COMPRESSED_SRGBA;
|
||||
static const Element COLOR_COMPRESSED_XY;
|
||||
static const Element VEC2NU8_XY;
|
||||
static const Element VEC4F_COLOR_RGBA;
|
||||
static const Element VEC2F_UV;
|
||||
static const Element VEC2F_XY;
|
||||
|
|
|
@ -99,6 +99,61 @@ struct GPUKTXPayload {
|
|||
};
|
||||
const std::string GPUKTXPayload::KEY { "hifi.gpu" };
|
||||
|
||||
|
||||
struct IrradianceKTXPayload {
|
||||
using Version = uint8;
|
||||
|
||||
static const std::string KEY;
|
||||
static const Version CURRENT_VERSION{ 0 };
|
||||
static const size_t PADDING{ 3 };
|
||||
static const size_t SIZE{ sizeof(Version) + sizeof(SphericalHarmonics) + PADDING };
|
||||
static_assert(IrradianceKTXPayload::SIZE == 148, "Packing size may differ between platforms");
|
||||
static_assert(IrradianceKTXPayload::SIZE % 4 == 0, "IrradianceKTXPayload is not 4 bytes aligned");
|
||||
|
||||
SphericalHarmonics _irradianceSH;
|
||||
|
||||
Byte* serialize(Byte* data) const {
|
||||
*(Version*)data = CURRENT_VERSION;
|
||||
data += sizeof(Version);
|
||||
|
||||
memcpy(data, &_irradianceSH, sizeof(SphericalHarmonics));
|
||||
data += sizeof(SphericalHarmonics);
|
||||
|
||||
return data + PADDING;
|
||||
}
|
||||
|
||||
bool unserialize(const Byte* data, size_t size) {
|
||||
if (size != SIZE) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Version version = *(const Version*)data;
|
||||
if (version != CURRENT_VERSION) {
|
||||
return false;
|
||||
}
|
||||
data += sizeof(Version);
|
||||
|
||||
memcpy(&_irradianceSH, data, sizeof(SphericalHarmonics));
|
||||
data += sizeof(SphericalHarmonics);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool isIrradianceKTX(const ktx::KeyValue& val) {
|
||||
return (val._key.compare(KEY) == 0);
|
||||
}
|
||||
|
||||
static bool findInKeyValues(const ktx::KeyValues& keyValues, IrradianceKTXPayload& payload) {
|
||||
auto found = std::find_if(keyValues.begin(), keyValues.end(), isIrradianceKTX);
|
||||
if (found != keyValues.end()) {
|
||||
auto value = found->_value;
|
||||
return payload.unserialize(value.data(), value.size());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
const std::string IrradianceKTXPayload::KEY{ "hifi.irradianceSH" };
|
||||
|
||||
KtxStorage::KtxStorage(const std::string& filename) : _filename(filename) {
|
||||
{
|
||||
// We are doing a lot of work here just to get descriptor data
|
||||
|
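The static_asserts in IrradianceKTXPayload pin down the serialized layout; spelling the arithmetic out makes the padding choice obvious (the 144 bytes for SphericalHarmonics is inferred from the asserted total, it is not stated explicitly in the diff):

    //   sizeof(Version)            =   1 byte   (uint8)
    //   sizeof(SphericalHarmonics) = 144 bytes  (inferred: 148 - 1 - 3)
    //   PADDING                    =   3 bytes  (pads the total to a 4-byte multiple)
    //   SIZE                       = 148 bytes, which is also the exact length
    //                                unserialize() insists on before reading a blob back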
@ -304,16 +359,27 @@ ktx::KTXUniquePointer Texture::serialize(const Texture& texture) {
|
|||
}
|
||||
}
|
||||
|
||||
GPUKTXPayload keyval;
|
||||
keyval._samplerDesc = texture.getSampler().getDesc();
|
||||
keyval._usage = texture.getUsage();
|
||||
keyval._usageType = texture.getUsageType();
|
||||
GPUKTXPayload gpuKeyval;
|
||||
gpuKeyval._samplerDesc = texture.getSampler().getDesc();
|
||||
gpuKeyval._usage = texture.getUsage();
|
||||
gpuKeyval._usageType = texture.getUsageType();
|
||||
|
||||
Byte keyvalPayload[GPUKTXPayload::SIZE];
|
||||
keyval.serialize(keyvalPayload);
|
||||
gpuKeyval.serialize(keyvalPayload);
|
||||
|
||||
ktx::KeyValues keyValues;
|
||||
keyValues.emplace_back(GPUKTXPayload::KEY, (uint32)GPUKTXPayload::SIZE, (ktx::Byte*) &keyvalPayload);
|
||||
|
||||
if (texture.getIrradiance()) {
|
||||
IrradianceKTXPayload irradianceKeyval;
|
||||
irradianceKeyval._irradianceSH = *texture.getIrradiance();
|
||||
|
||||
Byte irradianceKeyvalPayload[IrradianceKTXPayload::SIZE];
|
||||
irradianceKeyval.serialize(irradianceKeyvalPayload);
|
||||
|
||||
keyValues.emplace_back(IrradianceKTXPayload::KEY, (uint32)IrradianceKTXPayload::SIZE, (ktx::Byte*) &irradianceKeyvalPayload);
|
||||
}
|
||||
|
||||
auto hash = texture.sourceHash();
|
||||
if (!hash.empty()) {
|
||||
// the sourceHash is an std::string in hex
|
||||
|
@ -409,6 +475,12 @@ TexturePointer Texture::unserialize(const std::string& ktxfile, const ktx::KTXDe
|
|||
// Assign the available mips
|
||||
texture->setStoredMipFormat(mipFormat);
|
||||
texture->setKtxBacking(ktxfile);
|
||||
|
||||
IrradianceKTXPayload irradianceKtxKeyValue;
|
||||
if (IrradianceKTXPayload::findInKeyValues(descriptor.keyValues, irradianceKtxKeyValue)) {
|
||||
texture->overrideIrradiance(std::make_shared<SphericalHarmonics>(irradianceKtxKeyValue._irradianceSH));
|
||||
}
|
||||
|
||||
return texture;
|
||||
}
|
||||
|
||||
|
@ -423,6 +495,8 @@ bool Texture::evalKTXFormat(const Element& mipFormat, const Element& texelFormat
|
|||
header.setUncompressed(ktx::GLType::UNSIGNED_BYTE, 1, ktx::GLFormat::RGBA, ktx::GLInternalFormat_Uncompressed::SRGB8_ALPHA8, ktx::GLBaseInternalFormat::RGBA);
|
||||
} else if (texelFormat == Format::COLOR_R_8 && mipFormat == Format::COLOR_R_8) {
|
||||
header.setUncompressed(ktx::GLType::UNSIGNED_BYTE, 1, ktx::GLFormat::RED, ktx::GLInternalFormat_Uncompressed::R8, ktx::GLBaseInternalFormat::RED);
|
||||
} else if (texelFormat == Format::VEC2NU8_XY && mipFormat == Format::VEC2NU8_XY) {
|
||||
header.setUncompressed(ktx::GLType::UNSIGNED_BYTE, 1, ktx::GLFormat::RG, ktx::GLInternalFormat_Uncompressed::RG8, ktx::GLBaseInternalFormat::RG);
|
||||
} else if (texelFormat == Format::COLOR_COMPRESSED_SRGB && mipFormat == Format::COLOR_COMPRESSED_SRGB) {
|
||||
header.setCompressed(ktx::GLInternalFormat_Compressed::COMPRESSED_SRGB_S3TC_DXT1_EXT, ktx::GLBaseInternalFormat::RGB);
|
||||
} else if (texelFormat == Format::COLOR_COMPRESSED_SRGBA_MASK && mipFormat == Format::COLOR_COMPRESSED_SRGBA_MASK) {
|
||||
|
|
|
@ -22,16 +22,19 @@
|
|||
#include <Profile.h>
|
||||
#include <StatTracker.h>
|
||||
#include <GLMHelpers.h>
|
||||
#include <SettingHandle.h>
|
||||
|
||||
#include "ImageLogging.h"
|
||||
|
||||
using namespace gpu;
|
||||
|
||||
#define CPU_MIPMAPS 1
|
||||
#define COMPRESS_COLOR_TEXTURES 0
|
||||
#define COMPRESS_NORMALMAP_TEXTURES 0 // Disable Normalmap compression for now
|
||||
#define COMPRESS_GRAYSCALE_TEXTURES 0
|
||||
#define COMPRESS_CUBEMAP_TEXTURES 0 // Disable Cubemap compression for now
|
||||
|
||||
static std::mutex settingsMutex;
|
||||
static Setting::Handle<bool> compressColorTextures("hifi.graphics.compressColorTextures", false);
|
||||
static Setting::Handle<bool> compressNormalTextures("hifi.graphics.compressNormalTextures", false);
|
||||
static Setting::Handle<bool> compressGrayscaleTextures("hifi.graphics.compressGrayscaleTextures", false);
|
||||
static Setting::Handle<bool> compressCubeTextures("hifi.graphics.compressCubeTextures", false);
|
||||
|
||||
static const glm::uvec2 SPARSE_PAGE_SIZE(128);
|
||||
static const glm::uvec2 MAX_TEXTURE_SIZE(4096);
|
||||
|
@ -144,6 +147,64 @@ gpu::TexturePointer TextureUsage::createCubeTextureFromImageWithoutIrradiance(co
|
|||
return processCubeTextureColorFromImage(srcImage, srcImageName, false);
|
||||
}
|
||||
|
||||
|
||||
bool isColorTexturesCompressionEnabled() {
|
||||
#if CPU_MIPMAPS
|
||||
std::lock_guard<std::mutex> guard(settingsMutex);
|
||||
return compressColorTextures.get();
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
bool isNormalTexturesCompressionEnabled() {
|
||||
#if CPU_MIPMAPS
|
||||
std::lock_guard<std::mutex> guard(settingsMutex);
|
||||
return compressNormalTextures.get();
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
bool isGrayscaleTexturesCompressionEnabled() {
|
||||
#if CPU_MIPMAPS
|
||||
std::lock_guard<std::mutex> guard(settingsMutex);
|
||||
return compressGrayscaleTextures.get();
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
bool isCubeTexturesCompressionEnabled() {
|
||||
#if CPU_MIPMAPS
|
||||
std::lock_guard<std::mutex> guard(settingsMutex);
|
||||
return compressCubeTextures.get();
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
void setColorTexturesCompressionEnabled(bool enabled) {
|
||||
std::lock_guard<std::mutex> guard(settingsMutex);
|
||||
compressColorTextures.set(enabled);
|
||||
}
|
||||
|
||||
void setNormalTexturesCompressionEnabled(bool enabled) {
|
||||
std::lock_guard<std::mutex> guard(settingsMutex);
|
||||
compressNormalTextures.set(enabled);
|
||||
}
|
||||
|
||||
void setGrayscaleTexturesCompressionEnabled(bool enabled) {
|
||||
std::lock_guard<std::mutex> guard(settingsMutex);
|
||||
compressGrayscaleTextures.set(enabled);
|
||||
}
|
||||
|
||||
void setCubeTexturesCompressionEnabled(bool enabled) {
|
||||
std::lock_guard<std::mutex> guard(settingsMutex);
|
||||
compressCubeTextures.set(enabled);
|
||||
}
|
||||
|
||||
|
||||
gpu::TexturePointer processImage(const QByteArray& content, const std::string& filename, int maxNumPixels, TextureUsage::Type textureType) {
|
||||
// Help the QImage loader by extracting the image file format from the url filename ext.
|
||||
// Some tga are not created properly without it.
|
||||
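With the compile-time COMPRESS_* switches replaced by Setting::Handle values behind settingsMutex, compression can now be flipped at runtime. A hypothetical caller (the wrapper function and the include path below are assumptions, not part of the diff) only needs the setters added above:

    #include <image/Image.h>   // assumed include path for the image namespace declarations

    void applyTextureCompressionPreference(bool compressEverything) {
        image::setColorTexturesCompressionEnabled(compressEverything);
        image::setNormalTexturesCompressionEnabled(compressEverything);
        image::setGrayscaleTexturesCompressionEnabled(compressEverything);
        image::setCubeTexturesCompressionEnabled(compressEverything);
        // Textures processed after this point pick the new values up through the
        // mutex-guarded is*TexturesCompressionEnabled() getters.
    }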
|
@ -290,6 +351,19 @@ void generateMips(gpu::Texture* texture, QImage& image, int face = -1) {
|
|||
float inputGamma = 2.2f;
|
||||
float outputGamma = 2.2f;
|
||||
|
||||
nvtt::InputOptions inputOptions;
|
||||
inputOptions.setTextureLayout(textureType, width, height);
|
||||
inputOptions.setMipmapData(data, width, height);
|
||||
|
||||
inputOptions.setFormat(inputFormat);
|
||||
inputOptions.setGamma(inputGamma, outputGamma);
|
||||
inputOptions.setAlphaMode(alphaMode);
|
||||
inputOptions.setWrapMode(wrapMode);
|
||||
inputOptions.setRoundMode(roundMode);
|
||||
|
||||
inputOptions.setMipmapGeneration(true);
|
||||
inputOptions.setMipmapFilter(nvtt::MipmapFilter_Box);
|
||||
|
||||
nvtt::CompressionOptions compressionOptions;
|
||||
compressionOptions.setQuality(nvtt::Quality_Production);
|
||||
|
||||
|
@ -346,26 +420,17 @@ void generateMips(gpu::Texture* texture, QImage& image, int face = -1) {
|
|||
compressionOptions.setFormat(nvtt::Format_RGB);
|
||||
compressionOptions.setPixelType(nvtt::PixelType_UnsignedNorm);
|
||||
compressionOptions.setPixelFormat(8, 0, 0, 0);
|
||||
} else if (mipFormat == gpu::Element::VEC2NU8_XY) {
|
||||
inputOptions.setNormalMap(true);
|
||||
compressionOptions.setFormat(nvtt::Format_RGBA);
|
||||
compressionOptions.setPixelType(nvtt::PixelType_UnsignedNorm);
|
||||
compressionOptions.setPixelFormat(8, 8, 0, 0);
|
||||
} else {
|
||||
qCWarning(imagelogging) << "Unknown mip format";
|
||||
Q_UNREACHABLE();
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
nvtt::InputOptions inputOptions;
|
||||
inputOptions.setTextureLayout(textureType, width, height);
|
||||
inputOptions.setMipmapData(data, width, height);
|
||||
|
||||
inputOptions.setFormat(inputFormat);
|
||||
inputOptions.setGamma(inputGamma, outputGamma);
|
||||
inputOptions.setAlphaMode(alphaMode);
|
||||
inputOptions.setWrapMode(wrapMode);
|
||||
inputOptions.setRoundMode(roundMode);
|
||||
|
||||
inputOptions.setMipmapGeneration(true);
|
||||
inputOptions.setMipmapFilter(nvtt::MipmapFilter_Box);
|
||||
|
||||
nvtt::OutputOptions outputOptions;
|
||||
outputOptions.setOutputHeader(false);
|
||||
MyOutputHandler outputHandler(texture, face);
|
||||
|
@ -424,18 +489,19 @@ gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(const QImage& s
|
|||
gpu::TexturePointer theTexture = nullptr;
|
||||
|
||||
if ((image.width() > 0) && (image.height() > 0)) {
|
||||
#if CPU_MIPMAPS && COMPRESS_COLOR_TEXTURES
|
||||
gpu::Element formatMip;
|
||||
gpu::Element formatGPU;
|
||||
if (validAlpha) {
|
||||
formatGPU = alphaAsMask ? gpu::Element::COLOR_COMPRESSED_SRGBA_MASK : gpu::Element::COLOR_COMPRESSED_SRGBA;
|
||||
if (isColorTexturesCompressionEnabled()) {
|
||||
if (validAlpha) {
|
||||
formatGPU = alphaAsMask ? gpu::Element::COLOR_COMPRESSED_SRGBA_MASK : gpu::Element::COLOR_COMPRESSED_SRGBA;
|
||||
} else {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_SRGB;
|
||||
}
|
||||
formatMip = formatGPU;
|
||||
} else {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_SRGB;
|
||||
formatMip = gpu::Element::COLOR_SBGRA_32;
|
||||
formatGPU = gpu::Element::COLOR_SRGBA_32;
|
||||
}
|
||||
gpu::Element formatMip = formatGPU;
|
||||
#else
|
||||
gpu::Element formatMip = gpu::Element::COLOR_SBGRA_32;
|
||||
gpu::Element formatGPU = gpu::Element::COLOR_SRGBA_32;
|
||||
#endif
|
||||
|
||||
if (isStrict) {
|
||||
theTexture = gpu::Texture::createStrict(formatGPU, image.width(), image.height(), gpu::Texture::MAX_NUM_MIPS, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR));
|
||||
|
@ -543,14 +609,12 @@ gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(const QImag
|
|||
|
||||
gpu::TexturePointer theTexture = nullptr;
|
||||
if ((image.width() > 0) && (image.height() > 0)) {
|
||||
|
||||
#if CPU_MIPMAPS && COMPRESS_NORMALMAP_TEXTURES
|
||||
gpu::Element formatMip = gpu::Element::COLOR_COMPRESSED_XY;
|
||||
gpu::Element formatGPU = gpu::Element::COLOR_COMPRESSED_XY;
|
||||
#else
|
||||
gpu::Element formatMip = gpu::Element::COLOR_RGBA_32;
|
||||
gpu::Element formatGPU = gpu::Element::COLOR_RGBA_32;
|
||||
#endif
|
||||
gpu::Element formatMip = gpu::Element::VEC2NU8_XY;
|
||||
gpu::Element formatGPU = gpu::Element::VEC2NU8_XY;
|
||||
if (isNormalTexturesCompressionEnabled()) {
|
||||
formatMip = gpu::Element::COLOR_COMPRESSED_XY;
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_XY;
|
||||
}
|
||||
|
||||
theTexture = gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Texture::MAX_NUM_MIPS, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR));
|
||||
theTexture->setSource(srcImageName);
|
||||
|
@ -576,14 +640,15 @@ gpu::TexturePointer TextureUsage::process2DTextureGrayscaleFromImage(const QImag
|
|||
|
||||
gpu::TexturePointer theTexture = nullptr;
|
||||
if ((image.width() > 0) && (image.height() > 0)) {
|
||||
|
||||
#if CPU_MIPMAPS && COMPRESS_GRAYSCALE_TEXTURES
|
||||
gpu::Element formatMip = gpu::Element::COLOR_COMPRESSED_RED;
|
||||
gpu::Element formatGPU = gpu::Element::COLOR_COMPRESSED_RED;
|
||||
#else
|
||||
gpu::Element formatMip = gpu::Element::COLOR_R_8;
|
||||
gpu::Element formatGPU = gpu::Element::COLOR_R_8;
|
||||
#endif
|
||||
gpu::Element formatMip;
|
||||
gpu::Element formatGPU;
|
||||
if (isGrayscaleTexturesCompressionEnabled()) {
|
||||
formatMip = gpu::Element::COLOR_COMPRESSED_RED;
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_RED;
|
||||
} else {
|
||||
formatMip = gpu::Element::COLOR_R_8;
|
||||
formatGPU = gpu::Element::COLOR_R_8;
|
||||
}
|
||||
|
||||
theTexture = gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Texture::MAX_NUM_MIPS, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR));
|
||||
theTexture->setSource(srcImageName);
|
||||
|
@ -860,13 +925,15 @@ gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(const QImage&
|
|||
image = image.convertToFormat(QImage::Format_ARGB32);
|
||||
}
|
||||
|
||||
#if CPU_MIPMAPS && COMPRESS_CUBEMAP_TEXTURES
|
||||
gpu::Element formatMip = gpu::Element::COLOR_COMPRESSED_SRGBA;
|
||||
gpu::Element formatGPU = gpu::Element::COLOR_COMPRESSED_SRGBA;
|
||||
#else
|
||||
gpu::Element formatMip = gpu::Element::COLOR_SRGBA_32;
|
||||
gpu::Element formatGPU = gpu::Element::COLOR_SRGBA_32;
|
||||
#endif
|
||||
gpu::Element formatMip;
|
||||
gpu::Element formatGPU;
|
||||
if (isCubeTexturesCompressionEnabled()) {
|
||||
formatMip = gpu::Element::COLOR_COMPRESSED_SRGBA;
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_SRGBA;
|
||||
} else {
|
||||
formatMip = gpu::Element::COLOR_SRGBA_32;
|
||||
formatGPU = gpu::Element::COLOR_SRGBA_32;
|
||||
}
|
||||
|
||||
// Find the layout of the cubemap in the 2D image
|
||||
// Use the original image size since processSourceImage may have altered the size / aspect ratio
|
||||
|
|
|
@ -63,6 +63,16 @@ gpu::TexturePointer processCubeTextureColorFromImage(const QImage& srcImage, con
|
|||
|
||||
} // namespace TextureUsage
|
||||
|
||||
bool isColorTexturesCompressionEnabled();
|
||||
bool isNormalTexturesCompressionEnabled();
|
||||
bool isGrayscaleTexturesCompressionEnabled();
|
||||
bool isCubeTexturesCompressionEnabled();
|
||||
|
||||
void setColorTexturesCompressionEnabled(bool enabled);
|
||||
void setNormalTexturesCompressionEnabled(bool enabled);
|
||||
void setGrayscaleTexturesCompressionEnabled(bool enabled);
|
||||
void setCubeTexturesCompressionEnabled(bool enabled);
|
||||
|
||||
gpu::TexturePointer processImage(const QByteArray& content, const std::string& url, int maxNumPixels, TextureUsage::Type textureType);
|
||||
|
||||
} // namespace image
|
||||
|
|
|
@ -174,7 +174,7 @@ namespace ktx {
}

std::unique_ptr<KTX> KTX::create(const StoragePointer& src) {
if (!src) {
if (!src || !(*src)) {
return nullptr;
}

|
|
@ -421,7 +421,7 @@ void NetworkTexture::startRequestForNextMipLevel() {
|
|||
|
||||
_ktxResourceState = PENDING_MIP_REQUEST;
|
||||
|
||||
init();
|
||||
init(false);
|
||||
float priority = -(float)_originalKtxDescriptor->header.numberOfMipmapLevels + (float)_lowestKnownPopulatedMip;
|
||||
setLoadPriority(this, priority);
|
||||
_url.setFragment(QString::number(_lowestKnownPopulatedMip - 1));
|
||||
|
@ -472,6 +472,10 @@ void NetworkTexture::startMipRangeRequest(uint16_t low, uint16_t high) {
|
|||
void NetworkTexture::ktxHeaderRequestFinished() {
|
||||
Q_ASSERT(_ktxResourceState == LOADING_INITIAL_DATA);
|
||||
|
||||
if (!_ktxHeaderRequest) {
|
||||
return;
|
||||
}
|
||||
|
||||
_ktxHeaderRequestFinished = true;
|
||||
maybeHandleFinishedInitialLoad();
|
||||
}
|
||||
|
@ -479,6 +483,10 @@ void NetworkTexture::ktxHeaderRequestFinished() {
|
|||
void NetworkTexture::ktxMipRequestFinished() {
|
||||
Q_ASSERT(_ktxResourceState == LOADING_INITIAL_DATA || _ktxResourceState == REQUESTING_MIP);
|
||||
|
||||
if (!_ktxMipRequest) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (_ktxResourceState == LOADING_INITIAL_DATA) {
|
||||
_ktxHighMipRequestFinished = true;
|
||||
maybeHandleFinishedInitialLoad();
|
||||
|
@ -682,6 +690,27 @@ void NetworkTexture::loadContent(const QByteArray& content) {
|
|||
QThreadPool::globalInstance()->start(new ImageReader(_self, _url, content, _maxNumPixels));
|
||||
}
|
||||
|
||||
void NetworkTexture::refresh() {
|
||||
if ((_ktxHeaderRequest || _ktxMipRequest) && !_loaded && !_failedToLoad) {
|
||||
return;
|
||||
}
|
||||
if (_ktxHeaderRequest || _ktxMipRequest) {
|
||||
if (_ktxHeaderRequest) {
|
||||
_ktxHeaderRequest->disconnect(this);
|
||||
_ktxHeaderRequest->deleteLater();
|
||||
_ktxHeaderRequest = nullptr;
|
||||
}
|
||||
if (_ktxMipRequest) {
|
||||
_ktxMipRequest->disconnect(this);
|
||||
_ktxMipRequest->deleteLater();
|
||||
_ktxMipRequest = nullptr;
|
||||
}
|
||||
TextureCache::requestCompleted(_self);
|
||||
}
|
||||
|
||||
Resource::refresh();
|
||||
}
|
||||
|
||||
ImageReader::ImageReader(const QWeakPointer<Resource>& resource, const QUrl& url, const QByteArray& data, int maxNumPixels) :
|
||||
_resource(resource),
|
||||
_url(url),
|
||||
|
|
|
@ -58,6 +58,8 @@ public:
|
|||
|
||||
gpu::TexturePointer getFallbackTexture() const;
|
||||
|
||||
void refresh() override;
|
||||
|
||||
signals:
|
||||
void networkTextureCreated(const QWeakPointer<NetworkTexture>& self);
|
||||
|
||||
|
|
|
@ -40,16 +40,16 @@ AssetResourceRequest::~AssetResourceRequest() {
|
|||
}
|
||||
}
|
||||
|
||||
bool AssetResourceRequest::urlIsAssetHash() const {
|
||||
bool AssetResourceRequest::urlIsAssetHash(const QUrl& url) {
|
||||
static const QString ATP_HASH_REGEX_STRING { "^atp:([A-Fa-f0-9]{64})(\\.[\\w]+)?$" };
|
||||
|
||||
QRegExp hashRegex { ATP_HASH_REGEX_STRING };
|
||||
return hashRegex.exactMatch(_url.toString());
|
||||
return hashRegex.exactMatch(url.toString());
|
||||
}
|
||||
|
||||
void AssetResourceRequest::doSend() {
|
||||
// We'll either have a hash or an ATP path to a file (that maps to a hash)
|
||||
if (urlIsAssetHash()) {
|
||||
if (urlIsAssetHash(_url)) {
|
||||
// We've detected that this is a hash - simply use AssetClient to request that asset
|
||||
auto parts = _url.path().split(".", QString::SkipEmptyParts);
|
||||
auto hash = parts.length() > 0 ? parts[0] : "";
|
||||
|
|
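The regex that urlIsAssetHash() now applies to any passed-in QUrl accepts a bare 64-hex-digit ATP hash with an optional file extension; the example URLs below are illustrative, not taken from the diff:

    QRegExp hashRegex { "^atp:([A-Fa-f0-9]{64})(\\.[\\w]+)?$" };
    // matches (hash-style URLs, answered directly through AssetClient):
    //   atp:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
    //   atp:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef.fbx
    // does not match (a mapped path, which goes through the mapping lookup instead):
    //   atp:/models/chair.fbx
    bool isHash = hashRegex.exactMatch(url.toString());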
|
@ -32,7 +32,7 @@ private slots:
|
|||
void onDownloadProgress(qint64 bytesReceived, qint64 bytesTotal);
|
||||
|
||||
private:
|
||||
bool urlIsAssetHash() const;
|
||||
static bool urlIsAssetHash(const QUrl& url);
|
||||
|
||||
void requestMappingForPath(const AssetPath& path);
|
||||
void requestHash(const AssetHash& hash);
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
#include <DependencyManager.h>
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
#include <comdef.h>
|
||||
#include <Wbemidl.h>
|
||||
#include <Windows.h>
|
||||
#include <winreg.h>
|
||||
#endif //Q_OS_WIN
|
||||
|
||||
#ifdef Q_OS_MAC
|
||||
|
@ -30,6 +30,9 @@
|
|||
#endif //Q_OS_MAC
|
||||
|
||||
static const QString FALLBACK_FINGERPRINT_KEY = "fallbackFingerprint";
|
||||
|
||||
QUuid FingerprintUtils::_machineFingerprint { QUuid() };
|
||||
|
||||
QString FingerprintUtils::getMachineFingerprintString() {
|
||||
QString uuidString;
|
||||
#ifdef Q_OS_LINUX
|
||||
|
@ -47,122 +50,32 @@ QString FingerprintUtils::getMachineFingerprintString() {
|
|||
#endif //Q_OS_MAC
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
HRESULT hres;
|
||||
IWbemLocator *pLoc = NULL;
|
||||
|
||||
// initialize com. Interface already does, but other
|
||||
// users of this lib don't necessarily do so.
|
||||
hres = CoInitializeEx(0, COINIT_MULTITHREADED);
|
||||
if (FAILED(hres)) {
|
||||
qCDebug(networking) << "Failed to initialize COM library!";
|
||||
return uuidString;
|
||||
}
|
||||
HKEY cryptoKey;
|
||||
|
||||
// initialize WbemLocator
|
||||
hres = CoCreateInstance(
|
||||
CLSID_WbemLocator,
|
||||
0,
|
||||
CLSCTX_INPROC_SERVER,
|
||||
IID_IWbemLocator, (LPVOID *) &pLoc);
|
||||
// try and open the key that contains the machine GUID
|
||||
if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Cryptography", 0, KEY_READ, &cryptoKey) == ERROR_SUCCESS) {
|
||||
DWORD type;
|
||||
DWORD guidSize;
|
||||
|
||||
if (FAILED(hres)) {
|
||||
qCDebug(networking) << "Failed to initialize WbemLocator";
|
||||
return uuidString;
|
||||
}
|
||||
|
||||
// Connect to WMI through the IWbemLocator::ConnectServer method
|
||||
IWbemServices *pSvc = NULL;
|
||||
const char* MACHINE_GUID_KEY = "MachineGuid";
|
||||
|
||||
// Connect to the root\cimv2 namespace with
|
||||
// the current user and obtain pointer pSvc
|
||||
// to make IWbemServices calls.
|
||||
hres = pLoc->ConnectServer(
|
||||
_bstr_t(L"ROOT\\CIMV2"), // Object path of WMI namespace
|
||||
NULL, // User name. NULL = current user
|
||||
NULL, // User password. NULL = current
|
||||
0, // Locale. NULL indicates current
|
||||
NULL, // Security flags.
|
||||
0, // Authority (for example, Kerberos)
|
||||
0, // Context object
|
||||
&pSvc // pointer to IWbemServices proxy
|
||||
);
|
||||
// try and retrieve the size of the GUID value
|
||||
if (RegQueryValueEx(cryptoKey, MACHINE_GUID_KEY, NULL, &type, NULL, &guidSize) == ERROR_SUCCESS) {
|
||||
// make sure that the value is a string
|
||||
if (type == REG_SZ) {
|
||||
// retrieve the machine GUID and return that as our UUID string
|
||||
std::string machineGUID(guidSize / sizeof(char), '\0');
|
||||
|
||||
if (FAILED(hres)) {
|
||||
pLoc->Release();
|
||||
qCDebug(networking) << "Failed to connect to WMI";
|
||||
return uuidString;
|
||||
}
|
||||
|
||||
// Set security levels on the proxy
|
||||
hres = CoSetProxyBlanket(
|
||||
pSvc, // Indicates the proxy to set
|
||||
RPC_C_AUTHN_WINNT, // RPC_C_AUTHN_xxx
|
||||
RPC_C_AUTHZ_NONE, // RPC_C_AUTHZ_xxx
|
||||
NULL, // Server principal name
|
||||
RPC_C_AUTHN_LEVEL_CALL, // RPC_C_AUTHN_LEVEL_xxx
|
||||
RPC_C_IMP_LEVEL_IMPERSONATE, // RPC_C_IMP_LEVEL_xxx
|
||||
NULL, // client identity
|
||||
EOAC_NONE // proxy capabilities
|
||||
);
|
||||
|
||||
if (FAILED(hres)) {
|
||||
pSvc->Release();
|
||||
pLoc->Release();
|
||||
qCDebug(networking) << "Failed to set security on proxy blanket";
|
||||
return uuidString;
|
||||
}
|
||||
|
||||
// Use the IWbemServices pointer to grab the Win32_BIOS stuff
|
||||
IEnumWbemClassObject* pEnumerator = NULL;
|
||||
hres = pSvc->ExecQuery(
|
||||
bstr_t("WQL"),
|
||||
bstr_t("SELECT * FROM Win32_ComputerSystemProduct"),
|
||||
WBEM_FLAG_FORWARD_ONLY | WBEM_FLAG_RETURN_IMMEDIATELY,
|
||||
NULL,
|
||||
&pEnumerator);
|
||||
|
||||
if (FAILED(hres)) {
|
||||
pSvc->Release();
|
||||
pLoc->Release();
|
||||
qCDebug(networking) << "query to get Win32_ComputerSystemProduct info";
|
||||
return uuidString;
|
||||
}
|
||||
|
||||
// Get the SerialNumber from the Win32_BIOS data
|
||||
IWbemClassObject *pclsObj;
|
||||
ULONG uReturn = 0;
|
||||
|
||||
SHORT sRetStatus = -100;
|
||||
|
||||
while (pEnumerator) {
|
||||
HRESULT hr = pEnumerator->Next(WBEM_INFINITE, 1, &pclsObj, &uReturn);
|
||||
|
||||
if(0 == uReturn){
|
||||
break;
|
||||
}
|
||||
|
||||
VARIANT vtProp;
|
||||
|
||||
// Get the value of the Name property
|
||||
hr = pclsObj->Get(L"UUID", 0, &vtProp, 0, 0);
|
||||
if (!FAILED(hres)) {
|
||||
switch (vtProp.vt) {
|
||||
case VT_BSTR:
|
||||
uuidString = QString::fromWCharArray(vtProp.bstrVal);
|
||||
break;
|
||||
if (RegQueryValueEx(cryptoKey, MACHINE_GUID_KEY, NULL, NULL,
|
||||
reinterpret_cast<LPBYTE>(&machineGUID[0]), &guidSize) == ERROR_SUCCESS) {
|
||||
uuidString = QString::fromStdString(machineGUID);
|
||||
}
|
||||
}
|
||||
}
|
||||
VariantClear(&vtProp);
|
||||
|
||||
pclsObj->Release();
|
||||
RegCloseKey(cryptoKey);
|
||||
}
|
||||
pEnumerator->Release();
|
||||
|
||||
// Cleanup
|
||||
pSvc->Release();
|
||||
pLoc->Release();
|
||||
|
||||
qCDebug(networking) << "Windows BIOS UUID: " << uuidString;
|
||||
#endif //Q_OS_WIN
|
||||
|
||||
return uuidString;
|
||||
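The Windows branch above is hard to follow because the removed WMI/COM code and the new registry code are interleaved in the diff. Assembled on its own (a sketch built from the added lines, with a hypothetical helper name, not an exact copy of the committed function), the new path is a straight read of the machine GUID:

    #ifdef Q_OS_WIN
    QString readWindowsMachineGuid() {
        QString uuidString;
        HKEY cryptoKey;
        // try and open the key that contains the machine GUID
        if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Cryptography", 0, KEY_READ, &cryptoKey) == ERROR_SUCCESS) {
            const char* MACHINE_GUID_KEY = "MachineGuid";
            DWORD type;
            DWORD guidSize;
            // ask for the value's size and type first, then read it if it is a string
            if (RegQueryValueEx(cryptoKey, MACHINE_GUID_KEY, NULL, &type, NULL, &guidSize) == ERROR_SUCCESS && type == REG_SZ) {
                std::string machineGUID(guidSize / sizeof(char), '\0');
                if (RegQueryValueEx(cryptoKey, MACHINE_GUID_KEY, NULL, NULL,
                                    reinterpret_cast<LPBYTE>(&machineGUID[0]), &guidSize) == ERROR_SUCCESS) {
                    uuidString = QString::fromStdString(machineGUID);
                }
            }
            RegCloseKey(cryptoKey);
        }
        return uuidString;
    }
    #endif // Q_OS_WIN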
|
@ -171,29 +84,36 @@ QString FingerprintUtils::getMachineFingerprintString() {
|
|||
|
||||
QUuid FingerprintUtils::getMachineFingerprint() {
|
||||
|
||||
QString uuidString = getMachineFingerprintString();
|
||||
if (_machineFingerprint.isNull()) {
|
||||
QString uuidString = getMachineFingerprintString();
|
||||
|
||||
// now, turn into uuid. A malformed string will
|
||||
// return QUuid() ("{00000...}"), which handles
|
||||
// any errors in getting the string
|
||||
QUuid uuid(uuidString);
|
||||
|
||||
// now, turn into uuid. A malformed string will
|
||||
// return QUuid() ("{00000...}"), which handles
|
||||
// any errors in getting the string
|
||||
QUuid uuid(uuidString);
|
||||
if (uuid == QUuid()) {
|
||||
// if you cannot read a fallback key cuz we aren't saving them, just generate one for
|
||||
// this session and move on
|
||||
if (DependencyManager::get<Setting::Manager>().isNull()) {
|
||||
return QUuid::createUuid();
|
||||
}
|
||||
// read fallback key (if any)
|
||||
Settings settings;
|
||||
uuid = QUuid(settings.value(FALLBACK_FINGERPRINT_KEY).toString());
|
||||
qCDebug(networking) << "read fallback maching fingerprint: " << uuid.toString();
|
||||
if (uuid == QUuid()) {
|
||||
// no fallback yet, set one
|
||||
uuid = QUuid::createUuid();
|
||||
settings.setValue(FALLBACK_FINGERPRINT_KEY, uuid.toString());
|
||||
qCDebug(networking) << "no fallback machine fingerprint, setting it to: " << uuid.toString();
|
||||
// if you cannot read a fallback key cuz we aren't saving them, just generate one for
|
||||
// this session and move on
|
||||
if (DependencyManager::get<Setting::Manager>().isNull()) {
|
||||
return QUuid::createUuid();
|
||||
}
|
||||
// read fallback key (if any)
|
||||
Settings settings;
|
||||
uuid = QUuid(settings.value(FALLBACK_FINGERPRINT_KEY).toString());
|
||||
qCDebug(networking) << "read fallback maching fingerprint: " << uuid.toString();
|
||||
|
||||
if (uuid == QUuid()) {
|
||||
// no fallback yet, set one
|
||||
uuid = QUuid::createUuid();
|
||||
settings.setValue(FALLBACK_FINGERPRINT_KEY, uuid.toString());
|
||||
qCDebug(networking) << "no fallback machine fingerprint, setting it to: " << uuid.toString();
|
||||
}
|
||||
}
|
||||
|
||||
_machineFingerprint = uuid;
|
||||
}
|
||||
return uuid;
|
||||
|
||||
return _machineFingerprint;
|
||||
}
|
||||
|
||||
|
|
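getMachineFingerprint() now memoizes its result in the static _machineFingerprint and, when the platform string does not parse into a valid QUuid, falls back to a per-install UUID persisted under the fallbackFingerprint setting. From a caller's point of view (a hypothetical usage, not from the diff):

    QUuid first  = FingerprintUtils::getMachineFingerprint();   // computes, or reads/creates the fallback, once
    QUuid second = FingerprintUtils::getMachineFingerprint();   // normally just returns the cached value
    // The two values match except in the early-startup case the code above calls out, where no
    // Setting::Manager exists yet and a throwaway QUuid::createUuid() is returned instead.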
|
@ -21,6 +21,7 @@ public:
|
|||
|
||||
private:
|
||||
static QString getMachineFingerprintString();
|
||||
static QUuid _machineFingerprint;
|
||||
};
|
||||
|
||||
#endif // hifi_FingerprintUtils_h
|
||||
|
|
|
@ -533,13 +533,13 @@ void Resource::ensureLoading() {
|
|||
}
|
||||
|
||||
void Resource::setLoadPriority(const QPointer<QObject>& owner, float priority) {
|
||||
if (!(_failedToLoad || _loaded)) {
|
||||
if (!(_failedToLoad)) {
|
||||
_loadPriorities.insert(owner, priority);
|
||||
}
|
||||
}
|
||||
|
||||
void Resource::setLoadPriorities(const QHash<QPointer<QObject>, float>& priorities) {
|
||||
if (_failedToLoad || _loaded) {
|
||||
if (_failedToLoad) {
|
||||
return;
|
||||
}
|
||||
for (QHash<QPointer<QObject>, float>::const_iterator it = priorities.constBegin();
|
||||
|
@ -549,7 +549,7 @@ void Resource::setLoadPriorities(const QHash<QPointer<QObject>, float>& prioriti
|
|||
}
|
||||
|
||||
void Resource::clearLoadPriority(const QPointer<QObject>& owner) {
|
||||
if (!(_failedToLoad || _loaded)) {
|
||||
if (!(_failedToLoad)) {
|
||||
_loadPriorities.remove(owner);
|
||||
}
|
||||
}
|
||||
|
@ -612,10 +612,12 @@ void Resource::allReferencesCleared() {
|
|||
}
|
||||
}
|
||||
|
||||
void Resource::init() {
|
||||
void Resource::init(bool resetLoaded) {
|
||||
_startedLoading = false;
|
||||
_failedToLoad = false;
|
||||
_loaded = false;
|
||||
if (resetLoaded) {
|
||||
_loaded = false;
|
||||
}
|
||||
_attempts = 0;
|
||||
_activeUrl = _url;
|
||||
|
||||
|
|
|
@ -385,7 +385,7 @@ public:
|
|||
float getProgress() const { return (_bytesTotal <= 0) ? 0.0f : (float)_bytesReceived / _bytesTotal; }
|
||||
|
||||
/// Refreshes the resource.
|
||||
void refresh();
|
||||
virtual void refresh();
|
||||
|
||||
void setSelf(const QWeakPointer<Resource>& self) { _self = self; }
|
||||
|
||||
|
@ -425,7 +425,7 @@ protected slots:
|
|||
void attemptRequest();
|
||||
|
||||
protected:
|
||||
virtual void init();
|
||||
virtual void init(bool resetLoaded = true);
|
||||
|
||||
/// Called by ResourceCache to begin loading this Resource.
|
||||
/// This method can be overriden to provide custom request functionality. If this is done,
|
||||
|
@ -454,9 +454,14 @@ protected:
|
|||
QUrl _url;
|
||||
QUrl _activeUrl;
|
||||
ByteRange _requestByteRange;
|
||||
|
||||
// _loaded == true means we are in a loaded and usable state. It is possible that there may still be
|
||||
// active requests/loading while in this state. Example: Progressive KTX downloads, where higher resolution
|
||||
// mips are being download.
|
||||
bool _startedLoading = false;
|
||||
bool _failedToLoad = false;
|
||||
bool _loaded = false;
|
||||
|
||||
QHash<QPointer<QObject>, float> _loadPriorities;
|
||||
QWeakPointer<Resource> _self;
|
||||
QPointer<ResourceCache> _cache;
|
||||
|
|
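The new _loaded comment is the reason init() grew a resetLoaded flag: a progressively loaded KTX texture stays usable while further mips stream in. The only call site shown in this diff is the NetworkTexture hunk earlier, which reads as:

    // from NetworkTexture::startRequestForNextMipLevel() above:
    init(false);   // re-arm _startedLoading / _failedToLoad / _attempts, but keep _loaded == true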
|
@ -14,10 +14,10 @@
|
|||
#include <QNetworkDiskCache>
|
||||
#include <QStandardPaths>
|
||||
#include <QThread>
|
||||
#include <QFileInfo>
|
||||
|
||||
#include <SharedUtil.h>
|
||||
|
||||
|
||||
#include "AssetResourceRequest.h"
|
||||
#include "FileResourceRequest.h"
|
||||
#include "HTTPResourceRequest.h"
|
||||
|
@ -116,3 +116,51 @@ ResourceRequest* ResourceManager::createResourceRequest(QObject* parent, const Q
|
|||
request->moveToThread(&_thread);
|
||||
return request;
|
||||
}
|
||||
|
||||
|
||||
bool ResourceManager::resourceExists(const QUrl& url) {
|
||||
auto scheme = url.scheme();
|
||||
if (scheme == URL_SCHEME_FILE) {
|
||||
QFileInfo file { url.toString() };
|
||||
return file.exists();
|
||||
} else if (scheme == URL_SCHEME_HTTP || scheme == URL_SCHEME_HTTPS || scheme == URL_SCHEME_FTP) {
|
||||
auto& networkAccessManager = NetworkAccessManager::getInstance();
|
||||
QNetworkRequest request { url };
|
||||
|
||||
request.setAttribute(QNetworkRequest::FollowRedirectsAttribute, true);
|
||||
request.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
||||
|
||||
auto reply = networkAccessManager.head(request);
|
||||
|
||||
QEventLoop loop;
|
||||
QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
|
||||
loop.exec();
|
||||
|
||||
reply->deleteLater();
|
||||
|
||||
return reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt() == 200;
|
||||
} else if (scheme == URL_SCHEME_ATP) {
|
||||
auto request = new AssetResourceRequest(url);
|
||||
ByteRange range;
|
||||
range.fromInclusive = 1;
|
||||
range.toExclusive = 1;
|
||||
request->setByteRange(range);
|
||||
request->setCacheEnabled(false);
|
||||
|
||||
QEventLoop loop;
|
||||
|
||||
QObject::connect(request, &AssetResourceRequest::finished, &loop, &QEventLoop::quit);
|
||||
|
||||
request->send();
|
||||
|
||||
loop.exec();
|
||||
|
||||
request->deleteLater();
|
||||
|
||||
return request->getResult() == ResourceRequest::Success;
|
||||
}
|
||||
|
||||
qCDebug(networking) << "Unknown scheme (" << scheme << ") for URL: " << url.url();
|
||||
return false;
|
||||
}
|
||||
|
||||
|
|
|
@ -36,6 +36,10 @@ public:
|
|||
static void init();
|
||||
static void cleanup();
|
||||
|
||||
// Blocking call to check if a resource exists. This function uses a QEventLoop internally
|
||||
// to return to the calling thread so that events can still be processed.
|
||||
static bool resourceExists(const QUrl& url);
|
||||
|
||||
private:
|
||||
static QThread _thread;
|
||||
|
||||
|
|
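As the header comment notes, resourceExists() is a blocking call: a QFileInfo check for file URLs, an HTTP HEAD request for http/https/ftp, and a minimal ranged AssetResourceRequest for atp, with each network case waited on through a local QEventLoop so events keep running. A hypothetical caller, with an assumed URL:

    QUrl textureUrl { "https://example.com/models/brick_albedo.png" };   // assumed URL
    if (ResourceManager::resourceExists(textureUrl)) {
        // safe to hand the URL on to the OBJ reader / texture pipeline
    }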
|
@ -13,7 +13,7 @@
|
|||
|
||||
#include <PerfStat.h>
|
||||
|
||||
#include "SkeletonModel.h"
|
||||
#include "CauterizedModel.h"
|
||||
|
||||
using namespace render;
|
||||
|
||||
|
@ -29,7 +29,7 @@ void CauterizedMeshPartPayload::updateTransformForCauterizedMesh(
|
|||
|
||||
void CauterizedMeshPartPayload::bindTransform(gpu::Batch& batch, const render::ShapePipeline::LocationsPointer locations, RenderArgs::RenderMode renderMode) const {
|
||||
// Still relying on the raw data from the model
|
||||
SkeletonModel* skeleton = static_cast<SkeletonModel*>(_model);
|
||||
CauterizedModel* skeleton = static_cast<CauterizedModel*>(_model);
|
||||
bool useCauterizedMesh = (renderMode != RenderArgs::RenderMode::SHADOW_RENDER_MODE) && skeleton->getEnableCauterization();
|
||||
|
||||
if (useCauterizedMesh) {
|
|
@ -1,9 +1,6 @@
|
|||
//
|
||||
// CauterizedModelMeshPartPayload.h
|
||||
// interface/src/avatar
|
||||
//
|
||||
// Created by AndrewMeadows 2017.01.17
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
// Copyright 2013-2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
|
@ -12,7 +9,7 @@
|
|||
#ifndef hifi_CauterizedMeshPartPayload_h
|
||||
#define hifi_CauterizedMeshPartPayload_h
|
||||
|
||||
#include <MeshPartPayload.h>
|
||||
#include "MeshPartPayload.h"
|
||||
|
||||
class CauterizedMeshPartPayload : public ModelMeshPartPayload {
|
||||
public:
|
|
@@ -1,7 +1,4 @@
//
// CauterizedModel.cpp
// interface/src/avatar
//
// Created by Andrew Meadows 2017.01.17
// Copyright 2017 High Fidelity, Inc.
//

@@ -11,10 +8,10 @@

#include "CauterizedModel.h"

#include <AbstractViewStateInterface.h>
#include <MeshPartPayload.h>
#include <PerfStat.h>

#include "AbstractViewStateInterface.h"
#include "MeshPartPayload.h"
#include "CauterizedMeshPartPayload.h"
#include "RenderUtilsLogging.h"
@@ -1,7 +1,4 @@
//
// CauterizeableModel.h
// interface/src/avatar
//
// Created by Andrew Meadows 2016.01.17
// Copyright 2017 High Fidelity, Inc.
//

@@ -13,7 +10,7 @@
#define hifi_CauterizedModel_h

#include <Model.h>
#include "Model.h"

class CauterizedModel : public Model {
    Q_OBJECT

@@ -31,10 +28,10 @@ public:
    const std::unordered_set<int>& getCauterizeBoneSet() const { return _cauterizeBoneSet; }
    void setCauterizeBoneSet(const std::unordered_set<int>& boneSet) { _cauterizeBoneSet = boneSet; }

    void deleteGeometry() override;
    bool updateGeometry() override;
    void deleteGeometry() override;
    bool updateGeometry() override;

    void createVisibleRenderItemSet() override;
    void createVisibleRenderItemSet() override;
    void createCollisionRenderItemSet() override;

    virtual void updateClusterMatrices() override;

@@ -44,7 +41,7 @@ public:

protected:
    std::unordered_set<int> _cauterizeBoneSet;
    QVector<Model::MeshState> _cauterizeMeshStates;
    QVector<Model::MeshState> _cauterizeMeshStates;
    bool _isCauterized { false };
    bool _enableCauterization { false };
};
@@ -64,7 +64,9 @@ float fetchRoughnessMap(vec2 uv) {
uniform sampler2D normalMap;
vec3 fetchNormalMap(vec2 uv) {
    // unpack normal, swizzle to get into hifi tangent space with Y axis pointing out
    return normalize(texture(normalMap, uv).rbg -vec3(0.5, 0.5, 0.5));
    vec2 t = 2.0 * (texture(normalMap, uv).rg - vec2(0.5, 0.5));
    vec2 t2 = t*t;
    return vec3(t.x, sqrt(1 - t2.x - t2.y), t.y);
}
<@endif@>
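The new fetchNormalMap path stores only two tangent-plane components in the texture's R and G channels and rebuilds the third from the unit-length constraint x² + y² + z² = 1, swizzled so Y points out of the surface. A C++ mirror of that arithmetic follows; the clamp is an addition here to guard against slightly denormalized inputs, which the shader itself does not do:

#include <algorithm>
#include <cmath>
#include <cstdio>

struct Vec3 { float x, y, z; };

// Mirror of the shader's two-channel normal reconstruction: r and g are the raw
// texture channels in [0,1]; the "up" (Y) component is rebuilt from the unit-length
// constraint, matching hifi tangent space with Y pointing out.
Vec3 reconstructNormal(float r, float g) {
    float tx = 2.0f * (r - 0.5f);             // unpack to [-1, 1]
    float tz = 2.0f * (g - 0.5f);
    float d = 1.0f - tx * tx - tz * tz;
    float ty = std::sqrt(std::max(d, 0.0f));  // clamp added here; the shader assumes d >= 0
    return { tx, ty, tz };
}

int main() {
    Vec3 n = reconstructNormal(0.75f, 0.5f);   // encodes x = +0.5, z = 0
    std::printf("n = (%f, %f, %f)\n", n.x, n.y, n.z);  // expect roughly (0.5, 0.866, 0)
    return 0;
}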
@@ -573,7 +573,7 @@ bool Model::addToScene(const render::ScenePointer& scene,

    bool somethingAdded = false;
    if (_collisionGeometry) {
        if (_collisionRenderItems.empty()) {
        if (_collisionRenderItemsMap.empty()) {
            foreach (auto renderItem, _collisionRenderItems) {
                auto item = scene->allocateID();
                auto renderPayload = std::make_shared<MeshPartPayload::Payload>(renderItem);

@@ -583,7 +583,7 @@ bool Model::addToScene(const render::ScenePointer& scene,
                transaction.resetItem(item, renderPayload);
                _collisionRenderItemsMap.insert(item, renderPayload);
            }
            somethingAdded = !_collisionRenderItems.empty();
            somethingAdded = !_collisionRenderItemsMap.empty();
        }
    } else {
        if (_modelMeshRenderItemsMap.empty()) {

@@ -632,7 +632,7 @@ void Model::removeFromScene(const render::ScenePointer& scene, render::Transacti
        transaction.removeItem(item);
    }
    _collisionRenderItems.clear();
    _collisionRenderItems.clear();
    _collisionRenderItemsMap.clear();
    _addedToScene = false;

    _renderInfoVertexCount = 0;
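The three hunks above move the collision bookkeeping from the `_collisionRenderItems` list to the ID-to-payload map `_collisionRenderItemsMap`, so the "already added?" check, the "something was added" result, and the cleanup in removeFromScene all consult the container that addToScene actually fills. A stripped-down sketch of that allocate/insert/clear pattern follows; the Scene, Transaction, and payload types below are simplified stand-ins, not the engine's real render API:

#include <cstdint>
#include <map>
#include <memory>
#include <vector>

// Simplified stand-ins for the engine's scene/transaction types (assumptions).
using ItemID = uint64_t;
struct Payload { int meshPart; };
using PayloadPointer = std::shared_ptr<Payload>;

struct Scene { ItemID next = 1; ItemID allocateID() { return next++; } };
struct Transaction {
    std::map<ItemID, PayloadPointer> resets;
    std::vector<ItemID> removals;
    void resetItem(ItemID id, PayloadPointer p) { resets[id] = std::move(p); }
    void removeItem(ItemID id) { removals.push_back(id); }
};

struct ModelSketch {
    std::vector<int> _collisionRenderItems;                     // source data (mesh parts)
    std::map<ItemID, PayloadPointer> _collisionRenderItemsMap;  // what the scene actually holds

    bool addToScene(Scene& scene, Transaction& transaction) {
        // Only add once: the map, not the source list, records what was sent to the scene.
        if (_collisionRenderItemsMap.empty()) {
            for (int renderItem : _collisionRenderItems) {
                ItemID item = scene.allocateID();
                auto payload = std::make_shared<Payload>(Payload{ renderItem });
                transaction.resetItem(item, payload);
                _collisionRenderItemsMap.emplace(item, payload);
            }
        }
        return !_collisionRenderItemsMap.empty();
    }

    void removeFromScene(Transaction& transaction) {
        for (const auto& entry : _collisionRenderItemsMap) {
            transaction.removeItem(entry.first);
        }
        _collisionRenderItemsMap.clear();  // mirror of the clear() added in the diff
    }
};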
@@ -1,7 +1,4 @@
//
// SoftAttachmentModel.cpp
// interface/src/avatar
//
// Created by Anthony J. Thibault on 12/17/15.
// Copyright 2013 High Fidelity, Inc.
//

@@ -10,7 +7,6 @@
//

#include "SoftAttachmentModel.h"
#include "InterfaceLogging.h"

SoftAttachmentModel::SoftAttachmentModel(RigPointer rig, QObject* parent, RigPointer rigOverride) :
    CauterizedModel(rig, parent),
@@ -1,7 +1,4 @@
//
// SoftAttachmentModel.h
// interface/src/avatar
//
// Created by Anthony J. Thibault on 12/17/15.
// Copyright 2015 High Fidelity, Inc.
//
@@ -11,7 +11,9 @@

#include <QDebug>
#include <GLMHelpers.h>
#include <glm/gtx/string_cast.hpp>

#include "ScriptEngineLogging.h"
#include "ScriptEngine.h"
#include "Mat4.h"

glm::mat4 Mat4::multiply(const glm::mat4& m1, const glm::mat4& m2) const {
@@ -66,10 +68,12 @@ glm::vec3 Mat4::getUp(const glm::mat4& m) const {
    return glm::vec3(m[0][1], m[1][1], m[2][1]);
}

void Mat4::print(const QString& label, const glm::mat4& m) const {
    qCDebug(scriptengine) << qPrintable(label) <<
        "row0 =" << m[0][0] << "," << m[1][0] << "," << m[2][0] << "," << m[3][0] <<
        "row1 =" << m[0][1] << "," << m[1][1] << "," << m[2][1] << "," << m[3][1] <<
        "row2 =" << m[0][2] << "," << m[1][2] << "," << m[2][2] << "," << m[3][2] <<
        "row3 =" << m[0][3] << "," << m[1][3] << "," << m[2][3] << "," << m[3][3];
void Mat4::print(const QString& label, const glm::mat4& m, bool transpose) const {
    glm::dmat4 out = transpose ? glm::transpose(m) : m;
    QString message = QString("%1 %2").arg(qPrintable(label));
    message = message.arg(glm::to_string(out).c_str());
    qCDebug(scriptengine) << message;
    if (ScriptEngine* scriptEngine = qobject_cast<ScriptEngine*>(engine())) {
        scriptEngine->print(message);
    }
}
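The rewritten Mat4::print formats the whole matrix with glm::to_string, optionally transposing first so the text reads row by row, and then forwards the string to the calling script as well as the log. A standalone sketch of the glm part only (the ScriptEngine forwarding is left out):

#define GLM_ENABLE_EXPERIMENTAL  // required for <glm/gtx/...> headers in recent glm releases
#include <glm/glm.hpp>
#include <glm/gtx/string_cast.hpp>
#include <iostream>
#include <string>

// Format a matrix the way the rewritten print() does: optionally transpose first
// (so the text reads row-by-row), then let glm::to_string do the formatting.
std::string formatMat4(const std::string& label, const glm::mat4& m, bool transpose = false) {
    glm::dmat4 out = transpose ? glm::transpose(glm::dmat4(m)) : glm::dmat4(m);
    return label + " " + glm::to_string(out);
}

int main() {
    glm::mat4 m(1.0f);
    m[3] = glm::vec4(1.0f, 2.0f, 3.0f, 1.0f);  // put a translation in the last column
    std::cout << formatMat4("m =", m, /*transpose=*/true) << "\n";
    return 0;
}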
@@ -16,9 +16,10 @@

#include <QObject>
#include <QString>
#include <QtScript/QScriptable>

/// Scriptable Mat4 object. Used exclusively in the JavaScript API
class Mat4 : public QObject {
class Mat4 : public QObject, protected QScriptable {
    Q_OBJECT

public slots:
@@ -43,7 +44,7 @@ public slots:
    glm::vec3 getRight(const glm::mat4& m) const;
    glm::vec3 getUp(const glm::mat4& m) const;

    void print(const QString& label, const glm::mat4& m) const;
    void print(const QString& label, const glm::mat4& m, bool transpose = false) const;
};

#endif // hifi_Mat4_h
@@ -15,7 +15,9 @@

#include <OctreeConstants.h>
#include <GLMHelpers.h>
#include <glm/gtx/string_cast.hpp>

#include "ScriptEngineLogging.h"
#include "ScriptEngine.h"
#include "Quat.h"

quat Quat::normalize(const glm::quat& q) {
@@ -114,8 +116,17 @@ float Quat::dot(const glm::quat& q1, const glm::quat& q2) {
    return glm::dot(q1, q2);
}

void Quat::print(const QString& label, const glm::quat& q) {
    qCDebug(scriptengine) << qPrintable(label) << q.x << "," << q.y << "," << q.z << "," << q.w;
void Quat::print(const QString& label, const glm::quat& q, bool asDegrees) {
    QString message = QString("%1 %2").arg(qPrintable(label));
    if (asDegrees) {
        message = message.arg(glm::to_string(glm::dvec3(safeEulerAngles(q))).c_str());
    } else {
        message = message.arg(glm::to_string(glm::dquat(q)).c_str());
    }
    qCDebug(scriptengine) << message;
    if (ScriptEngine* scriptEngine = qobject_cast<ScriptEngine*>(engine())) {
        scriptEngine->print(message);
    }
}

bool Quat::equal(const glm::quat& q1, const glm::quat& q2) {
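With asDegrees set, the new Quat::print logs Euler angles instead of raw quaternion components. safeEulerAngles is a hifi helper not shown here, so the standalone sketch below substitutes glm's own eulerAngles, which should agree up to angle conventions:

#define GLM_ENABLE_EXPERIMENTAL  // required for <glm/gtx/...> headers in recent glm releases
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/string_cast.hpp>
#include <iostream>
#include <string>

// Format a quaternion either as raw components or as Euler angles in degrees,
// mirroring the shape of the new print(label, q, asDegrees) overload.
std::string formatQuat(const std::string& label, const glm::quat& q, bool asDegrees = false) {
    if (asDegrees) {
        glm::dvec3 eulerDegrees = glm::degrees(glm::dvec3(glm::eulerAngles(q)));  // pitch/yaw/roll per glm
        return label + " " + glm::to_string(eulerDegrees);
    }
    return label + " " + glm::to_string(glm::dquat(q));
}

int main() {
    glm::quat q = glm::angleAxis(glm::radians(90.0f), glm::vec3(0.0f, 1.0f, 0.0f));  // 90 degrees about Y
    std::cout << formatQuat("as quat:   ", q) << "\n";
    std::cout << formatQuat("as degrees:", q, true) << "\n";  // expect roughly (0, 90, 0)
    return 0;
}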
@@ -18,6 +18,7 @@

#include <QObject>
#include <QString>
#include <QtScript/QScriptable>

/**jsdoc
 * A Quaternion

@@ -30,7 +31,7 @@
 */

/// Scriptable interface a Quaternion helper class object. Used exclusively in the JavaScript API
class Quat : public QObject {
class Quat : public QObject, protected QScriptable {
    Q_OBJECT

public slots:
@@ -58,7 +59,7 @@ public slots:
    glm::quat slerp(const glm::quat& q1, const glm::quat& q2, float alpha);
    glm::quat squad(const glm::quat& q1, const glm::quat& q2, const glm::quat& s1, const glm::quat& s2, float h);
    float dot(const glm::quat& q1, const glm::quat& q2);
    void print(const QString& label, const glm::quat& q);
    void print(const QString& label, const glm::quat& q, bool asDegrees = false);
    bool equal(const glm::quat& q1, const glm::quat& q2);
    glm::quat cancelOutRollAndPitch(const glm::quat& q);
    glm::quat cancelOutRoll(const glm::quat& q);
@@ -105,11 +105,11 @@ static QScriptValue debugPrint(QScriptContext* context, QScriptEngine* engine) {
        }
        message += context->argument(i).toString();
    }
    qCDebug(scriptengineScript).noquote() << "script:print()<<" << message; // noquote() so that \n is treated as newline
    qCDebug(scriptengineScript).noquote() << message; // noquote() so that \n is treated as newline

    // FIXME - this approach neeeds revisiting. print() comes here, which ends up calling Script.print?
    engine->globalObject().property("Script").property("print")
        .call(engine->nullValue(), QScriptValueList({ message }));
    if (ScriptEngine *scriptEngine = qobject_cast<ScriptEngine*>(engine)) {
        scriptEngine->print(message);
    }

    return QScriptValue();
}
@@ -472,6 +472,11 @@ void ScriptEngine::scriptInfoMessage(const QString& message) {
    emit infoMessage(message, getFilename());
}

void ScriptEngine::scriptPrintedMessage(const QString& message) {
    qCDebug(scriptengine) << message;
    emit printedMessage(message, getFilename());
}

// Even though we never pass AnimVariantMap directly to and from javascript, the queued invokeMethod of
// callAnimationStateHandler requires that the type be registered.
// These two are meaningful, if we ever do want to use them...
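Since script print() output now flows through ScriptEngine::print and ends up in the printedMessage signal shown above (message plus script filename), host code can observe it with an ordinary signal connection. A hedged sketch, assuming a valid ScriptEngine pointer and the two-QString signature implied by the emit:

#include <QDebug>
#include "ScriptEngine.h"  // hifi header; include path assumed

// Assumes printedMessage carries (message, filename), matching scriptPrintedMessage() above.
void watchScriptOutput(ScriptEngine* scriptEngine) {
    QObject::connect(scriptEngine, &ScriptEngine::printedMessage,
                     [](const QString& message, const QString& fileName) {
        qInfo().noquote() << fileName << "printed:" << message;
    });
}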