Merge branch 'master' of https://github.com/highfidelity/hifi into ambient-bis

samcake 2017-04-07 11:55:08 -07:00
commit 58cec3fb96
43 changed files with 458 additions and 162 deletions


@ -17,6 +17,8 @@ Documentation
=========
Documentation is available at [docs.highfidelity.com](https://docs.highfidelity.com), if something is missing, please suggest it via a new job on Worklist (add to the hifi-docs project).
There is also detailed [documentation on our coding standards](https://wiki.highfidelity.com/wiki/Coding_Standards).
Build Instructions
=========
All information required to build is found in the [build guide](BUILD.md).


@ -7,8 +7,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/ValveSoftware/openvr/archive/v1.0.3.zip
URL_MD5 b484b12901917cc739e40389583c8b0d
URL https://github.com/ValveSoftware/openvr/archive/v1.0.6.zip
URL_MD5 f6892cd3a3078f505d03b4297f5a1951
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""


@ -127,7 +127,7 @@ Rectangle {
text: hifi.glyphs.mic
color: hifi.colors.primaryHighlight
anchors.verticalCenter: parent.verticalCenter
font.pointSize: 27
size: 32
}
RalewayRegular {
anchors.verticalCenter: parent.verticalCenter
@ -182,7 +182,7 @@ Rectangle {
text: hifi.glyphs.unmuted
color: hifi.colors.primaryHighlight
anchors.verticalCenter: parent.verticalCenter
font.pointSize: 27
size: 32
}
RalewayRegular {
anchors.verticalCenter: parent.verticalCenter


@ -14,6 +14,7 @@ import QtQuick.Controls 1.4
import QtQuick.Controls.Styles 1.4
import QtGraphicalEffects 1.0
import "../styles-uit"
import "../controls-uit" as HifiControls
import "toolbars"
// references Users, UserActivityLogger, MyAvatar, Vec3, Quat, AddressManager from root context
@ -42,8 +43,9 @@ Item {
property bool selected: false
property bool isAdmin: false
property bool isPresent: true
property string placeName: ""
property string profilePicBorderColor: (connectionStatus == "connection" ? hifi.colors.indigoAccent : (connectionStatus == "friend" ? hifi.colors.greenHighlight : "transparent"))
property alias avImage: avatarImage
Item {
id: avatarImage
visible: profileUrl !== "" && userName !== "";
@ -79,25 +81,6 @@ Item {
anchors.fill: parent;
visible: userImage.status != Image.Ready;
}
StateImage {
id: infoHoverImage;
visible: false;
imageURL: "../../images/info-icon-2-state.svg";
size: 32;
buttonState: 1;
anchors.centerIn: parent;
}
MouseArea {
anchors.fill: parent
enabled: selected || isMyCard;
hoverEnabled: enabled
onClicked: {
userInfoViewer.url = defaultBaseUrl + "/users/" + userName;
userInfoViewer.visible = true;
}
onEntered: infoHoverImage.visible = true;
onExited: infoHoverImage.visible = false;
}
}
// Colored border around avatarImage
@ -316,9 +299,10 @@ Item {
visible: thisNameCard.userName !== "";
// Size
width: parent.width
height: pal.activeTab == "nearbyTab" || isMyCard ? usernameTextPixelSize + 4 : parent.height;
height: usernameTextPixelSize + 4
// Anchors
anchors.top: isMyCard ? myDisplayName.bottom : (pal.activeTab == "nearbyTab" ? displayNameContainer.bottom : parent.top);
anchors.top: isMyCard ? myDisplayName.bottom : pal.activeTab == "nearbyTab" ? displayNameContainer.bottom : undefined //(parent.height - displayNameTextPixelSize/2));
anchors.verticalCenter: pal.activeTab == "connectionsTab" ? avatarImage.verticalCenter : undefined
anchors.left: avatarImage.right;
anchors.leftMargin: avatarImage.visible ? 5 : 0;
anchors.rightMargin: 5;
@ -346,6 +330,92 @@ Item {
}
}
}
StateImage {
id: nameCardConnectionInfoImage
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
imageURL: "../../images/info-icon-2-state.svg" // PLACEHOLDER!!!
size: 32;
buttonState: 0;
anchors.left: avatarImage.right
anchors.bottom: parent.bottom
}
MouseArea {
anchors.fill:nameCardConnectionInfoImage
enabled: selected
hoverEnabled: true
onClicked: {
userInfoViewer.url = defaultBaseUrl + "/users/" + userName;
userInfoViewer.visible = true;
}
onEntered: {
nameCardConnectionInfoImage.buttonState = 1;
}
onExited: {
nameCardConnectionInfoImage.buttonState = 0;
}
}
FiraSansRegular {
id: nameCardConnectionInfoText
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && !isMyCard
width: parent.width
height: displayNameTextPixelSize
size: displayNameTextPixelSize - 4
anchors.left: nameCardConnectionInfoImage.right
anchors.verticalCenter: nameCardConnectionInfoImage.verticalCenter
anchors.leftMargin: 5
verticalAlignment: Text.AlignVCenter
text: "Info"
color: hifi.colors.baseGray
}
HiFiGlyphs {
id: nameCardRemoveConnectionImage
visible: selected && !isMyCard && pal.activeTab == "connectionsTab"
text: hifi.glyphs.close
size: 28;
x: 120
anchors.verticalCenter: nameCardConnectionInfoImage.verticalCenter
}
MouseArea {
anchors.fill:nameCardRemoveConnectionImage
enabled: selected
hoverEnabled: true
onClicked: {
// send message to pal.js to forgetConnection
pal.sendToScript({method: 'removeConnection', params: thisNameCard.userName});
}
onEntered: {
nameCardRemoveConnectionImage.text = hifi.glyphs.closeInverted;
}
onExited: {
nameCardRemoveConnectionImage.text = hifi.glyphs.close;
}
}
FiraSansRegular {
id: nameCardRemoveConnectionText
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && !isMyCard
width: parent.width
height: displayNameTextPixelSize
size: displayNameTextPixelSize - 4
anchors.left: nameCardRemoveConnectionImage.right
anchors.verticalCenter: nameCardRemoveConnectionImage.verticalCenter
anchors.leftMargin: 5
verticalAlignment: Text.AlignVCenter
text: "Forget"
color: hifi.colors.baseGray
}
HifiControls.Button {
id: visitConnectionButton
visible: selected && !isMyCard && pal.activeTab == "connectionsTab" && !isMyCard
text: "Visit"
enabled: thisNameCard.placeName !== ""
anchors.verticalCenter: nameCardRemoveConnectionImage.verticalCenter
x: 240
onClicked: {
AddressManager.goToUser(thisNameCard.userName);
UserActivityLogger.palAction("go_to_user", thisNameCard.userName);
}
}
// VU Meter
Rectangle {
id: nameCardVUMeter
@ -484,7 +554,7 @@ Item {
}
}
}
function updateGainFromQML(avatarUuid, sliderValue, isReleased) {
Users.setAvatarGain(avatarUuid, sliderValue);
if (isReleased) {


@ -52,6 +52,13 @@ Rectangle {
id: letterboxMessage;
z: 999; // Force the popup on top of everything else
}
Connections {
target: GlobalServices
onMyUsernameChanged: {
myData.userName = Account.username;
myDataChanged(); // Setting a property within an object isn't enough to update dependencies. This will do it.
}
}
// The ComboDialog used for setting availability
ComboDialog {
id: comboDialog;
@ -301,7 +308,6 @@ Rectangle {
MouseArea {
anchors.fill: parent;
hoverEnabled: true;
enabled: activeTab === "connectionsTab";
onClicked: letterbox(hifi.glyphs.question,
"Connections and Friends",
"<font color='purple'>Purple borders around profile pictures are <b>Connections</b>.</font><br>" +
@ -436,7 +442,7 @@ Rectangle {
rowDelegate: Rectangle { // The only way I know to specify a row height.
// Size
height: rowHeight + (styleData.selected ? 15 : 0);
color: rowColor(styleData.selected, styleData.alternate);
color: nearbyRowColor(styleData.selected, styleData.alternate);
}
// This Item refers to the contents of each Cell
@ -749,7 +755,7 @@ Rectangle {
resizable: false;
}
TableViewColumn {
role: "friends";
role: "connection";
title: "FRIEND";
width: actionButtonWidth;
movable: false;
@ -763,8 +769,8 @@ Rectangle {
// This Rectangle refers to each Row in the connectionsTable.
rowDelegate: Rectangle {
// Size
height: rowHeight;
color: rowColor(styleData.selected, styleData.alternate);
height: rowHeight + (styleData.selected ? 15 : 0);
color: connectionsRowColor(styleData.selected, styleData.alternate);
}
// This Item refers to the contents of each Cell
@ -779,6 +785,7 @@ Rectangle {
profileUrl: (model && model.profileUrl) || "";
displayName: "";
userName: model ? model.userName : "";
placeName: model ? model.placeName : ""
connectionStatus : model ? model.connection : "";
selected: styleData.selected;
// Size
@ -797,12 +804,16 @@ Rectangle {
elide: Text.ElideRight;
// Size
width: parent.width;
// Anchors
anchors.fill: parent;
// you would think that this would work:
// anchors.verticalCenter: connectionsNameCard.avImage.verticalCenter
// but no! you cannot anchor to a non-sibling or parent. So I will
// align with the friends checkbox, where I did the manual alignment
anchors.verticalCenter: friendsCheckBox.verticalCenter
// Text Size
size: 16;
// Text Positioning
verticalAlignment: Text.AlignVCenter
horizontalAlignment: Text.AlignHCenter
// Style
color: hifi.colors.blueAccent;
font.underline: true;
@ -822,22 +833,21 @@ Rectangle {
// "Friends" checkbox
HifiControlsUit.CheckBox {
id: friendsCheckBox;
visible: styleData.role === "friends" && model.userName !== myData.userName;
anchors.centerIn: parent;
checked: model ? (model["connection"] === "friend" ? true : false) : false;
visible: styleData.role === "connection" && model && model.userName !== myData.userName;
// you would think that this would work:
// anchors.verticalCenter: connectionsNameCard.avImage.verticalCenter
// but no! you cannot anchor to a non-sibling or parent. So:
x: parent.width/2 - boxSize/2;
y: connectionsNameCard.avImage.y + connectionsNameCard.avImage.height/2 - boxSize/2;
checked: model && (model.connection === "friend");
boxSize: 24;
onClicked: {
var newValue = !(model["connection"] === "friend");
var newValue = model.connection !== "friend";
connectionsUserModel.setProperty(model.userIndex, styleData.role, newValue);
connectionsUserModelData[model.userIndex][styleData.role] = newValue; // Defensive programming
pal.sendToScript({method: newValue ? 'addFriend' : 'removeFriend', params: model.userName});
UserActivityLogger["palAction"](newValue ? styleData.role : "un-" + styleData.role, model.sessionId);
// http://doc.qt.io/qt-5/qtqml-syntax-propertybinding.html#creating-property-bindings-from-javascript
// I'm using an explicit binding here because clicking a checkbox breaks the implicit binding as set by
// "checked:" statement above.
checked = Qt.binding(function() { return (model["connection"] === "friend" ? true : false)});
}
}
}
@ -901,7 +911,7 @@ Rectangle {
wrapMode: Text.WordWrap
textFormat: Text.StyledText;
// Text
text: HMD.active ?
text: HMD.isMounted ?
"<b>When you meet someone you want to remember later, you can <font color='purple'>connect</font> with a handshake:</b><br><br>" +
"1. Put your hand out onto their hand and squeeze your controller's grip button on its side.<br>" +
"2. Once the other person puts their hand onto yours, you'll see your connection form.<br>" +
@ -960,7 +970,6 @@ Rectangle {
// Text size
size: hifi.fontSizes.tabularData;
// Anchors
anchors.top: myCard.top;
anchors.left: parent.left;
// Style
color: hifi.colors.baseGrayHighlight;
@ -1184,9 +1193,12 @@ Rectangle {
}
}
function rowColor(selected, alternate) {
function nearbyRowColor(selected, alternate) {
return selected ? hifi.colors.orangeHighlight : alternate ? hifi.colors.tableRowLightEven : hifi.colors.tableRowLightOdd;
}
function connectionsRowColor(selected, alternate) {
return selected ? hifi.colors.lightBlueHighlight : alternate ? hifi.colors.tableRowLightEven : hifi.colors.tableRowLightOdd;
}
function findNearbySessionIndex(sessionId, optionalData) { // no findIndex in .qml
var data = optionalData || nearbyUserModelData, length = data.length;
for (var i = 0; i < length; i++) {
@ -1257,6 +1269,8 @@ Rectangle {
selectionTimer.userIndex = userIndex;
selectionTimer.start();
}
// in any case make sure we are in the nearby tab
activeTab="nearbyTab";
break;
// Received an "updateUsername()" request from the JS
case 'updateUsername':


@ -21,6 +21,9 @@ Rectangle {
id: root
objectName: "AssetServer"
property string title: "Asset Browser"
property bool keyboardRaised: false
property var eventBridge;
signal sendToScript(var message);
property bool isHMD: false
@ -415,7 +418,6 @@ Rectangle {
Column {
width: parent.width
y: hifi.dimensions.tabletMenuHeader //-bgNavBar
spacing: 10
HifiControls.TabletContentSection {


@ -20,7 +20,7 @@ import "../../windows"
Rectangle {
id: root
objectName: "RunningScripts"
property var title: "Running Scripts"
property string title: "Running Scripts"
HifiConstants { id: hifi }
signal sendToScript(var message);
property var eventBridge;
@ -81,9 +81,9 @@ Rectangle {
Flickable {
id: flickable
width: parent.width
width: tabletRoot.width
height: parent.height - (keyboard.raised ? keyboard.raisedHeight : 0)
contentWidth: parent.width
contentWidth: column.width
contentHeight: column.childrenRect.height
clip: true
@ -121,9 +121,8 @@ Rectangle {
model: runningScriptsModel
id: table
height: 185
width: parent.width
colorScheme: hifi.colorSchemes.dark
anchors.left: parent.left
anchors.right: parent.right
expandSelectedRow: true
itemDelegate: Item {


@ -202,7 +202,7 @@ Item {
RalewaySemiBold {
id: usernameText
text: tablet.parent.parent.username
text: tabletRoot.username
anchors.verticalCenter: parent.verticalCenter
anchors.right: parent.right
anchors.rightMargin: 20


@ -25,8 +25,8 @@ StackView {
HifiConstants { id: hifi }
HifiStyles.HifiConstants { id: hifiStyleConstants }
initialItem: addressBarDialog
width: parent.width
height: parent.height
width: parent !== null ? parent.width : undefined
height: parent !== null ? parent.height : undefined
property var eventBridge;
property var allStories: [];
property int cardWidth: 460;
@ -107,7 +107,7 @@ StackView {
imageURL: "../../../images/home.svg"
onClicked: {
addressBarDialog.loadHome();
root.shown = false;
tabletRoot.shown = false;
}
anchors {
left: parent.left
@ -142,7 +142,9 @@ StackView {
anchors {
top: navBar.bottom
right: parent.right
rightMargin: 16
left: parent.left
leftMargin: 16
}
property int inputAreaHeight: 70
@ -291,8 +293,6 @@ StackView {
left: parent.left
right: parent.right
leftMargin: 10
verticalCenter: parent.verticalCenter;
horizontalCenter: parent.horizontalCenter;
}
model: suggestions
orientation: ListView.Vertical
@ -547,14 +547,14 @@ StackView {
if (addressLine.text !== "") {
addressBarDialog.loadAddress(addressLine.text, fromSuggestions)
}
root.shown = false;
tabletRoot.shown = false;
}
Keys.onPressed: {
switch (event.key) {
case Qt.Key_Escape:
case Qt.Key_Back:
root.shown = false
tabletRoot.shown = false
clearAddressLineTimer.start();
event.accepted = true
break


@ -13,6 +13,7 @@ Item {
property var openMessage: null;
property string subMenu: ""
signal showDesktop();
property bool shown: true
function setOption(value) {
option = value;


@ -72,6 +72,7 @@ Item {
readonly property color magentaAccent: "#A2277C"
readonly property color checkboxCheckedRed: "#FF0000"
readonly property color checkboxCheckedBorderRed: "#D00000"
readonly property color lightBlueHighlight: "#d6f6ff"
// Semitransparent
readonly property color darkGray30: "#4d121212"


@ -76,8 +76,8 @@ void JSConsole::setScriptEngine(ScriptEngine* scriptEngine) {
return;
}
if (_scriptEngine != NULL) {
disconnect(_scriptEngine, SIGNAL(printedMessage(const QString&)), this, SLOT(handlePrint(const QString&)));
disconnect(_scriptEngine, SIGNAL(errorMessage(const QString&)), this, SLOT(handleError(const QString&)));
disconnect(_scriptEngine, &ScriptEngine::printedMessage, this, &JSConsole::handlePrint);
disconnect(_scriptEngine, &ScriptEngine::errorMessage, this, &JSConsole::handleError);
if (_ownScriptEngine) {
_scriptEngine->deleteLater();
}
@ -87,8 +87,8 @@ void JSConsole::setScriptEngine(ScriptEngine* scriptEngine) {
_ownScriptEngine = scriptEngine == NULL;
_scriptEngine = _ownScriptEngine ? DependencyManager::get<ScriptEngines>()->loadScript(QString(), false) : scriptEngine;
connect(_scriptEngine, SIGNAL(printedMessage(const QString&)), this, SLOT(handlePrint(const QString&)));
connect(_scriptEngine, SIGNAL(errorMessage(const QString&)), this, SLOT(handleError(const QString&)));
connect(_scriptEngine, &ScriptEngine::printedMessage, this, &JSConsole::handlePrint);
connect(_scriptEngine, &ScriptEngine::errorMessage, this, &JSConsole::handleError);
}
void JSConsole::executeCommand(const QString& command) {
@ -134,11 +134,13 @@ void JSConsole::commandFinished() {
resetCurrentCommandHistory();
}
void JSConsole::handleError(const QString& message) {
void JSConsole::handleError(const QString& scriptName, const QString& message) {
Q_UNUSED(scriptName);
appendMessage(GUTTER_ERROR, "<span style='" + RESULT_ERROR_STYLE + "'>" + message.toHtmlEscaped() + "</span>");
}
void JSConsole::handlePrint(const QString& message) {
void JSConsole::handlePrint(const QString& scriptName, const QString& message) {
Q_UNUSED(scriptName);
appendMessage("", message);
}
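
The two JSConsole hunks above replace the string-based SIGNAL()/SLOT() macros with pointer-to-member connections while widening the handlers to accept the script name. Below is a minimal, self-contained sketch of why that form is preferable, using hypothetical Emitter and Receiver classes rather than hifi's own types: with the functor syntax, a signature mismatch (exactly the kind introduced here by the new scriptName argument) is a compile error instead of a connection that silently never fires at runtime.

```cpp
#include <QDebug>
#include <QObject>
#include <QString>

// Hypothetical stand-ins for ScriptEngine / JSConsole.
class Emitter : public QObject {
    Q_OBJECT
signals:
    void printedMessage(const QString& scriptName, const QString& message);
};

class Receiver : public QObject {
    Q_OBJECT
public slots:
    void handlePrint(const QString& scriptName, const QString& message) {
        qDebug() << scriptName << message;
    }
};

void wire(Emitter* emitter, Receiver* receiver) {
    // Checked by the compiler: the slot must be able to accept the signal's arguments.
    QObject::connect(emitter, &Emitter::printedMessage, receiver, &Receiver::handlePrint);

    // The old macro form is only checked at runtime; if the string signatures drift
    // apart, connect() just logs "No such slot" and returns false:
    // QObject::connect(emitter, SIGNAL(printedMessage(QString, QString)),
    //                  receiver, SLOT(handlePrint(QString)));
}
```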


@ -47,8 +47,8 @@ protected:
protected slots:
void scrollToBottom();
void resizeTextInput();
void handlePrint(const QString& message);
void handleError(const QString& message);
void handlePrint(const QString& scriptName, const QString& message);
void handleError(const QString& scriptName, const QString& message);
void commandFinished();
private:


@ -74,7 +74,7 @@ void AnimationReader::run() {
// Parse the FBX directly from the QNetworkReply
FBXGeometry::Pointer fbxgeo;
if (_url.path().toLower().endsWith(".fbx")) {
fbxgeo.reset(readFBX(_data, QVariantHash(), _url.path()));
fbxgeo.reset(readFBX(_data, QVariantHash(), _url));
} else {
QString errorStr("usupported format");
emit onError(299, errorStr);


@ -182,6 +182,7 @@ bool EntityTree::updateEntityWithElement(EntityItemPointer entity, const EntityI
if (!wantsLocked) {
EntityItemProperties tempProperties;
tempProperties.setLocked(wantsLocked);
tempProperties.setLastEdited(properties.getLastEdited());
bool success;
AACube queryCube = entity->getQueryAACube(success);


@ -376,10 +376,10 @@ public:
};
bool checkMaterialsHaveTextures(const QHash<QString, FBXMaterial>& materials,
const QHash<QString, QByteArray>& textureFilenames, const QMultiMap<QString, QString>& _connectionChildMap) {
const QHash<QString, QByteArray>& textureFilepaths, const QMultiMap<QString, QString>& _connectionChildMap) {
foreach (const QString& materialID, materials.keys()) {
foreach (const QString& childID, _connectionChildMap.values(materialID)) {
if (textureFilenames.contains(childID)) {
if (textureFilepaths.contains(childID)) {
return true;
}
}
@ -443,21 +443,48 @@ FBXLight extractLight(const FBXNode& object) {
return light;
}
QByteArray fileOnUrl(const QByteArray& filepath, const QString& url) {
QString path = QFileInfo(url).path();
QByteArray filename = filepath;
QFileInfo checkFile(path + "/" + filepath);
QByteArray fixedTextureFilepath(QByteArray fbxRelativeFilepath, QUrl url) {
// first setup a QFileInfo for the passed relative filepath, with backslashes replaced by forward slashes
auto fileInfo = QFileInfo { fbxRelativeFilepath.replace("\\", "/") };
// check if the file exists at the RelativeFilename
if (!(checkFile.exists() && checkFile.isFile())) {
// if not, assume it is in the fbx directory
filename = filename.mid(filename.lastIndexOf('/') + 1);
#ifndef Q_OS_WIN
// it turns out that absolute windows paths starting with drive letters look like relative paths to QFileInfo on UNIX
// so we add a check for that here to work around it
bool isRelative = fbxRelativeFilepath[1] != ':' && fileInfo.isRelative();
#else
bool isRelative = fileInfo.isRelative();
#endif
if (isRelative) {
// the RelativeFilename pulled from the FBX is already correctly relative
// so simply return this as the filepath to use
return fbxRelativeFilepath;
} else {
// the RelativeFilename pulled from the FBX is an absolute path
// use the URL to figure out where the FBX is being loaded from
auto filename = fileInfo.fileName();
if (url.isLocalFile()) {
// the FBX is being loaded from the local filesystem
if (fileInfo.exists() && fileInfo.isFile()) {
// found a file at the absolute path in the FBX, return that path
return fbxRelativeFilepath;
} else {
// didn't find a file at the absolute path, assume it is right beside the FBX
// return just the filename as the relative path
return filename.toUtf8();
}
} else {
// this is a remote file, meaning we can't really do anything with the absolute path to the texture
// so assume it will be right beside the fbx
return filename.toUtf8();
}
}
return filename;
}
FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QString& url) {
FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QUrl& url) {
const FBXNode& node = _fbxNode;
QMap<QString, ExtractedMesh> meshes;
QHash<QString, QString> modelIDsToNames;
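
The replacement of fileOnUrl() with fixedTextureFilepath() above hinges on a few QFileInfo/QUrl behaviours that its inline comments call out. A small stand-alone sketch of those behaviours, with made-up paths (none of these names come from the commit):

```cpp
#include <QDebug>
#include <QFileInfo>
#include <QUrl>

int main() {
    // A typical FBX "RelativeFilename" once backslashes have been normalised:
    QFileInfo relative("textures/wood_albedo.png");
    qDebug() << relative.isRelative();   // true: the path is kept as-is

    // An absolute path baked into the FBX by the machine that exported it:
    QFileInfo absolute("C:/art/source/wood_albedo.png");
    // On Windows this reports false (absolute). On UNIX, QFileInfo treats the
    // drive-letter form as relative, which is why the helper above also checks
    // whether the second character is ':' before trusting isRelative().
    qDebug() << absolute.isRelative();
    qDebug() << absolute.fileName();     // "wood_albedo.png", the fallback
                                         // "look next to the FBX" candidate

    // Whether the FBX itself is local decides if the absolute path is even
    // worth probing with exists()/isFile():
    qDebug() << QUrl("file:///home/user/chair.fbx").isLocalFile();    // true
    qDebug() << QUrl("http://example.com/chair.fbx").isLocalFile();   // false
    return 0;
}
```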
@ -833,11 +860,9 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
const int MODEL_UV_SCALING_MIN_SIZE = 2;
const int CROPPING_MIN_SIZE = 4;
if (subobject.name == "RelativeFilename" && subobject.properties.length() >= RELATIVE_FILENAME_MIN_SIZE) {
QByteArray filename = subobject.properties.at(0).toByteArray();
QByteArray filepath = filename.replace('\\', '/');
filename = fileOnUrl(filepath, url);
auto filepath = fixedTextureFilepath(subobject.properties.at(0).toByteArray(), url);
_textureFilepaths.insert(getID(object.properties), filepath);
_textureFilenames.insert(getID(object.properties), filename);
} else if (subobject.name == "TextureName" && subobject.properties.length() >= TEXTURE_NAME_MIN_SIZE) {
// trim the name from the timestamp
QString name = QString(subobject.properties.at(0).toByteArray());
@ -930,7 +955,7 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
QByteArray content;
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "RelativeFilename") {
filepath= subobject.properties.at(0).toByteArray();
filepath = subobject.properties.at(0).toByteArray();
filepath = filepath.replace('\\', '/');
} else if (subobject.name == "Content" && !subobject.properties.isEmpty()) {
@ -1502,7 +1527,7 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
geometry.materials = _fbxMaterials;
// see if any materials have texture children
bool materialsHaveTextures = checkMaterialsHaveTextures(_fbxMaterials, _textureFilenames, _connectionChildMap);
bool materialsHaveTextures = checkMaterialsHaveTextures(_fbxMaterials, _textureFilepaths, _connectionChildMap);
for (QMap<QString, ExtractedMesh>::iterator it = meshes.begin(); it != meshes.end(); it++) {
ExtractedMesh& extracted = it.value();
@ -1547,7 +1572,7 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
materialIndex++;
} else if (_textureFilenames.contains(childID)) {
} else if (_textureFilepaths.contains(childID)) {
FBXTexture texture = getTexture(childID);
for (int j = 0; j < extracted.partMaterialTextures.size(); j++) {
int partTexture = extracted.partMaterialTextures.at(j).second;
@ -1818,13 +1843,13 @@ FBXGeometry* FBXReader::extractFBXGeometry(const QVariantHash& mapping, const QS
return geometryPtr;
}
FBXGeometry* readFBX(const QByteArray& model, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
FBXGeometry* readFBX(const QByteArray& model, const QVariantHash& mapping, const QUrl& url, bool loadLightmaps, float lightmapLevel) {
QBuffer buffer(const_cast<QByteArray*>(&model));
buffer.open(QIODevice::ReadOnly);
return readFBX(&buffer, mapping, url, loadLightmaps, lightmapLevel);
}
FBXGeometry* readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
FBXGeometry* readFBX(QIODevice* device, const QVariantHash& mapping, const QUrl& url, bool loadLightmaps, float lightmapLevel) {
FBXReader reader;
reader._fbxNode = FBXReader::parseFBX(device);
reader._loadLightmaps = loadLightmaps;


@ -268,7 +268,7 @@ class FBXGeometry {
public:
using Pointer = std::shared_ptr<FBXGeometry>;
QString originalURL;
QUrl originalURL;
QString author;
QString applicationName; ///< the name of the application that generated the model
@ -330,11 +330,11 @@ Q_DECLARE_METATYPE(FBXGeometry::Pointer)
/// Reads FBX geometry from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
FBXGeometry* readFBX(const QByteArray& model, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
FBXGeometry* readFBX(const QByteArray& model, const QVariantHash& mapping, const QUrl& url = QUrl(), bool loadLightmaps = true, float lightmapLevel = 1.0f);
/// Reads FBX geometry from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
FBXGeometry* readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
FBXGeometry* readFBX(QIODevice* device, const QVariantHash& mapping, const QUrl& url = QUrl(), bool loadLightmaps = true, float lightmapLevel = 1.0f);
class TextureParam {
public:
@ -402,19 +402,17 @@ public:
FBXNode _fbxNode;
static FBXNode parseFBX(QIODevice* device);
FBXGeometry* extractFBXGeometry(const QVariantHash& mapping, const QString& url);
FBXGeometry* extractFBXGeometry(const QVariantHash& mapping, const QUrl& url);
ExtractedMesh extractMesh(const FBXNode& object, unsigned int& meshIndex);
QHash<QString, ExtractedMesh> meshes;
static void buildModelMesh(FBXMesh& extractedMesh, const QString& url);
static void buildModelMesh(FBXMesh& extractedMesh, const QUrl& url);
FBXTexture getTexture(const QString& textureID);
QHash<QString, QString> _textureNames;
// Hashes the original RelativeFilename of textures
QHash<QString, QByteArray> _textureFilepaths;
// Hashes the place to look for textures, in case they are not inlined
QHash<QString, QByteArray> _textureFilenames;
// Hashes texture content by filepath, in case they are inlined
QHash<QByteArray, QByteArray> _textureContent;
QHash<QString, TextureParam> _textureParams;
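
With readFBX() now taking a QUrl (and FBXGeometry::originalURL following suit), callers such as AnimationReader and GeometryReader elsewhere in this commit pass their `_url` straight through instead of `_url.path()`. Here is a hedged caller-side sketch of the new shape; the header name and the local-file scenario are assumptions, and only the readFBX() signature comes from the hunk above.

```cpp
#include <QFile>
#include <QUrl>
#include <QVariantHash>

#include "FBXReader.h"  // assumed header name for the declarations above

// Load an FBX from the local filesystem (hypothetical helper, not hifi code).
FBXGeometry::Pointer loadLocalFbx(const QString& localPath) {
    QFile file(localPath);
    if (!file.open(QIODevice::ReadOnly)) {
        return nullptr;
    }
    // The URL is threaded through so texture RelativeFilenames can be resolved
    // against where the FBX actually came from (local vs. remote).
    QUrl url = QUrl::fromLocalFile(localPath);
    // Note: readFBX() documents that it throws a QString on parse errors.
    return FBXGeometry::Pointer(readFBX(file.readAll(), QVariantHash(), url));
}
```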


@ -85,12 +85,7 @@ FBXTexture FBXReader::getTexture(const QString& textureID) {
FBXTexture texture;
const QByteArray& filepath = _textureFilepaths.value(textureID);
texture.content = _textureContent.value(filepath);
if (texture.content.isEmpty()) { // the content is not inlined
texture.filename = _textureFilenames.value(textureID);
} else { // use supplied filepath for inlined content
texture.filename = filepath;
}
texture.filename = filepath;
texture.name = _textureNames.value(textureID);
texture.transform.setIdentity();
@ -155,7 +150,7 @@ void FBXReader::consolidateFBXMaterials(const QVariantHash& mapping) {
// FBX files generated by 3DSMax have an intermediate texture parent, apparently
foreach(const QString& childTextureID, _connectionChildMap.values(diffuseTextureID)) {
if (_textureFilenames.contains(childTextureID)) {
if (_textureFilepaths.contains(childTextureID)) {
diffuseTexture = getTexture(diffuseTextureID);
}
}


@ -388,7 +388,7 @@ ExtractedMesh FBXReader::extractMesh(const FBXNode& object, unsigned int& meshIn
return data.extracted;
}
void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QString& url) {
void FBXReader::buildModelMesh(FBXMesh& extractedMesh, const QUrl& url) {
static QString repeatedMessage = LogHandler::getInstance().addRepeatedMessageRegex("buildModelMesh failed -- .*");
unsigned int totalSourceIndices = 0;


@ -60,6 +60,7 @@ bool OffscreenGLCanvas::create(QOpenGLContext* sharedContext) {
bool OffscreenGLCanvas::makeCurrent() {
bool result = _context->makeCurrent(_offscreenSurface);
Q_ASSERT(result);
std::call_once(_reportOnce, [this]{
qCDebug(glLogging) << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
qCDebug(glLogging) << "GL Shader Language Version: " << QString((const char*) glGetString(GL_SHADING_LANGUAGE_VERSION));


@ -278,11 +278,9 @@ void OffscreenQmlSurface::cleanup() {
}
void OffscreenQmlSurface::render() {
#ifdef HIFI_ENABLE_NSIGHT_DEBUG
return;
#endif
if (_paused) {
return;
}
@ -614,7 +612,11 @@ QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QOb
return nullptr;
}
connect(newItem, SIGNAL(sendToScript(QVariant)), this, SIGNAL(fromQml(QVariant)));
//check if the item contains sendToScript signal
int sendToScriptIndex = newItem->metaObject()->indexOfSignal("sendToScript");
if (sendToScriptIndex != -1) {
connect(newItem, SIGNAL(sendToScript(QVariant)), this, SIGNAL(fromQml(QVariant)));
}
// The root item is ready. Associate it with the window.
_rootItem = newItem;
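
The guard above (and the matching one in QmlWindowClass further down) only connects when the loaded QML item actually declares the optional signal, so items without it no longer trigger connect() warnings. A hedged sketch of the pattern with a hypothetical Forwarder receiver; per the Qt documentation, QMetaObject::indexOfSignal() expects a normalized signature including the parameter list, which is how the sketch spells it.

```cpp
#include <QByteArray>
#include <QMetaObject>
#include <QObject>
#include <QVariant>

// Hypothetical stand-in for the C++ side that re-emits messages from QML.
class Forwarder : public QObject {
    Q_OBJECT
signals:
    void fromQml(QVariant message);
};

void connectIfDeclared(QObject* qmlItem, Forwarder* forwarder) {
    const QMetaObject* meta = qmlItem->metaObject();
    // indexOfSignal() matches against a normalized signature such as "sendToScript(QVariant)".
    QByteArray signature = QMetaObject::normalizedSignature("sendToScript(QVariant)");
    if (meta->indexOfSignal(signature.constData()) != -1) {
        // Signal-to-signal connection: Forwarder::fromQml re-emits whatever QML sends.
        QObject::connect(qmlItem, SIGNAL(sendToScript(QVariant)),
                         forwarder, SIGNAL(fromQml(QVariant)));
    }
    // Without the check, connecting to an item that never declared the signal
    // only shows up as a "QObject::connect: No such signal" warning at runtime.
}
```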


@ -72,6 +72,15 @@ Size KtxStorage::getMipFaceSize(uint16 level, uint8 face) const {
}
void Texture::setKtxBacking(const std::string& filename) {
// Check the KTX file for validity before using it as backing storage
{
ktx::StoragePointer storage { new storage::FileStorage(filename.c_str()) };
auto ktxPointer = ktx::KTX::create(storage);
if (!ktxPointer) {
return;
}
}
auto newBacking = std::unique_ptr<Storage>(new KtxStorage(filename));
setStorage(newBacking);
}
@ -185,7 +194,12 @@ ktx::KTXUniquePointer Texture::serialize(const Texture& texture) {
}
Texture* Texture::unserialize(const std::string& ktxfile, TextureUsageType usageType, Usage usage, const Sampler::Desc& sampler) {
ktx::KTXDescriptor descriptor { ktx::KTX::create(ktx::StoragePointer { new storage::FileStorage(ktxfile.c_str()) })->toDescriptor() };
std::unique_ptr<ktx::KTX> ktxPointer = ktx::KTX::create(ktx::StoragePointer { new storage::FileStorage(ktxfile.c_str()) });
if (!ktxPointer) {
return nullptr;
}
ktx::KTXDescriptor descriptor { ktxPointer->toDescriptor() };
const auto& header = descriptor.header;
Format mipFormat = Format::COLOR_BGRA_32;


@ -173,7 +173,7 @@ void GeometryReader::run() {
FBXGeometry::Pointer fbxGeometry;
if (_url.path().toLower().endsWith(".fbx")) {
fbxGeometry.reset(readFBX(_data, _mapping, _url.path()));
fbxGeometry.reset(readFBX(_data, _mapping, _url));
if (fbxGeometry->meshes.size() == 0 && fbxGeometry->joints.size() == 0) {
throw QString("empty geometry, possibly due to an unsupported FBX version");
}


@ -88,8 +88,9 @@ static const std::string DEFAULT_NORMAL_SHADER {
static const std::string DEFAULT_OCCLUSION_SHADER{
"vec4 getFragmentColor() {"
" DeferredFragment frag = unpackDeferredFragmentNoPosition(uv);"
" return vec4(vec3(pow(frag.obscurance, 1.0 / 2.2)), 1.0);"
// " DeferredFragment frag = unpackDeferredFragmentNoPosition(uv);"
// " return vec4(vec3(pow(frag.obscurance, 1.0 / 2.2)), 1.0);"
" return vec4(vec3(pow(texture(specularMap, uv).a, 1.0 / 2.2)), 1.0);"
" }"
};
@ -194,6 +195,18 @@ static const std::string DEFAULT_DIFFUSED_NORMAL_CURVATURE_SHADER{
" }"
};
static const std::string DEFAULT_CURVATURE_OCCLUSION_SHADER{
"vec4 getFragmentColor() {"
" vec4 midNormalCurvature;"
" vec4 lowNormalCurvature;"
" unpackMidLowNormalCurvature(uv, midNormalCurvature, lowNormalCurvature);"
" float ambientOcclusion = curvatureAO(lowNormalCurvature.a * 20.0f) * 0.5f;"
" float ambientOcclusionHF = curvatureAO(midNormalCurvature.a * 8.0f) * 0.5f;"
" ambientOcclusion = min(ambientOcclusion, ambientOcclusionHF);"
" return vec4(vec3(ambientOcclusion), 1.0);"
" }"
};
static const std::string DEFAULT_DEBUG_SCATTERING_SHADER{
"vec4 getFragmentColor() {"
" return vec4(pow(vec3(texture(scatteringMap, uv).xyz), vec3(1.0 / 2.2)), 1.0);"
@ -203,7 +216,7 @@ static const std::string DEFAULT_DEBUG_SCATTERING_SHADER{
static const std::string DEFAULT_AMBIENT_OCCLUSION_SHADER{
"vec4 getFragmentColor() {"
" return vec4(vec3(texture(obscuranceMap, uv).xyz), 1.0);"
" return vec4(vec3(texture(obscuranceMap, uv).x), 1.0);"
// When drawing color " return vec4(vec3(texture(occlusionMap, uv).xyz), 1.0);"
// when drawing normal" return vec4(normalize(texture(occlusionMap, uv).xyz * 2.0 - vec3(1.0)), 1.0);"
" }"
@ -288,6 +301,8 @@ std::string DebugDeferredBuffer::getShaderSourceCode(Mode mode, std::string cust
return DEFAULT_DIFFUSED_CURVATURE_SHADER;
case DiffusedNormalCurvatureMode:
return DEFAULT_DIFFUSED_NORMAL_CURVATURE_SHADER;
case CurvatureOcclusionMode:
return DEFAULT_CURVATURE_OCCLUSION_SHADER;
case ScatteringDebugMode:
return DEFAULT_DEBUG_SCATTERING_SHADER;
case AmbientOcclusionMode:


@ -72,6 +72,7 @@ protected:
NormalCurvatureMode,
DiffusedCurvatureMode,
DiffusedNormalCurvatureMode,
CurvatureOcclusionMode,
ScatteringDebugMode,
AmbientOcclusionMode,
AmbientOcclusionBlurredMode,


@ -67,10 +67,8 @@ DeferredFragment unpackDeferredFragmentNoPosition(vec2 texcoord) {
frag.scattering = 0.0;
unpackModeMetallic(diffuseVal.w, frag.mode, frag.metallic);
//frag.emissive = specularVal.xyz;
frag.obscurance = min(specularVal.w, frag.obscurance);
if (frag.mode == FRAG_MODE_SCATTERING) {
frag.scattering = specularVal.x;
}


@ -55,7 +55,7 @@ void DeferredFramebuffer::allocate() {
_deferredColorTexture = gpu::TexturePointer(gpu::Texture::createRenderBuffer(colorFormat, width, height, gpu::Texture::SINGLE_MIP, defaultSampler));
_deferredNormalTexture = gpu::TexturePointer(gpu::Texture::createRenderBuffer(linearFormat, width, height, gpu::Texture::SINGLE_MIP, defaultSampler));
_deferredSpecularTexture = gpu::TexturePointer(gpu::Texture::createRenderBuffer(colorFormat, width, height, gpu::Texture::SINGLE_MIP, defaultSampler));
_deferredSpecularTexture = gpu::TexturePointer(gpu::Texture::createRenderBuffer(linearFormat, width, height, gpu::Texture::SINGLE_MIP, defaultSampler));
_deferredFramebuffer->setRenderBuffer(0, _deferredColorTexture);
_deferredFramebuffer->setRenderBuffer(1, _deferredNormalTexture);


@ -62,7 +62,7 @@ vec3 evalAmbientSpecularIrradiance(LightAmbient ambient, vec3 fragEyeDir, vec3 f
<@if supportScattering@>
float curvatureAO(in float k) {
return 1.0f - (0.0022f * k * k) + (0.0776f * k) + 0.7369;
return 1.0f - (0.0022f * k * k) + (0.0776f * k) + 0.7369f;
}
<@endif@>
@ -83,13 +83,12 @@ void evalLightingAmbient(out vec3 diffuse, out vec3 specular, LightAmbient ambie
specular = evalAmbientSpecularIrradiance(ambient, eyeDir, normal, roughness) * ambientFresnel;
<@if supportScattering@>
float ambientOcclusion = curvatureAO(lowNormalCurvature.w * 20.0f) * 0.5f;
float ambientOcclusionHF = curvatureAO(midNormalCurvature.w * 8.0f) * 0.5f;
ambientOcclusion = min(ambientOcclusion, ambientOcclusionHF);
obscurance = min(obscurance, ambientOcclusion);
if (scattering * isScatteringEnabled() > 0.0) {
float ambientOcclusion = curvatureAO(lowNormalCurvature.w * 20.0f) * 0.5f;
float ambientOcclusionHF = curvatureAO(midNormalCurvature.w * 8.0f) * 0.5f;
ambientOcclusion = min(ambientOcclusion, ambientOcclusionHF);
obscurance = min(obscurance, ambientOcclusion);
// Diffuse from ambient
diffuse = sphericalHarmonics_evalSphericalLight(getLightAmbientSphere(ambient), lowNormalCurvature.xyz).xyz;


@ -28,6 +28,12 @@
#include "skin_model_shadow_vert.h"
#include "skin_model_normal_map_vert.h"
#include "simple_vert.h"
#include "simple_textured_frag.h"
#include "simple_textured_unlit_frag.h"
#include "simple_transparent_textured_frag.h"
#include "simple_transparent_textured_unlit_frag.h"
#include "model_frag.h"
#include "model_unlit_frag.h"
#include "model_shadow_frag.h"
@ -135,6 +141,7 @@ void initOverlay3DPipelines(ShapePlumber& plumber) {
void initDeferredPipelines(render::ShapePlumber& plumber) {
// Vertex shaders
auto simpleVertex = gpu::Shader::createVertex(std::string(simple_vert));
auto modelVertex = gpu::Shader::createVertex(std::string(model_vert));
auto modelNormalMapVertex = gpu::Shader::createVertex(std::string(model_normal_map_vert));
auto modelLightmapVertex = gpu::Shader::createVertex(std::string(model_lightmap_vert));
@ -145,6 +152,10 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
auto skinModelShadowVertex = gpu::Shader::createVertex(std::string(skin_model_shadow_vert));
// Pixel shaders
auto simplePixel = gpu::Shader::createPixel(std::string(simple_textured_frag));
auto simpleUnlitPixel = gpu::Shader::createPixel(std::string(simple_textured_unlit_frag));
auto simpleTranslucentPixel = gpu::Shader::createPixel(std::string(simple_transparent_textured_frag));
auto simpleTranslucentUnlitPixel = gpu::Shader::createPixel(std::string(simple_transparent_textured_unlit_frag));
auto modelPixel = gpu::Shader::createPixel(std::string(model_frag));
auto modelUnlitPixel = gpu::Shader::createPixel(std::string(model_unlit_frag));
auto modelNormalMapPixel = gpu::Shader::createPixel(std::string(model_normal_map_frag));
@ -167,13 +178,13 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
modelVertex, modelPixel);
addPipeline(
Key::Builder(),
modelVertex, modelPixel);
simpleVertex, simplePixel);
addPipeline(
Key::Builder().withMaterial().withUnlit(),
modelVertex, modelUnlitPixel);
addPipeline(
Key::Builder().withUnlit(),
modelVertex, modelUnlitPixel);
simpleVertex, simpleUnlitPixel);
addPipeline(
Key::Builder().withMaterial().withTangents(),
modelNormalMapVertex, modelNormalMapPixel);
@ -189,13 +200,13 @@ void initDeferredPipelines(render::ShapePlumber& plumber) {
modelVertex, modelTranslucentPixel);
addPipeline(
Key::Builder().withTranslucent(),
modelVertex, modelTranslucentPixel);
simpleVertex, simpleTranslucentPixel);
addPipeline(
Key::Builder().withMaterial().withTranslucent().withUnlit(),
modelVertex, modelTranslucentUnlitPixel);
addPipeline(
Key::Builder().withTranslucent().withUnlit(),
modelVertex, modelTranslucentUnlitPixel);
simpleVertex, simpleTranslucentUnlitPixel);
addPipeline(
Key::Builder().withMaterial().withTranslucent().withTangents(),
modelNormalMapVertex, modelTranslucentPixel);


@ -16,15 +16,20 @@
<@include gpu/Color.slh@>
<$declareColorWheel()$>
uniform sampler2D linearDepthMap;
uniform sampler2D halfLinearDepthMap;
uniform sampler2D halfNormalMap;
uniform sampler2D occlusionMap;
uniform sampler2D occlusionBlurredMap;
uniform sampler2D curvatureMap;
uniform sampler2D diffusedCurvatureMap;
uniform sampler2D scatteringMap;
<$declareDeferredCurvature()$>
float curvatureAO(float k) {
return 1.0f - (0.0022f * k * k) + (0.0776f * k) + 0.7369f;
}
in vec2 uv;
out vec4 outFragColor;


@ -26,15 +26,17 @@ in vec2 _texCoord0;
void main(void) {
vec4 texel = texture(originalTexture, _texCoord0);
float colorAlpha = _color.a;
if (_color.a <= 0.0) {
texel = colorToLinearRGBA(texel);
colorAlpha = -_color.a;
}
const float ALPHA_THRESHOLD = 0.999;
if (_color.a * texel.a < ALPHA_THRESHOLD) {
if (colorAlpha * texel.a < ALPHA_THRESHOLD) {
packDeferredFragmentTranslucent(
normalize(_normal),
_color.a * texel.a,
colorAlpha * texel.a,
_color.rgb * texel.rgb,
DEFAULT_FRESNEL,
DEFAULT_ROUGHNESS);


@ -2,7 +2,7 @@
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// simple.frag
// simple_textured_unlit.frag
// fragment shader
//
// Created by Clément Brisset on 5/29/15.
@ -25,15 +25,17 @@ in vec2 _texCoord0;
void main(void) {
vec4 texel = texture(originalTexture, _texCoord0.st);
float colorAlpha = _color.a;
if (_color.a <= 0.0) {
texel = colorToLinearRGBA(texel);
colorAlpha = -_color.a;
}
const float ALPHA_THRESHOLD = 0.999;
if (_color.a * texel.a < ALPHA_THRESHOLD) {
packDeferredFragmentTranslucent(
if (colorAlpha * texel.a < ALPHA_THRESHOLD) {
packDeferredFragmentTranslucent(
normalize(_normal),
_color.a * texel.a,
colorAlpha * texel.a,
_color.rgb * texel.rgb,
DEFAULT_FRESNEL,
DEFAULT_ROUGHNESS);


@ -0,0 +1,62 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// simple_transparent_textured.slf
// fragment shader
//
// Created by Sam Gateau on 4/3/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Color.slh@>
<@include DeferredBufferWrite.slh@>
<@include DeferredGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlended()$>
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>
// the albedo texture
uniform sampler2D originalTexture;
// the interpolated normal
in vec4 _position;
in vec3 _normal;
in vec4 _color;
in vec2 _texCoord0;
void main(void) {
vec4 texel = texture(originalTexture, _texCoord0.st);
float opacity = _color.a;
if (_color.a <= 0.0) {
texel = colorToLinearRGBA(texel);
opacity = -_color.a;
}
opacity *= texel.a;
vec3 albedo = _color.rgb * texel.rgb;
vec3 fragPosition = _position.xyz;
vec3 fragNormal = normalize(_normal);
TransformCamera cam = getTransformCamera();
_fragColor0 = vec4(evalGlobalLightingAlphaBlended(
cam._viewInverse,
1.0,
1.0,
fragPosition,
fragNormal,
albedo,
DEFAULT_FRESNEL,
0.0,
vec3(0.0f),
DEFAULT_ROUGHNESS,
opacity),
opacity);
}


@ -0,0 +1,36 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// simple_transparent_textured_unlit.slf
// fragment shader
//
// Created by Sam Gateau on 4/3/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Color.slh@>
// the albedo texture
uniform sampler2D originalTexture;
// the interpolated normal
in vec3 _normal;
in vec4 _color;
in vec2 _texCoord0;
layout(location = 0) out vec4 _fragColor0;
void main(void) {
vec4 texel = texture(originalTexture, _texCoord0.st);
float colorAlpha = _color.a;
if (_color.a <= 0.0) {
texel = colorToLinearRGBA(texel);
colorAlpha = -_color.a;
}
_fragColor0 = vec4(_color.rgb * texel.rgb, colorAlpha * texel.a);
}


@ -122,12 +122,15 @@ void QmlWindowClass::initQml(QVariantMap properties) {
object->setProperty(OFFSCREEN_VISIBILITY_PROPERTY, visible);
object->setProperty(SOURCE_PROPERTY, _source);
const QMetaObject *metaObject = _qmlWindow->metaObject();
// Forward messages received from QML on to the script
connect(_qmlWindow, SIGNAL(sendToScript(QVariant)), this, SLOT(qmlToScript(const QVariant&)), Qt::QueuedConnection);
connect(_qmlWindow, SIGNAL(visibleChanged()), this, SIGNAL(visibleChanged()), Qt::QueuedConnection);
connect(_qmlWindow, SIGNAL(resized(QSizeF)), this, SIGNAL(resized(QSizeF)), Qt::QueuedConnection);
connect(_qmlWindow, SIGNAL(moved(QVector2D)), this, SLOT(hasMoved(QVector2D)), Qt::QueuedConnection);
if (metaObject->indexOfSignal("resized") >= 0)
connect(_qmlWindow, SIGNAL(resized(QSizeF)), this, SIGNAL(resized(QSizeF)), Qt::QueuedConnection);
if (metaObject->indexOfSignal("moved") >= 0)
connect(_qmlWindow, SIGNAL(moved(QVector2D)), this, SLOT(hasMoved(QVector2D)), Qt::QueuedConnection);
connect(_qmlWindow, SIGNAL(windowClosed()), this, SLOT(hasClosed()), Qt::QueuedConnection);
});
}


@ -277,8 +277,8 @@ public:
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
static const vr::VRTextureBounds_t leftBounds{ 0, 0, 0.5f, 1 };
static const vr::VRTextureBounds_t rightBounds{ 0.5f, 0, 1, 1 };
vr::Texture_t texture{ (void*)_colors[currentColorBuffer], vr::API_OpenGL, vr::ColorSpace_Auto };
vr::Texture_t texture{ (void*)_colors[currentColorBuffer], vr::TextureType_OpenGL, vr::ColorSpace_Auto };
vr::VRCompositor()->Submit(vr::Eye_Left, &texture, &leftBounds);
vr::VRCompositor()->Submit(vr::Eye_Right, &texture, &rightBounds);
_plugin._presentRate.increment();
@ -422,7 +422,7 @@ bool OpenVrDisplayPlugin::internalActivate() {
withNonPresentThreadLock([&] {
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
_eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
_eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
});
// FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
_cullingProjection = _eyeProjections[0];
@ -639,7 +639,7 @@ void OpenVrDisplayPlugin::hmdPresent() {
_submitThread->waitForPresent();
} else {
GLuint glTexId = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0));
vr::Texture_t vrTexture { (void*)glTexId, vr::API_OpenGL, vr::ColorSpace_Auto };
vr::Texture_t vrTexture { (void*)glTexId, vr::TextureType_OpenGL, vr::ColorSpace_Auto };
vr::VRCompositor()->Submit(vr::Eye_Left, &vrTexture, &OPENVR_TEXTURE_BOUNDS_LEFT);
vr::VRCompositor()->Submit(vr::Eye_Right, &vrTexture, &OPENVR_TEXTURE_BOUNDS_RIGHT);
vr::VRCompositor()->PostPresentHandoff();


@ -114,7 +114,7 @@ void releaseOpenVrSystem() {
// HACK: workaround openvr crash, call submit with an invalid texture, right before VR_Shutdown.
const GLuint INVALID_GL_TEXTURE_HANDLE = -1;
vr::Texture_t vrTexture{ (void*)INVALID_GL_TEXTURE_HANDLE, vr::API_OpenGL, vr::ColorSpace_Auto };
vr::Texture_t vrTexture{ (void*)INVALID_GL_TEXTURE_HANDLE, vr::TextureType_OpenGL, vr::ColorSpace_Auto };
static vr::VRTextureBounds_t OPENVR_TEXTURE_BOUNDS_LEFT{ 0, 0, 0.5f, 1 };
static vr::VRTextureBounds_t OPENVR_TEXTURE_BOUNDS_RIGHT{ 0.5f, 0, 1, 1 };


@ -140,7 +140,7 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle
handleHandController(deltaTime, leftHandDeviceIndex, inputCalibrationData, true);
handleHandController(deltaTime, rightHandDeviceIndex, inputCalibrationData, false);
// collect raw poses
// collect poses for all generic trackers
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
handleTrackedObject(i, inputCalibrationData);
}
@ -171,6 +171,7 @@ void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceInde
uint32_t poseIndex = controller::TRACKED_OBJECT_00 + deviceIndex;
if (_system->IsTrackedDeviceConnected(deviceIndex) &&
_system->GetTrackedDeviceClass(deviceIndex) == vr::TrackedDeviceClass_GenericTracker &&
_nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid &&
poseIndex <= controller::TRACKED_OBJECT_15) {
@ -203,7 +204,7 @@ void ViveControllerManager::InputDevice::handleHandController(float deltaTime, u
handlePoseEvent(deltaTime, inputCalibrationData, mat, linearVelocity, angularVelocity, isLeftHand);
vr::VRControllerState_t controllerState = vr::VRControllerState_t();
if (_system->GetControllerState(deviceIndex, &controllerState)) {
if (_system->GetControllerState(deviceIndex, &controllerState, sizeof(vr::VRControllerState_t))) {
// process each button
for (uint32_t i = 0; i < vr::k_EButton_Max; ++i) {
auto mask = vr::ButtonMaskFromId((vr::EVRButtonId)i);
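
The openvr.cmake hunk near the top moves the bundled SDK from 1.0.3 to 1.0.6, and the three plugin files above track the API changes that ship with it: the texture enum rename, GetProjectionMatrix() dropping its graphics-API argument, and GetControllerState() gaining a size argument. A hedged before/after sketch collecting those call sites in one place; the device index, texture id, and clip distances are placeholders.

```cpp
#include <cstdint>
#include <openvr.h>

void openvr106CallSites(vr::IVRSystem* system, uint32_t glTextureId) {
    // 1.0.3: vr::Texture_t tex { (void*)glTextureId, vr::API_OpenGL, vr::ColorSpace_Auto };
    vr::Texture_t tex { (void*)(uintptr_t)glTextureId, vr::TextureType_OpenGL, vr::ColorSpace_Auto };
    vr::VRCompositor()->Submit(vr::Eye_Left, &tex);

    // 1.0.3: system->GetProjectionMatrix(vr::Eye_Left, nearClip, farClip, vr::API_OpenGL);
    vr::HmdMatrix44_t projection = system->GetProjectionMatrix(vr::Eye_Left, 0.1f, 1000.0f);
    (void)projection;

    // 1.0.3: system->GetControllerState(deviceIndex, &state);
    vr::VRControllerState_t state;
    system->GetControllerState(0, &state, sizeof(vr::VRControllerState_t));
}
```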


@ -18,6 +18,9 @@ Rectangle {
width: parent ? parent.width : 100
height: parent ? parent.height : 100
signal moved(vector2d position);
signal resized(size size);
property var channel;
TextArea {


@ -63,7 +63,8 @@ Column {
"Directional:LightingModel:enableDirectionalLight",
"Point:LightingModel:enablePointLight",
"Spot:LightingModel:enableSpotLight",
"Light Contour:LightingModel:showLightContour"
"Light Contour:LightingModel:showLightContour",
"Shadow:RenderShadowTask:enabled"
]
CheckBox {
text: modelData.split(":")[0]
@ -150,6 +151,7 @@ Column {
ListElement { text: "Mid Normal"; color: "White" }
ListElement { text: "Low Curvature"; color: "White" }
ListElement { text: "Low Normal"; color: "White" }
ListElement { text: "Curvature Occlusion"; color: "White" }
ListElement { text: "Debug Scattering"; color: "White" }
ListElement { text: "Ambient Occlusion"; color: "White" }
ListElement { text: "Ambient Occlusion Blurred"; color: "White" }


@ -131,7 +131,6 @@ function request(options, callback) { // cb(error, responseOfCorrectContentType)
var error = (httpRequest.status !== HTTP_OK) && httpRequest.status.toString() + ':' + httpRequest.statusText,
response = !error && httpRequest.responseText,
contentType = !error && httpRequest.getResponseHeader('content-type');
debug('FIXME REMOVE: server response', options, error, response, contentType);
if (!error && contentType.indexOf('application/json') === 0) { // ignoring charset, etc.
try {
response = JSON.parse(response);
@ -139,6 +138,9 @@ function request(options, callback) { // cb(error, responseOfCorrectContentType)
error = e;
}
}
if (error) {
response = {statusCode: httpRequest.status};
}
callback(error, response);
}
};
@ -164,7 +166,6 @@ function request(options, callback) { // cb(error, responseOfCorrectContentType)
options.headers["Content-type"] = "application/json";
options.body = JSON.stringify(options.body);
}
debug("FIXME REMOVE: final options to send", options);
for (key in options.headers || {}) {
httpRequest.setRequestHeader(key, options.headers[key]);
}
@ -574,6 +575,9 @@ function handleConnectionResponseAndMaybeRepeat(error, response) {
}
} else if (error || (response.status !== 'success')) {
debug('server fail', error, response.status);
if (response && (response.statusCode === 401)) {
error = "All participants must be logged in to connect.";
}
result = error ? {status: 'error', connection: error} : response;
UserActivityLogger.makeUserConnection(connectingId, false, error || response);
connectionRequestCompleted();
@ -603,6 +607,15 @@ function makeConnection(id) {
// probably, in which we do this.
Controller.triggerHapticPulse(HAPTIC_DATA.background.strength, MAKING_CONNECTION_TIMEOUT, handToHaptic(currentHand));
requestBody = {node_id: cleanId(MyAvatar.sessionUUID), proposed_node_id: cleanId(id)}; // for use when repeating
// It would be "simpler" to skip this and just look at the response, but:
// 1. We don't want to bother the metaverse with request that we know will fail.
// 2. We don't want our code here to be dependent on precisely how the metaverse responds (400, 401, etc.)
if (!Account.isLoggedIn()) {
handleConnectionResponseAndMaybeRepeat("401:Unauthorized", {statusCode: 401});
return;
}
// This will immediately set response if successfull (e.g., the other guy got his request in first), or immediate failure,
// and will otherwise poll (using the requestBody we just set).
request({ //


@ -427,21 +427,24 @@ function deleteNotification(index) {
arrays.splice(index, 1);
}
// wraps whole word to newline
function stringDivider(str, slotWidth, spaceReplacer) {
var left, right;
if (str.length > slotWidth && slotWidth > 0) {
left = str.substring(0, slotWidth);
right = str.substring(slotWidth);
return left + spaceReplacer + stringDivider(right, slotWidth, spaceReplacer);
// Trims extra whitespace and breaks into lines of length no more than MAX_LENGTH, breaking at spaces. Trims extra whitespace.
var MAX_LENGTH = 42;
function wordWrap(string) {
var finishedLines = [], currentLine = '';
string.split(/\s/).forEach(function (word) {
var tail = currentLine ? ' ' + word : word;
if ((currentLine.length + tail.length) <= MAX_LENGTH) {
currentLine += tail;
} else {
finishedLines.push(currentLine);
currentLine = word;
}
});
if (currentLine) {
finishedLines.push(currentLine);
}
return str;
}
// formats string to add newline every 43 chars
function wordWrap(str) {
return stringDivider(str, 43.0, "\n");
return finishedLines.join('\n');
}
function update() {


@ -269,13 +269,26 @@ function fromQml(message) { // messages are {method, params}, like json-rpc. See
getConnectionData();
UserActivityLogger.palAction("refresh_connections", "");
break;
case 'removeConnection':
connectionUserName = message.params;
request({
uri: METAVERSE_BASE + '/api/v1/user/connections/' + connectionUserName,
method: 'DELETE'
}, function (error, response) {
if (error || (response.status !== 'success')) {
print("Error: unable to remove connection", connectionUserName, error || response.status);
return;
}
getConnectionData();
});
break
case 'removeFriend':
friendUserName = message.params;
request({
uri: METAVERSE_BASE + '/api/v1/user/friends/' + friendUserName,
method: 'DELETE'
}, function (error, response) {
print(JSON.stringify(response));
if (error || (response.status !== 'success')) {
print("Error: unable to unfriend", friendUserName, error || response.status);
return;