Merge branch 'master' of github.com:highfidelity/hifi into grab-try-timer-instead-of-update

Seth Alves 2017-05-02 09:50:13 -07:00
commit dc569051f9
136 changed files with 4963 additions and 2711 deletions
BUILD.md
BUILD_OSX.md
BUILD_WIN.md
assignment-client/src/octree
cmake
domain-server
interface
libraries


@ -1,7 +1,7 @@
###Dependencies
* [cmake](https://cmake.org/download/) ~> 3.3.2
* [Qt](https://www.qt.io/download-open-source) ~> 5.6.1
* [Qt](https://www.qt.io/download-open-source) ~> 5.6.2
* [OpenSSL](https://www.openssl.org/community/binaries.html)
* IMPORTANT: Use the latest available version of OpenSSL to avoid security vulnerabilities.
* [VHACD](https://github.com/virneo/v-hacd)(clone this repository)(Optional)
@ -46,8 +46,8 @@ This can either be entered directly into your shell session before you build or
The path it needs to be set to will depend on where and how Qt5 was installed. e.g.
export QT_CMAKE_PREFIX_PATH=/usr/local/qt/5.6.1/clang_64/lib/cmake/
export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.6.1-1/lib/cmake
export QT_CMAKE_PREFIX_PATH=/usr/local/qt/5.6.2/clang_64/lib/cmake/
export QT_CMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.6.2/lib/cmake
export QT_CMAKE_PREFIX_PATH=/usr/local/opt/qt5/lib/cmake
####Generating build files
@ -64,7 +64,7 @@ Any variables that need to be set for CMake to find dependencies can be set as E
For example, to pass the QT_CMAKE_PREFIX_PATH variable during build file generation:
cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/qt/5.6.1/lib/cmake
cmake .. -DQT_CMAKE_PREFIX_PATH=/usr/local/qt/5.6.2/lib/cmake
####Finding Dependencies


@ -16,16 +16,12 @@ For OpenSSL installed via homebrew, set OPENSSL_ROOT_DIR:
Note that this uses the version from the homebrew formula at the time of this writing, and the version in the path will likely change.
###Qt
You can use the online installer or the offline installer.
Download and install the [Qt 5.6.2 for macOS](http://download.qt.io/official_releases/qt/5.6/5.6.2/qt-opensource-mac-x64-clang-5.6.2.dmg).
* [Download the online installer](https://www.qt.io/download-open-source/#section-2)
* When it asks you to select components, select the following:
* Qt > Qt 5.6
* [Download the offline installer](https://download.qt.io/official_releases/qt/5.6/5.6.1-1/qt-opensource-mac-x64-clang-5.6.1-1.dmg)
Keep the default components checked when going through the installer.
Once Qt is installed, you need to manually configure the following:
* Set the QT_CMAKE_PREFIX_PATH environment variable to your `Qt5.6.1/5.6/clang_64/lib/cmake/` directory.
* Set the QT_CMAKE_PREFIX_PATH environment variable to your `Qt5.6.2/5.6/clang_64/lib/cmake/` directory.
###Xcode
If Xcode is your editor of choice, you can ask CMake to generate Xcode project files instead of Unix Makefiles.


@ -8,23 +8,23 @@ Note: Newer versions of Visual Studio are not yet compatible.
###Step 2. Installing CMake
Download and install the CMake 3.8.0-rc2 "win64-x64 Installer" from the [CMake Website](https://cmake.org/download/). Make sure "Add CMake to system PATH for all users" is checked when going through the installer.
Download and install the [CMake 3.8.0 win64-x64 Installer](https://cmake.org/files/v3.8/cmake-3.8.0-win64-x64.msi). Make sure "Add CMake to system PATH for all users" is checked when going through the installer.
###Step 3. Installing Qt
Download and install the [Qt 5.6.1 Installer](https://download.qt.io/official_releases/qt/5.6/5.6.1-1/qt-opensource-windows-x86-msvc2013_64-5.6.1-1.exe). Please note that the download file is large (850MB) and may take some time.
Download and install the [Qt 5.6.2 for Windows 64-bit (VS 2013)](http://download.qt.io/official_releases/qt/5.6/5.6.2/qt-opensource-windows-x86-msvc2013_64-5.6.2.exe).
Make sure to select all components when going through the installer.
Keep the default components checked when going through the installer.
###Step 4. Setting Qt Environment Variable
Go to "Control Panel > System > Advanced System Settings > Environment Variables > New..." (or search “Environment Variables” in Start Search).
* Set "Variable name": QT_CMAKE_PREFIX_PATH
* Set "Variable value": `C:\Qt\Qt5.6.1\5.6\msvc2013_64\lib\cmake`
* Set "Variable value": `%QT_DIR%\5.6\msvc2013_64\lib\cmake`
###Step 5. Installing OpenSSL
Download and install the "Win64 OpenSSL v1.0.2k" Installer from [this website](https://slproweb.com/products/Win32OpenSSL.html).
Download and install the [Win64 OpenSSL v1.0.2k Installer](https://slproweb.com/download/Win64OpenSSL-1_0_2k.exe).
###Step 6. Running CMake to Generate Build Files
@ -77,5 +77,5 @@ If not, add the directory where nmake is located to the PATH environment variabl
####Qt is throwing an error
Make sure you have the correct version (5.6.1-1) installed and 'QT_CMAKE_PREFIX_PATH' environment variable is set correctly.
Make sure you have the correct version (5.6.2) installed and 'QT_CMAKE_PREFIX_PATH' environment variable is set correctly.


@ -11,12 +11,14 @@
#include "OctreeServer.h"
#include <QJsonDocument>
#include <QJsonObject>
#include <QTimer>
#include <time.h>
#include <AccountManager.h>
#include <Gzip.h>
#include <HTTPConnection.h>
#include <LogHandler.h>
#include <shared/NetworkUtils.h>
@ -924,6 +926,57 @@ void OctreeServer::handleJurisdictionRequestPacket(QSharedPointer<ReceivedMessag
_jurisdictionSender->queueReceivedPacket(message, senderNode);
}
void OctreeServer::handleOctreeFileReplacement(QSharedPointer<ReceivedMessage> message) {
if (!_isFinished && !_isShuttingDown) {
// these messages are only allowed to come from the domain server, so make sure that is the case
auto nodeList = DependencyManager::get<NodeList>();
if (message->getSenderSockAddr() == nodeList->getDomainHandler().getSockAddr()) {
// it's far cleaner to load up the new content upon server startup
// so here we just store a special file at our persist path
// and then force a stop of the server so that it can pick it up when it relaunches
if (!_persistAbsoluteFilePath.isEmpty()) {
// before we restart the server and make it try and load this data, let's make sure it is valid
auto compressedOctree = message->getMessage();
QByteArray jsonOctree;
// assume we have GZipped content
bool wasCompressed = gunzip(compressedOctree, jsonOctree);
if (!wasCompressed) {
// the source was not compressed, assume we were sent regular JSON data
jsonOctree = compressedOctree;
}
// check the JSON data to verify it is an object
if (QJsonDocument::fromJson(jsonOctree).isObject()) {
if (!wasCompressed) {
// source was not compressed, we compress it before we write it locally
gzip(jsonOctree, compressedOctree);
}
// write the compressed octree data to a special file
auto replacementFilePath = _persistAbsoluteFilePath.append(OctreePersistThread::REPLACEMENT_FILE_EXTENSION);
QFile replacementFile(replacementFilePath);
if (replacementFile.open(QIODevice::WriteOnly) && replacementFile.write(compressedOctree) != -1) {
// we've now written our replacement file, time to take the server down so it can
// process it when it comes back up
qInfo() << "Wrote octree replacement file to" << replacementFilePath << "- stopping server";
setFinished(true);
} else {
qWarning() << "Could not write replacement octree data to file - refusing to process";
}
} else {
qDebug() << "Received replacement octree file that is invalid - refusing to process";
}
} else {
qDebug() << "Cannot perform octree file replacement since current persist file path is not yet known";
}
} else {
qDebug() << "Received an octree file replacement that was not from our domain server - refusing to process";
}
}
}
bool OctreeServer::readOptionBool(const QString& optionName, const QJsonObject& settingsSectionObject, bool& result) {
result = false; // assume it doesn't exist
bool optionAvailable = false;
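For reference, the gunzip-or-fallback validation used in handleOctreeFileReplacement above can be read as one small helper. The following is a minimal standalone sketch, not part of the commit: it assumes the gzip/gunzip helpers from Gzip.h behave exactly as they are called above (gunzip returns false when the payload is not gzip data), and validateOctreeReplacement is a hypothetical name used only for illustration.

// Minimal sketch (hypothetical helper, not in the commit): accept a payload that is
// either gzipped JSON or plain JSON, verify it describes a JSON object, and produce
// the gzipped bytes that would be written to the replacement file.
#include <QByteArray>
#include <QJsonDocument>
#include <Gzip.h> // hifi's gzip/gunzip helpers, as used in handleOctreeFileReplacement

bool validateOctreeReplacement(const QByteArray& payload, QByteArray& compressedOut) {
    QByteArray jsonOctree;
    bool wasCompressed = gunzip(payload, jsonOctree);
    if (!wasCompressed) {
        // the payload was not gzipped - treat it as raw JSON
        jsonOctree = payload;
    }
    if (!QJsonDocument::fromJson(jsonOctree).isObject()) {
        return false; // not a JSON object - refuse to process
    }
    if (wasCompressed) {
        compressedOut = payload;          // already gzipped, keep the original bytes
    } else {
        gzip(jsonOctree, compressedOut);  // compress before writing locally
    }
    return true;
}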
@ -1148,6 +1201,7 @@ void OctreeServer::domainSettingsRequestComplete() {
packetReceiver.registerListener(getMyQueryMessageType(), this, "handleOctreeQueryPacket");
packetReceiver.registerListener(PacketType::OctreeDataNack, this, "handleOctreeDataNackPacket");
packetReceiver.registerListener(PacketType::JurisdictionRequest, this, "handleJurisdictionRequestPacket");
packetReceiver.registerListener(PacketType::OctreeFileReplacement, this, "handleOctreeFileReplacement");
readConfiguration();
@ -1173,25 +1227,25 @@ void OctreeServer::domainSettingsRequestComplete() {
// If persist filename does not exist, let's see if there is one beside the application binary
// If there is, let's copy it over to our target persist directory
QDir persistPath { _persistFilePath };
QString persistAbsoluteFilePath = persistPath.absolutePath();
_persistAbsoluteFilePath = persistPath.absolutePath();
if (persistPath.isRelative()) {
// if the domain settings passed us a relative path, make an absolute path that is relative to the
// default data directory
persistAbsoluteFilePath = QDir(PathUtils::getAppDataFilePath("entities/")).absoluteFilePath(_persistFilePath);
_persistAbsoluteFilePath = QDir(PathUtils::getAppDataFilePath("entities/")).absoluteFilePath(_persistFilePath);
}
static const QString ENTITY_PERSIST_EXTENSION = ".json.gz";
// force the persist file to end with .json.gz
if (!persistAbsoluteFilePath.endsWith(ENTITY_PERSIST_EXTENSION, Qt::CaseInsensitive)) {
persistAbsoluteFilePath += ENTITY_PERSIST_EXTENSION;
if (!_persistAbsoluteFilePath.endsWith(ENTITY_PERSIST_EXTENSION, Qt::CaseInsensitive)) {
_persistAbsoluteFilePath += ENTITY_PERSIST_EXTENSION;
} else {
// make sure the casing of .json.gz is correct
persistAbsoluteFilePath.replace(ENTITY_PERSIST_EXTENSION, ENTITY_PERSIST_EXTENSION, Qt::CaseInsensitive);
_persistAbsoluteFilePath.replace(ENTITY_PERSIST_EXTENSION, ENTITY_PERSIST_EXTENSION, Qt::CaseInsensitive);
}
if (!QFile::exists(persistAbsoluteFilePath)) {
if (!QFile::exists(_persistAbsoluteFilePath)) {
qDebug() << "Persist file does not exist, checking for existence of persist file next to application";
static const QString OLD_DEFAULT_PERSIST_FILENAME = "resources/models.json.gz";
@ -1217,7 +1271,7 @@ void OctreeServer::domainSettingsRequestComplete() {
pathToCopyFrom = oldDefaultPersistPath;
}
QDir persistFileDirectory { QDir::cleanPath(persistAbsoluteFilePath + "/..") };
QDir persistFileDirectory { QDir::cleanPath(_persistAbsoluteFilePath + "/..") };
if (!persistFileDirectory.exists()) {
qDebug() << "Creating data directory " << persistFileDirectory.absolutePath();
@ -1225,15 +1279,15 @@ void OctreeServer::domainSettingsRequestComplete() {
}
if (shouldCopy) {
qDebug() << "Old persist file found, copying from " << pathToCopyFrom << " to " << persistAbsoluteFilePath;
qDebug() << "Old persist file found, copying from " << pathToCopyFrom << " to " << _persistAbsoluteFilePath;
QFile::copy(pathToCopyFrom, persistAbsoluteFilePath);
QFile::copy(pathToCopyFrom, _persistAbsoluteFilePath);
} else {
qDebug() << "No existing persist file found";
}
}
auto persistFileDirectory = QFileInfo(persistAbsoluteFilePath).absolutePath();
auto persistFileDirectory = QFileInfo(_persistAbsoluteFilePath).absolutePath();
if (_backupDirectoryPath.isEmpty()) {
// Use the persist file's directory to store backups
_backupDirectoryPath = persistFileDirectory;
@ -1264,7 +1318,7 @@ void OctreeServer::domainSettingsRequestComplete() {
qDebug() << "Backups will be stored in: " << _backupDirectoryPath;
// now set up PersistThread
_persistThread = new OctreePersistThread(_tree, persistAbsoluteFilePath, _backupDirectoryPath, _persistInterval,
_persistThread = new OctreePersistThread(_tree, _persistAbsoluteFilePath, _backupDirectoryPath, _persistInterval,
_wantBackup, _settings, _debugTimestampNow, _persistAsFileType);
_persistThread->initialize(true);
}


@ -136,6 +136,7 @@ private slots:
void handleOctreeQueryPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleOctreeDataNackPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleJurisdictionRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleOctreeFileReplacement(QSharedPointer<ReceivedMessage> message);
void removeSendThread();
protected:
@ -172,6 +173,7 @@ protected:
QString _statusHost;
QString _persistFilePath;
QString _persistAbsoluteFilePath;
QString _persistAsFileType;
QString _backupDirectoryPath;
int _packetsPerClientPerInterval;


@ -1,47 +0,0 @@
set(EXTERNAL_NAME faceshift)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://hifi-public.s3.amazonaws.com/dependencies/faceshift.zip
CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
LOG_BUILD 1
)
# URL_MD5 1bdcb8a0b8d5b1ede434cc41efade41d
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${INSTALL_DIR}/include CACHE FILEPATH "Path to Faceshift include directory")
set(LIBRARY_DEBUG_PATH "lib/Debug")
set(LIBRARY_RELEASE_PATH "lib/Release")
if (WIN32)
set(LIBRARY_PREFIX "")
set(LIBRARY_EXT "lib")
# use selected configuration in release path when building on Windows
set(LIBRARY_RELEASE_PATH "$<$<CONFIG:RelWithDebInfo>:build/RelWithDebInfo>")
set(LIBRARY_RELEASE_PATH "${LIBRARY_RELEASE_PATH}$<$<CONFIG:MinSizeRel>:build/MinSizeRel>")
set(LIBRARY_RELEASE_PATH "${LIBRARY_RELEASE_PATH}$<$<OR:$<CONFIG:Release>,$<CONFIG:Debug>>:lib/Release>")
elseif (APPLE)
set(LIBRARY_EXT "a")
set(LIBRARY_PREFIX "lib")
if (CMAKE_GENERATOR STREQUAL "Unix Makefiles")
set(LIBRARY_DEBUG_PATH "build")
set(LIBRARY_RELEASE_PATH "build")
endif ()
endif()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG
${INSTALL_DIR}/${LIBRARY_DEBUG_PATH}/${LIBRARY_PREFIX}faceshift.${LIBRARY_EXT} CACHE FILEPATH "Faceshift libraries")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE
${INSTALL_DIR}/${LIBRARY_RELEASE_PATH}/${LIBRARY_PREFIX}faceshift.${LIBRARY_EXT} CACHE FILEPATH "Faceshift libraries")


@ -1,14 +0,0 @@
#
# Copyright 2015 High Fidelity, Inc.
# Created by Bradley Austin Davis on 2015/10/10
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(TARGET_FACESHIFT)
add_dependency_external_projects(faceshift)
find_package(Faceshift REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${FACESHIFT_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${FACESHIFT_LIBRARIES})
add_definitions(-DHAVE_FACESHIFT)
endmacro()


@ -1,26 +0,0 @@
#
# FindFaceshift.cmake
#
# Try to find the Faceshift networking library
#
# You must provide a FACESHIFT_ROOT_DIR which contains lib and include directories
#
# Once done this will define
#
# FACESHIFT_FOUND - system found Faceshift
# FACESHIFT_INCLUDE_DIRS - the Faceshift include directory
# FACESHIFT_LIBRARIES - Link this to use Faceshift
#
# Created on 8/30/2013 by Andrzej Kapolka
# Copyright 2013 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
include(SelectLibraryConfigurations)
select_library_configurations(FACESHIFT)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Faceshift DEFAULT_MSG FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES)
mark_as_advanced(FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES FACESHIFT_SEARCH_DIRS)


@ -0,0 +1,44 @@
<!--#include virtual="header.html"-->
<div class="col-md-10 col-md-offset-1">
<div class="row">
<div class="col-xs-12">
<div class="alert" style="display:none;"></div>
</div>
</div>
<div class="row">
<div class="col-xs-12">
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title">Upload Entities File</h3>
</div>
<form id="upload-form" action="upload" enctype="multipart/form-data" method="post">
<div class="panel-body">
<p>
Upload an entities file (e.g.: models.json.gz) to replace the content of this domain.<br>
Note: <strong>Your domain's content will be replaced by the content you upload</strong>, but the backup files of your domain's content will not immediately be changed.
</p>
<p>
If your domain has any content that you would like to re-use at a later date, save a manual backup of your models.json.gz file, which is usually stored at the following paths:<br>
<pre>C:\Users\[username]\AppData\Roaming\High Fidelity\assignment-client/entities/models.json.gz</pre>
<pre>/Users/[username]/Library/Application Support/High Fidelity/assignment-client/entities/models.json.gz</pre>
<pre>/home/[username]/.local/share/High Fidelity/assignment-client/entities/models.json.gz</pre>
</p>
<br>
<input type="file" name="entities-file" class="form-control-file" accept=".json, .gz">
<br>
</div>
<div class="panel-footer">
<input type="submit" class="btn btn-info" value="Upload">
</div>
</form>
</div>
</div>
</div>
</div>
<!--#include virtual="footer.html"-->
<script src='js/content.js'></script>
<script src='/js/sweetalert.min.js'></script>
<!--#include virtual="page-end.html"-->


@ -0,0 +1,45 @@
$(document).ready(function(){
function showSpinnerAlert(title) {
swal({
title: title,
text: '<div class="spinner" style="color:black;"><div class="bounce1"></div><div class="bounce2"></div><div class="bounce3"></div></div>',
html: true,
showConfirmButton: false,
allowEscapeKey: false
});
}
var frm = $('#upload-form');
frm.submit(function (ev) {
$.ajax({
type: frm.attr('method'),
url: frm.attr('action'),
data: new FormData($(this)[0]),
cache: false,
contentType: false,
processData: false,
success: function (data) {
swal({
title: 'Uploaded',
type: 'success',
text: 'Your Entity Server is restarting to replace its local content with the uploaded file.',
confirmButtonText: 'OK'
})
},
error: function (data) {
swal({
title: '',
type: 'error',
text: 'Your entities file could not be transferred to the Entity Server.</br>Verify that the file is a <i>.json</i> or <i>.json.gz</i> entities file and try again.',
html: true,
confirmButtonText: 'OK',
});
}
});
ev.preventDefault();
showSpinnerAlert("Uploading Entities File");
});
});


@ -36,6 +36,7 @@
<li><a href="/assignment">New Assignment</a></li>
</ul>
</li>
<li><a href="/content/">Content</a></li>
<li><a href="/settings/">Settings</a></li>
</ul>
</div>


@ -99,7 +99,7 @@
<script src='/js/underscore-keypath.min.js'></script>
<script src='/js/bootbox.min.js'></script>
<script src='js/bootstrap-switch.min.js'></script>
<script src='js/sweetalert.min.js'></script>
<script src='/js/sweetalert.min.js'></script>
<script src='js/settings.js'></script>
<script src='js/form2js.min.js'></script>
<script src='js/sha256.js'></script>


@ -1633,6 +1633,15 @@ QString pathForAssignmentScript(const QUuid& assignmentUUID) {
return directory.absoluteFilePath(uuidStringWithoutCurlyBraces(assignmentUUID));
}
QString DomainServer::pathForRedirect(QString path) const {
// make sure the passed path has a leading slash
if (!path.startsWith('/')) {
path.insert(0, '/');
}
return "http://" + _hostname + ":" + QString::number(_httpManager.serverPort()) + path;
}
const QString URI_OAUTH = "/oauth";
bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url, bool skipSubHandler) {
const QString JSON_MIME_TYPE = "application/json";
@ -1640,6 +1649,7 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
const QString URI_ASSIGNMENT = "/assignment";
const QString URI_NODES = "/nodes";
const QString URI_SETTINGS = "/settings";
const QString URI_ENTITY_FILE_UPLOAD = "/content/upload";
const QString UUID_REGEX_STRING = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}";
@ -1869,6 +1879,25 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
// respond with a 200 code for successful upload
connection->respond(HTTPConnection::StatusCode200);
return true;
} else if (url.path() == URI_ENTITY_FILE_UPLOAD) {
// this is an entity file upload, ask the HTTPConnection to parse the data
QList<FormData> formData = connection->parseFormData();
Headers redirectHeaders;
if (formData.size() > 0 && formData[0].second.size() > 0) {
// invoke our method to hand the new octree file off to the octree server
QMetaObject::invokeMethod(this, "handleOctreeFileReplacement",
Qt::QueuedConnection, Q_ARG(QByteArray, formData[0].second));
// respond with a 200 for success
connection->respond(HTTPConnection::StatusCode200);
} else {
// respond with a 400 for failure
connection->respond(HTTPConnection::StatusCode400);
}
return true;
}
} else if (connection->requestOperation() == QNetworkAccessManager::DeleteOperation) {
@ -2159,8 +2188,7 @@ Headers DomainServer::setupCookieHeadersFromProfileReply(QNetworkReply* profileR
cookieHeaders.insert("Set-Cookie", cookieString.toUtf8());
// redirect the user back to the homepage so they can present their cookie and be authenticated
QString redirectString = "http://" + _hostname + ":" + QString::number(_httpManager.serverPort());
cookieHeaders.insert("Location", redirectString.toUtf8());
cookieHeaders.insert("Location", pathForRedirect().toUtf8());
return cookieHeaders;
}
@ -2560,3 +2588,20 @@ void DomainServer::setupGroupCacheRefresh() {
_metaverseGroupCacheTimer->start(REFRESH_GROUPS_INTERVAL_MSECS);
}
}
void DomainServer::handleOctreeFileReplacement(QByteArray octreeFile) {
// enumerate the nodes and find any octree type servers with active sockets
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
limitedNodeList->eachMatchingNode([](const SharedNodePointer& node) {
return node->getType() == NodeType::EntityServer && node->getActiveSocket();
}, [&octreeFile, limitedNodeList](const SharedNodePointer& octreeNode) {
// setup a packet to send to this octree server with the new octree file data
auto octreeFilePacketList = NLPacketList::create(PacketType::OctreeFileReplacement, QByteArray(), true, true);
octreeFilePacketList->write(octreeFile);
qDebug() << "Sending an octree file replacement of" << octreeFile.size() << "bytes to" << octreeNode;
limitedNodeList->sendPacketList(std::move(octreeFilePacketList), *octreeNode);
});
}
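Outside of the new web form, the /content/upload route can be exercised with any HTTP client that sends multipart form data. Below is a minimal Qt sketch, not part of the commit: the host, the 40100 port, and the local models.json.gz path are placeholder assumptions, and the "entities-file" field name is taken from the content.html form above.

// Hypothetical client-side sketch: POST an entities file to the domain server's
// /content/upload route as multipart form data.
#include <QCoreApplication>
#include <QDebug>
#include <QFile>
#include <QHttpMultiPart>
#include <QHttpPart>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QUrl>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    // placeholder host/port for the domain server's HTTP interface
    QNetworkRequest request(QUrl("http://localhost:40100/content/upload"));

    auto multiPart = new QHttpMultiPart(QHttpMultiPart::FormDataType);
    QHttpPart filePart;
    // "entities-file" matches the field name used by the content.html form
    filePart.setHeader(QNetworkRequest::ContentDispositionHeader,
                       QVariant("form-data; name=\"entities-file\"; filename=\"models.json.gz\""));
    filePart.setHeader(QNetworkRequest::ContentTypeHeader, QVariant("application/gzip"));

    auto file = new QFile("models.json.gz"); // placeholder path to an entities file
    if (!file->open(QIODevice::ReadOnly)) {
        qWarning() << "could not open entities file";
        return 1;
    }
    filePart.setBodyDevice(file);
    file->setParent(multiPart); // file is deleted along with the multiPart
    multiPart->append(filePart);

    QNetworkAccessManager manager;
    QNetworkReply* reply = manager.post(request, multiPart);
    multiPart->setParent(reply); // multiPart is deleted along with the reply

    QObject::connect(reply, &QNetworkReply::finished, [&]() {
        // the handler above responds 200 when form data was present, 400 otherwise
        qDebug() << "upload finished, HTTP status"
                 << reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt();
        app.quit();
    });
    return app.exec();
}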


@ -100,6 +100,8 @@ private slots:
void handleSuccessfulICEServerAddressUpdate(QNetworkReply& requestReply);
void handleFailedICEServerAddressUpdate(QNetworkReply& requestReply);
void handleOctreeFileReplacement(QByteArray octreeFile);
signals:
void iceServerChanged();
void userConnected();
@ -161,6 +163,8 @@ private:
void setupGroupCacheRefresh();
QString pathForRedirect(QString path = QString()) const;
SubnetList _acSubnetWhitelist;
DomainGatekeeper _gatekeeper;


@ -194,7 +194,7 @@ link_hifi_libraries(
recording fbx networking model-networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer avatars-renderer ui auto-updater
controllers plugins image
controllers plugins image trackers
ui-plugins display-plugins input-plugins
${NON_ANDROID_LIBRARIES}
)
@ -202,7 +202,6 @@ link_hifi_libraries(
# include the binary directory of render-utils for shader includes
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_BINARY_DIR}/libraries/render-utils")
#fixme find a way to express faceshift as a plugin
target_bullet()
target_opengl()
@ -210,10 +209,6 @@ if (NOT ANDROID)
target_glew()
endif ()
if (WIN32 OR APPLE)
target_faceshift()
endif()
# perform standard include and linking for found externals
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})


@ -50,6 +50,12 @@
"type": "inverseKinematics",
"data": {
"targets": [
{
"jointName": "Hips",
"positionVar": "hipsPosition",
"rotationVar": "hipsRotation",
"typeVar": "hipsType"
},
{
"jointName": "RightHand",
"positionVar": "rightHandPosition",
@ -75,10 +81,10 @@
"typeVar": "leftFootType"
},
{
"jointName": "Neck",
"positionVar": "neckPosition",
"rotationVar": "neckRotation",
"typeVar": "neckType"
"jointName": "Spine2",
"positionVar": "spine2Position",
"rotationVar": "spine2Rotation",
"typeVar": "spine2Type"
},
{
"jointName": "Head",
@ -91,20 +97,27 @@
"children": []
},
{
"id": "manipulatorOverlay",
"id": "hipsManipulatorOverlay",
"type": "overlay",
"data": {
"alpha": 1.0,
"boneSet": "spineOnly"
"alpha": 0.0,
"boneSet": "hipsOnly"
},
"children": [
{
"id": "spineLean",
"id": "hipsManipulator",
"type": "manipulator",
"data": {
"alpha": 0.0,
"alphaVar": "hipsManipulatorAlpha",
"joints": [
{ "type": "absoluteRotation", "jointName": "Spine", "var": "lean" }
{
"jointName": "Hips",
"rotationType": "absolute",
"translationType": "absolute",
"rotationVar": "hipsManipulatorRotation",
"translationVar": "hipsManipulatorPosition"
}
]
},
"children": []


@ -61,6 +61,11 @@
{ "from": "Standard.RightHand", "to": "Actions.RightHand" },
{ "from": "Standard.LeftFoot", "to": "Actions.LeftFoot" },
{ "from": "Standard.RightFoot", "to": "Actions.RightFoot" }
{ "from": "Standard.RightFoot", "to": "Actions.RightFoot" },
{ "from": "Standard.Hips", "to": "Actions.Hips" },
{ "from": "Standard.Spine2", "to": "Actions.Spine2" },
{ "from": "Standard.Head", "to": "Actions.Head" }
]
}


@ -19,7 +19,7 @@
function shouldRaiseKeyboard() {
var nodeName = document.activeElement.nodeName;
var nodeType = document.activeElement.type;
if (nodeName === "INPUT" && (nodeType === "text" || nodeType === "number" || nodeType === "password")
if (nodeName === "INPUT" && ["email", "number", "password", "tel", "text", "url"].indexOf(nodeType) !== -1
|| document.activeElement.nodeName === "TEXTAREA") {
return true;
} else {


@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
.st1{fill:#EF3B4E;}
</style>
<g id="Layer_2">
</g>
<g id="Layer_1_1_">
<path class="st0" d="M47.2,41.3l-9.1-9.1c-0.8-0.8-1.9-1.1-3-1l-2.4-2.4c1.8-2.6,2.8-5.7,2.8-9c0-8.9-7.2-16.1-16.1-16.1
S3.3,11,3.3,19.8c0,8.9,7.2,16.1,16.1,16.1c4.1,0,7.8-1.5,10.6-4l2.2,2.2c-0.2,1.1,0.1,2.2,1,3l9.1,9.1c1.4,1.4,3.6,1.4,4.9,0
C48.5,44.9,48.5,42.7,47.2,41.3z M19.4,32.2c-6.8,0-12.3-5.5-12.3-12.3S12.6,7.6,19.4,7.6s12.3,5.5,12.3,12.3
C31.8,26.6,26.2,32.2,19.4,32.2z"/>
</g>
<circle class="st1" cx="43.5" cy="6.5" r="5.9"/>
</svg>



@ -0,0 +1,132 @@
import QtQuick 2.5
import QtWebEngine 1.1
import QtWebChannel 1.0
import "../controls-uit" as HiFiControls
import HFTabletWebEngineProfile 1.0
Item {
property alias url: root.url
property alias scriptURL: root.userScriptUrl
property alias eventBridge: eventBridgeWrapper.eventBridge
property alias canGoBack: root.canGoBack;
property var goBack: root.goBack;
property alias urlTag: root.urlTag
property bool keyboardEnabled: true // FIXME - Keyboard HMD only: Default to false
property bool keyboardRaised: false
property bool punctuationMode: false
// FIXME - Keyboard HMD only: Make Interface either set keyboardRaised property directly in OffscreenQmlSurface
// or provide HMDinfo object to QML in RenderableWebEntityItem and do the following.
/*
onKeyboardRaisedChanged: {
keyboardEnabled = HMDinfo.active;
}
*/
QtObject {
id: eventBridgeWrapper
WebChannel.id: "eventBridgeWrapper"
property var eventBridge;
}
property alias viewProfile: root.profile
WebEngineView {
id: root
objectName: "webEngineView"
x: 0
y: 0
width: parent.width
height: keyboardEnabled && keyboardRaised ? parent.height - keyboard.height : parent.height
profile: HFTabletWebEngineProfile {
id: webviewProfile
storageName: "qmlTabletWebEngine"
}
property string userScriptUrl: ""
// creates a global EventBridge object.
WebEngineScript {
id: createGlobalEventBridge
sourceCode: eventBridgeJavaScriptToInject
injectionPoint: WebEngineScript.DocumentCreation
worldId: WebEngineScript.MainWorld
}
// detects when to raise and lower virtual keyboard
WebEngineScript {
id: raiseAndLowerKeyboard
injectionPoint: WebEngineScript.Deferred
sourceUrl: resourceDirectoryUrl + "/html/raiseAndLowerKeyboard.js"
worldId: WebEngineScript.MainWorld
}
// User script.
WebEngineScript {
id: userScript
sourceUrl: root.userScriptUrl
injectionPoint: WebEngineScript.DocumentReady // DOM ready but page load may not be finished.
worldId: WebEngineScript.MainWorld
}
property string urlTag: "noDownload=false";
userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard, userScript ]
property string newUrl: ""
webChannel.registeredObjects: [eventBridgeWrapper]
Component.onCompleted: {
// Ensure the JS from the web-engine makes it to our logging
root.javaScriptConsoleMessage.connect(function(level, message, lineNumber, sourceID) {
console.log("Web Entity JS message: " + sourceID + " " + lineNumber + " " + message);
});
root.profile.httpUserAgent = "Mozilla/5.0 Chrome (HighFidelityInterface)";
}
onFeaturePermissionRequested: {
grantFeaturePermission(securityOrigin, feature, true);
}
onLoadingChanged: {
keyboardRaised = false;
punctuationMode = false;
keyboard.resetShiftMode(false);
// Required to support clicking on "hifi://" links
if (WebEngineView.LoadStartedStatus == loadRequest.status) {
var url = loadRequest.url.toString();
url = (url.indexOf("?") >= 0) ? url + urlTag : url + "?" + urlTag;
if (urlHandler.canHandleUrl(url)) {
if (urlHandler.handleUrl(url)) {
root.stop();
}
}
}
}
onNewViewRequested:{
// desktop is not defined for web-entities or tablet
if (typeof desktop !== "undefined") {
desktop.openBrowserWindow(request, profile);
} else {
tabletRoot.openBrowserWindow(request, profile);
}
}
}
HiFiControls.Keyboard {
id: keyboard
raised: parent.keyboardEnabled && parent.keyboardRaised
numeric: parent.punctuationMode
anchors {
left: parent.left
right: parent.right
bottom: parent.bottom
}
}
}


@ -23,8 +23,13 @@ Item {
property bool keyboardRaised: false
property bool punctuationMode: false
property bool isDesktop: false
property bool removingPage: false
property bool loadingPage: false
property string initialPage: ""
property bool startingUp: true
property alias webView: webview
property alias profile: webview.profile
property bool remove: false
property var urlList: []
property var forwardList: []
property int currentPage: -1 // used as a model for repeater
@ -76,11 +81,21 @@ Item {
id: displayUrl
color: hifi.colors.baseGray
font.pixelSize: 12
verticalAlignment: Text.AlignLeft
anchors {
top: nav.bottom
horizontalCenter: parent.horizontalCenter;
left: parent.left
leftMargin: 20
}
}
MouseArea {
anchors.fill: parent
preventStealing: true
propagateComposedEvents: true
}
}
ListModel {
@ -96,20 +111,24 @@ Item {
}
function goBack() {
if (webview.canGoBack && !isUrlLoaded(webview.url)) {
if (currentPage > 0) {
removingPage = true;
pagesModel.remove(currentPage);
}
if (webview.canGoBack) {
forwardList.push(webview.url);
webview.goBack();
} else if (currentPage > 0) {
removingPage = true;
pagesModel.remove(currentPage);
} else if (web.urlList.length > 0) {
var url = web.urlList.pop();
loadUrl(url);
} else if (web.forwardList.length > 0) {
var url = web.forwardList.pop();
loadUrl(url);
web.forwardList = [];
}
}
function closeWebEngine() {
if (remove) {
web.destroy();
return;
}
if (parentStackItem) {
parentStackItem.pop();
} else {
@ -137,32 +156,42 @@ Item {
view.setEnabled(true);
}
function loadUrl(url) {
webview.url = url
web.url = webview.url;
web.address = webview.url;
}
function onInitialPage(url) {
return (url === webview.url);
}
function urlAppend(url) {
if (removingPage) {
removingPage = false;
return;
}
var lurl = decodeURIComponent(url)
if (lurl[lurl.length - 1] !== "/") {
lurl = lurl + "/"
}
if (currentPage === -1 || (pagesModel.get(currentPage).webUrl !== lurl && !timer.running)) {
timer.start();
pagesModel.append({webUrl: lurl});
}
web.urlList.push(url);
setBackButtonStatus();
}
onCurrentPageChanged: {
if (currentPage >= 0 && currentPage < pagesModel.count) {
timer.start();
webview.url = pagesModel.get(currentPage).webUrl;
web.url = webview.url;
web.address = webview.url;
function setBackButtonStatus() {
if (web.urlList.length > 0 || webview.canGoBack) {
back.enabledColor = hifi.colors.darkGray;
back.enabled = true;
} else {
back.enabledColor = hifi.colors.baseGray;
back.enabled = false;
}
}
onUrlChanged: {
gotoPage(url)
loadUrl(url);
if (startingUp) {
web.initialPage = webview.url;
startingUp = false;
}
}
QtObject {
@ -170,18 +199,7 @@ Item {
WebChannel.id: "eventBridgeWrapper"
property var eventBridge;
}
Timer {
id: timer
interval: 200
running: false
repeat: false
onTriggered: timer.stop();
}
WebEngineView {
id: webview
objectName: "webEngineView"
@ -221,6 +239,7 @@ Item {
worldId: WebEngineScript.MainWorld
}
property string urlTag: "noDownload=false";
userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard, userScript ]
property string newUrl: ""
@ -247,9 +266,7 @@ Item {
keyboard.resetShiftMode(false);
// Required to support clicking on "hifi://" links
if (WebEngineView.LoadStartedStatus == loadRequest.status) {
urlAppend(loadRequest.url.toString());
loadingPage = true;
var url = loadRequest.url.toString();
var url = loadRequest.url.toString();
if (urlHandler.canHandleUrl(url)) {
if (urlHandler.handleUrl(url)) {
root.stop();
@ -260,9 +277,19 @@ Item {
if (WebEngineView.LoadFailedStatus == loadRequest.status) {
console.log(" Tablet WebEngineView failed to laod url: " + loadRequest.url.toString());
}
}
if (WebEngineView.LoadSucceededStatus == loadRequest.status) {
web.address = webview.url;
if (startingUp) {
web.initialPage = webview.url;
startingUp = false;
}
}
}
onNewViewRequested: {
var currentUrl = webview.url;
urlAppend(currentUrl);
request.openIn(webview);
}
}


@ -113,7 +113,7 @@ Item {
if (typeof desktop !== "undefined") {
desktop.openBrowserWindow(request, profile);
} else {
console.log("onNewViewRequested: desktop not defined");
tabletRoot.openBrowserWindow(request, profile);
}
}
}


@ -17,7 +17,7 @@ import QtGraphicalEffects 1.0
import "toolbars"
import "../styles-uit"
Rectangle {
Item {
id: root;
property string userName: "";
property string placeName: "";
@ -31,10 +31,11 @@ Rectangle {
property bool drillDownToPlace: false;
property bool showPlace: isConcurrency;
property string messageColor: hifi.colors.blueAccent;
property string messageColor: isAnnouncement ? "white" : hifi.colors.blueAccent;
property string timePhrase: pastTime(timestamp);
property int onlineUsers: 0;
property bool isConcurrency: action === 'concurrency';
property bool isAnnouncement: action === 'announcement';
property bool isStacked: !isConcurrency && drillDownToPlace;
property int textPadding: 10;
@ -44,7 +45,7 @@ Rectangle {
property int textSizeSmall: 18;
property int stackShadowNarrowing: 5;
property string defaultThumbnail: Qt.resolvedUrl("../../images/default-domain.gif");
property int shadowHeight: 20;
property int shadowHeight: 10;
HifiConstants { id: hifi }
function pastTime(timestamp) { // Answer a descriptive string
@ -69,6 +70,44 @@ Rectangle {
}
property bool hasGif: imageUrl.indexOf('.gif') === (imageUrl.length - 4);
function pluralize(count, singular, optionalPlural) {
return (count === 1) ? singular : (optionalPlural || (singular + "s"));
}
DropShadow {
visible: isStacked;
anchors.fill: shadow1;
source: shadow1;
verticalOffset: 2;
radius: 4;
samples: 9;
color: hifi.colors.baseGrayShadow;
}
Rectangle {
id: shadow1;
visible: isStacked;
width: parent.width - stackShadowNarrowing;
height: shadowHeight;
anchors {
top: parent.bottom;
horizontalCenter: parent.horizontalCenter;
}
}
DropShadow {
anchors.fill: base;
source: base;
verticalOffset: 2;
radius: 4;
samples: 9;
color: hifi.colors.baseGrayShadow;
}
Rectangle {
id: base;
color: "white";
anchors.fill: parent;
}
AnimatedImage {
id: animation;
// Always visible, to drive loading, but initially covered up by lobby during load.
@ -80,7 +119,7 @@ Rectangle {
id: lobby;
visible: !hasGif || (animation.status !== Image.Ready);
width: parent.width - (isConcurrency ? 0 : (2 * smallMargin));
height: parent.height - (isConcurrency ? 0 : smallMargin);
height: parent.height -(isAnnouncement ? smallMargin : messageHeight) - (isConcurrency ? 0 : smallMargin);
source: thumbnail || defaultThumbnail;
fillMode: Image.PreserveAspectCrop;
anchors {
@ -95,41 +134,13 @@ Rectangle {
}
}
}
Rectangle {
id: shadow1;
visible: isStacked;
width: parent.width - stackShadowNarrowing;
height: shadowHeight / 2;
anchors {
top: parent.bottom;
horizontalCenter: parent.horizontalCenter;
}
gradient: Gradient {
GradientStop { position: 0.0; color: "gray" }
GradientStop { position: 1.0; color: "white" }
}
}
Rectangle {
id: shadow2;
visible: isStacked;
width: shadow1.width - stackShadowNarrowing;
height: shadowHeight / 2;
anchors {
top: shadow1.bottom;
horizontalCenter: parent.horizontalCenter;
}
gradient: Gradient {
GradientStop { position: 0.0; color: "gray" }
GradientStop { position: 1.0; color: "white" }
}
}
property int dropHorizontalOffset: 0;
property int dropVerticalOffset: 1;
property int dropRadius: 2;
property int dropSamples: 9;
property int dropSpread: 0;
DropShadow {
visible: true;
visible: showPlace; // Do we have to check for whatever the modern equivalent is for desktop.gradientsSupported?
source: place;
anchors.fill: place;
horizontalOffset: dropHorizontalOffset;
@ -139,12 +150,12 @@ Rectangle {
color: hifi.colors.black;
spread: dropSpread;
}
RalewayLight {
RalewaySemiBold {
id: place;
visible: showPlace;
text: placeName;
color: hifi.colors.white;
size: 38;
size: textSize;
elide: Text.ElideRight; // requires constrained width
anchors {
top: parent.top;
@ -154,56 +165,68 @@ Rectangle {
}
}
Rectangle {
id: rectRow
z: 1
width: message.width + (users.visible ? users.width + bottomRow.spacing : 0)
+ (icon.visible ? icon.width + bottomRow.spacing: 0) + bottomRow.spacing;
height: messageHeight + 1;
radius: 25
anchors {
bottom: parent.bottom
left: parent.left
leftMargin: textPadding
bottomMargin: textPadding
id: lozenge;
visible: isAnnouncement;
color: hifi.colors.redHighlight;
anchors.fill: infoRow;
radius: lozenge.height / 2.0;
border.width: lozengeHot.containsMouse ? 4 : 0;
border.color: "white";
}
Row {
id: infoRow;
Image {
id: icon;
source: isAnnouncement ? "../../images/Announce-Blast.svg" : "../../images/snap-icon.svg";
width: 40;
height: 40;
visible: ((action === 'snapshot') || isAnnouncement) && (messageHeight >= 40);
}
Row {
id: bottomRow
FiraSansRegular {
id: users;
visible: isConcurrency;
text: onlineUsers;
size: textSize;
color: messageColor;
anchors.verticalCenter: message.verticalCenter;
}
Image {
id: icon;
source: "../../images/snap-icon.svg"
width: 40;
height: 40;
visible: action === 'snapshot';
}
RalewayRegular {
id: message;
text: isConcurrency ? ((onlineUsers === 1) ? "person" : "people") : (drillDownToPlace ? "snapshots" : ("by " + userName));
size: textSizeSmall;
color: messageColor;
elide: Text.ElideRight; // requires a width to be specified`
anchors {
bottom: parent.bottom;
bottomMargin: parent.spacing;
}
}
spacing: textPadding;
height: messageHeight;
FiraSansRegular {
id: users;
visible: isConcurrency || isAnnouncement;
text: onlineUsers;
size: textSize;
color: messageColor;
anchors.verticalCenter: message.verticalCenter;
}
RalewayRegular {
id: message;
visible: !isAnnouncement;
text: isConcurrency ? pluralize(onlineUsers, "person", "people") : (drillDownToPlace ? "snapshots" : ("by " + userName));
size: textSizeSmall;
color: messageColor;
elide: Text.ElideRight; // requires a width to be specified`
width: root.width - textPadding
- (icon.visible ? icon.width + parent.spacing : 0)
- (users.visible ? users.width + parent.spacing : 0)
- (actionIcon.width + (2 * smallMargin));
anchors {
bottom: parent.bottom;
left: parent.left;
leftMargin: 4
bottomMargin: parent.spacing;
}
}
Column {
visible: isAnnouncement;
RalewayRegular {
text: pluralize(onlineUsers, "connection") + " "; // hack padding
size: textSizeSmall;
color: messageColor;
}
RalewayRegular {
text: pluralize(onlineUsers, "is here now", "are here now");
size: textSizeSmall * 0.7;
color: messageColor;
}
}
spacing: textPadding;
height: messageHeight;
anchors {
bottom: parent.bottom;
left: parent.left;
leftMargin: textPadding;
bottomMargin: isAnnouncement ? textPadding : 0;
}
}
// These two can be supplied to provide hover behavior.
// For example, AddressBarDialog provides functions that set the current list view item
@ -218,39 +241,37 @@ Rectangle {
onEntered: hoverThunk();
onExited: unhoverThunk();
}
Rectangle {
id: rectIcon
z: 1
width: 32
height: 32
radius: 15
StateImage {
id: actionIcon;
visible: !isAnnouncement;
imageURL: "../../images/info-icon-2-state.svg";
size: 30;
buttonState: messageArea.containsMouse ? 1 : 0;
anchors {
bottom: parent.bottom;
right: parent.right;
bottomMargin: textPadding;
rightMargin: textPadding;
}
StateImage {
id: actionIcon;
imageURL: "../../images/info-icon-2-state.svg";
size: 32;
buttonState: messageArea.containsMouse ? 1 : 0;
anchors {
bottom: parent.bottom;
right: parent.right;
//margins: smallMargin;
}
margins: smallMargin;
}
}
function go() {
goFunction(drillDownToPlace ? ("/places/" + placeName) : ("/user_stories/" + storyId));
}
MouseArea {
id: messageArea;
width: rectIcon.width;
height: rectIcon.height;
anchors.fill: rectIcon
visible: !isAnnouncement;
width: parent.width;
height: messageHeight;
anchors.top: lobby.bottom;
acceptedButtons: Qt.LeftButton;
onClicked: goFunction(drillDownToPlace ? ("/places/" + placeName) : ("/user_stories/" + storyId));
onClicked: go();
hoverEnabled: true;
}
MouseArea {
id: lozengeHot;
visible: lozenge.visible;
anchors.fill: lozenge;
acceptedButtons: Qt.LeftButton;
onClicked: go();
hoverEnabled: true;
}
}


@ -0,0 +1,247 @@
//
// Feed.qml
// qml/hifi
//
// Displays a particular type of feed
//
// Created by Howard Stearns on 4/18/2017
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0
import QtQuick 2.5
import QtGraphicalEffects 1.0
import "toolbars"
import "../styles-uit"
Column {
id: root;
visible: false;
property int cardWidth: 212;
property int cardHeight: 152;
property int textPadding: 10;
property int smallMargin: 4;
property int messageHeight: 40;
property int textSize: 24;
property int textSizeSmall: 18;
property int stackShadowNarrowing: 5;
property int stackedCardShadowHeight: 4;
property int labelSize: 20;
property string metaverseServerUrl: '';
property string actions: 'snapshot';
// sendToScript doesn't get wired until after everything gets created. So we have to queue fillDestinations on nextTick.
Component.onCompleted: delay.start();
property string labelText: actions;
property string filter: '';
onFilterChanged: filterChoicesByText();
property var goFunction: null;
property var rpc: null;
HifiConstants { id: hifi }
ListModel { id: suggestions; }
function resolveUrl(url) {
return (url.indexOf('/') === 0) ? (metaverseServerUrl + url) : url;
}
function makeModelData(data) { // create a new obj from data
// ListModel elements will only ever have those properties that are defined by the first obj that is added.
// So here we make sure that we have all the properties we need, regardless of whether it is a place data or user story.
var name = data.place_name,
tags = data.tags || [data.action, data.username],
description = data.description || "",
thumbnail_url = data.thumbnail_url || "";
if (actions === 'concurrency,snapshot') {
// A temporary hack for simulating announcements. We won't use this in production, but if requested, we'll use this data like announcements.
data.details.connections = 4;
data.action = 'announcement';
}
return {
place_name: name,
username: data.username || "",
path: data.path || "",
created_at: data.created_at || "",
action: data.action || "",
thumbnail_url: resolveUrl(thumbnail_url),
image_url: resolveUrl(data.details && data.details.image_url),
metaverseId: (data.id || "").toString(), // Some are strings from server while others are numbers. Model objects require uniformity.
tags: tags,
description: description,
online_users: data.details.connections || data.details.concurrency || 0,
drillDownToPlace: false,
searchText: [name].concat(tags, description || []).join(' ').toUpperCase()
}
}
property var allStories: [];
property var placeMap: ({}); // Used for making stacks.
property int requestId: 0;
function handleError(url, error, data, cb) { // cb(error) and answer truthy if needed, else falsey
if (!error && (data.status === 'success')) {
return;
}
if (!error) { // Create a message from the data
error = data.status + ': ' + data.error;
}
if (typeof(error) === 'string') { // Make a proper Error object
error = new Error(error);
}
error.message += ' in ' + url; // Include the url.
cb(error);
return true;
}
function getUserStoryPage(pageNumber, cb, cb1) { // cb(error) after all pages of domain data have been added to model
// If supplied, cb1 will be run after the first page IFF it is not the last, for responsiveness.
var options = [
'now=' + new Date().toISOString(),
'include_actions=' + actions,
'restriction=' + (Account.isLoggedIn() ? 'open,hifi' : 'open'),
'require_online=true',
'protocol=' + encodeURIComponent(AddressManager.protocolVersion()),
'page=' + pageNumber
];
var url = metaverseBase + 'user_stories?' + options.join('&');
var thisRequestId = ++requestId;
rpc('request', url, function (error, data) {
if (thisRequestId !== requestId) {
error = 'stale';
}
if (handleError(url, error, data, cb)) {
return; // abandon stale requests
}
allStories = allStories.concat(data.user_stories.map(makeModelData));
if ((data.current_page < data.total_pages) && (data.current_page <= 10)) { // just 10 pages = 100 stories for now
if ((pageNumber === 1) && cb1) {
cb1();
}
return getUserStoryPage(pageNumber + 1, cb);
}
cb();
});
}
property var delay: Timer { // No setTimeout or nextTick in QML.
interval: 0;
onTriggered: fillDestinations();
}
function fillDestinations() { // Public
function report(label, error) {
console.log(label, actions, error || 'ok', allStories.length, 'filtered to', suggestions.count);
}
var filter = makeFilteredStoryProcessor(), counter = 0;
allStories = [];
suggestions.clear();
placeMap = {};
getUserStoryPage(1, function (error) {
allStories.slice(counter).forEach(filter);
report('user stories update', error);
root.visible = !!suggestions.count;
}, function () { // If there's more than a page, put what we have in the model right away, keeping track of how many are processed.
allStories.forEach(function (story) {
counter++;
filter(story);
root.visible = !!suggestions.count;
});
report('user stories');
});
}
function identity(x) {
return x;
}
function makeFilteredStoryProcessor() { // answer a function(storyData) that adds it to suggestions if it matches
var words = filter.toUpperCase().split(/\s+/).filter(identity);
function suggestable(story) {
if (story.action === 'snapshot') {
return true;
}
return story.place_name !== AddressManager.placename; // Not our entry, but do show other entry points to current domain.
}
function matches(story) {
if (!words.length) {
return suggestable(story);
}
return words.every(function (word) {
return story.searchText.indexOf(word) >= 0;
});
}
function addToSuggestions(place) {
var collapse = ((actions === 'concurrency,snapshot') && (place.action !== 'concurrency')) || (place.action === 'announcement');
if (collapse) {
var existing = placeMap[place.place_name];
if (existing) {
existing.drillDownToPlace = true;
return;
}
}
suggestions.append(place);
if (collapse) {
placeMap[place.place_name] = suggestions.get(suggestions.count - 1);
} else if (place.action === 'concurrency') {
suggestions.get(suggestions.count - 1).drillDownToPlace = true; // Don't change raw place object (in allStories).
}
}
return function (story) {
if (matches(story)) {
addToSuggestions(story);
}
};
}
function filterChoicesByText() {
suggestions.clear();
placeMap = {};
allStories.forEach(makeFilteredStoryProcessor());
root.visible = !!suggestions.count;
}
RalewayBold {
id: label;
text: labelText;
color: hifi.colors.blueAccent;
size: labelSize;
}
ListView {
id: scroll;
model: suggestions;
orientation: ListView.Horizontal;
highlightMoveDuration: -1;
highlightMoveVelocity: -1;
highlight: Rectangle { color: "transparent"; border.width: 4; border.color: hifiStyleConstants.colors.primaryHighlight; z: 1; }
currentIndex: -1;
spacing: 12;
width: parent.width;
height: cardHeight + stackedCardShadowHeight;
delegate: Card {
id: card;
width: cardWidth;
height: cardHeight;
goFunction: root.goFunction;
userName: model.username;
placeName: model.place_name;
hifiUrl: model.place_name + model.path;
thumbnail: model.thumbnail_url;
imageUrl: model.image_url;
action: model.action;
timestamp: model.created_at;
onlineUsers: model.online_users;
storyId: model.metaverseId;
drillDownToPlace: model.drillDownToPlace;
textPadding: root.textPadding;
smallMargin: root.smallMargin;
messageHeight: root.messageHeight;
textSize: root.textSize;
textSizeSmall: root.textSizeSmall;
stackShadowNarrowing: root.stackShadowNarrowing;
shadowHeight: root.stackedCardShadowHeight;
hoverThunk: function () { scroll.currentIndex = index; }
unhoverThunk: function () { scroll.currentIndex = -1; }
}
}
}


@ -30,18 +30,33 @@ StackView {
width: parent !== null ? parent.width : undefined
height: parent !== null ? parent.height : undefined
property var eventBridge;
property var allStories: [];
property int cardWidth: 460;
property int cardHeight: 320;
property int cardWidth: 212;
property int cardHeight: 152;
property string metaverseBase: addressBarDialog.metaverseServerUrl + "/api/v1/";
property var tablet: null;
// This version only implements rpc(method, parameters, callback(error, result)) calls initiated from here, not initiated from .js, nor "notifications".
property var rpcCalls: ({});
property var rpcCounter: 0;
signal sendToScript(var message);
function rpc(method, parameters, callback) {
rpcCalls[rpcCounter] = callback;
var message = {method: method, params: parameters, id: rpcCounter++, jsonrpc: "2.0"};
sendToScript(message);
}
function fromScript(message) {
var callback = rpcCalls[message.id];
if (!callback) {
console.log('No callback for message fromScript', JSON.stringify(message));
return;
}
delete rpcCalls[message.id];
callback(message.error, message.result);
}
Component { id: tabletWebView; TabletWebView {} }
Component.onCompleted: {
fillDestinations();
updateLocationText(false);
fillDestinations();
addressLine.focus = !HMD.active;
root.parentChanged.connect(center);
center();
@ -57,7 +72,7 @@ StackView {
}
function resetAfterTeleport() {
function resetAfterTeleport() {
//storyCardFrame.shown = root.shown = false;
}
function goCard(targetString) {
@ -157,7 +172,7 @@ StackView {
left: parent.left;
}
HifiStyles.RalewayLight {
HifiStyles.RalewayRegular {
id: notice;
font.pixelSize: hifi.fonts.pixelSize * 0.7;
anchors {
@ -190,7 +205,6 @@ StackView {
}
font.pixelSize: hifi.fonts.pixelSize * 0.75
onTextChanged: {
filterChoicesByText();
updateLocationText(text.length > 0);
}
onAccepted: {
@ -225,109 +239,80 @@ StackView {
}
}
}
Rectangle {
id: topBar
height: 37
color: hifiStyleConstants.colors.white
anchors.right: parent.right
anchors.rightMargin: 0
anchors.left: parent.left
anchors.leftMargin: 0
anchors.topMargin: 0
anchors.top: addressBar.bottom
Row {
id: thing
spacing: 5 * hifi.layout.spacing
anchors {
top: parent.top;
left: parent.left
leftMargin: 25
}
TabletTextButton {
id: allTab;
text: "ALL";
property string includeActions: 'snapshot,concurrency';
selected: allTab === selectedTab;
action: tabSelect;
}
TabletTextButton {
id: placeTab;
text: "PLACES";
property string includeActions: 'concurrency';
selected: placeTab === selectedTab;
action: tabSelect;
}
TabletTextButton {
id: snapTab;
text: "SNAP";
property string includeActions: 'snapshot';
selected: snapTab === selectedTab;
action: tabSelect;
id: bgMain;
anchors {
top: addressBar.bottom;
bottom: parent.keyboardEnabled ? keyboard.top : parent.bottom;
left: parent.left;
right: parent.right;
}
Rectangle {
id: addressShadow;
width: parent.width;
height: 42 - 33;
gradient: Gradient {
GradientStop { position: 0.0; color: "gray" }
GradientStop { position: 1.0; color: "white" }
}
}
}
Rectangle {
id: bgMain
color: hifiStyleConstants.colors.white
anchors.bottom: parent.keyboardEnabled ? keyboard.top : parent.bottom
anchors.bottomMargin: 0
anchors.right: parent.right
anchors.rightMargin: 0
anchors.left: parent.left
anchors.leftMargin: 0
anchors.top: topBar.bottom
anchors.topMargin: 0
ListModel { id: suggestions }
ListView {
id: scroll
property int stackedCardShadowHeight: 0;
clip: true
spacing: 14
Rectangle { // Column margins require QtQuick 2.7, which we don't use yet.
id: column;
property real pad: 10;
width: bgMain.width - column.pad;
height: stack.height;
color: "transparent";
anchors {
bottom: parent.bottom
top: parent.top
left: parent.left
right: parent.right
leftMargin: 10
left: parent.left;
leftMargin: column.pad;
top: addressShadow.bottom;
topMargin: column.pad;
}
model: suggestions
orientation: ListView.Vertical
delegate: Card {
width: cardWidth;
height: cardHeight;
goFunction: goCard;
userName: model.username;
placeName: model.place_name;
hifiUrl: model.place_name + model.path;
thumbnail: model.thumbnail_url;
imageUrl: model.image_url;
action: model.action;
timestamp: model.created_at;
onlineUsers: model.online_users;
storyId: model.metaverseId;
drillDownToPlace: model.drillDownToPlace;
shadowHeight: scroll.stackedCardShadowHeight;
hoverThunk: function () { scroll.currentIndex = index; }
unhoverThunk: function () { scroll.currentIndex = -1; }
Column {
id: stack;
width: column.width;
spacing: 33 - places.labelSize;
Feed {
id: happeningNow;
width: parent.width;
cardWidth: 312 + (2 * 4);
cardHeight: 163 + (2 * 4);
metaverseServerUrl: addressBarDialog.metaverseServerUrl;
labelText: 'HAPPENING NOW';
actions: 'announcement';
filter: addressLine.text;
goFunction: goCard;
rpc: root.rpc;
}
Feed {
id: places;
width: parent.width;
cardWidth: 210;
cardHeight: 110 + messageHeight;
messageHeight: 44;
metaverseServerUrl: addressBarDialog.metaverseServerUrl;
labelText: 'PLACES';
actions: 'concurrency';
filter: addressLine.text;
goFunction: goCard;
rpc: root.rpc;
}
Feed {
id: snapshots;
width: parent.width;
cardWidth: 143 + (2 * 4);
cardHeight: 75 + messageHeight + 4;
messageHeight: 32;
textPadding: 6;
metaverseServerUrl: addressBarDialog.metaverseServerUrl;
labelText: 'RECENT SNAPS';
actions: 'snapshot';
filter: addressLine.text;
goFunction: goCard;
rpc: root.rpc;
}
}
highlightMoveDuration: -1;
highlightMoveVelocity: -1;
highlight: Rectangle { color: "transparent"; border.width: 4; border.color: hifiStyleConstants.colors.blueHighlight; z: 1; }
}
}
@ -365,175 +350,13 @@ StackView {
}
function getRequest(url, cb) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects.
// TODO: make available to other .qml.
var request = new XMLHttpRequest();
// QT bug: apparently doesn't handle onload. Workaround using readyState.
request.onreadystatechange = function () {
var READY_STATE_DONE = 4;
var HTTP_OK = 200;
if (request.readyState >= READY_STATE_DONE) {
var error = (request.status !== HTTP_OK) && request.status.toString() + ':' + request.statusText,
response = !error && request.responseText,
contentType = !error && request.getResponseHeader('content-type');
if (!error && contentType.indexOf('application/json') === 0) {
try {
response = JSON.parse(response);
} catch (e) {
error = e;
}
}
cb(error, response);
}
};
request.open("GET", url, true);
request.send();
}
function identity(x) {
return x;
}
function handleError(url, error, data, cb) { // cb(error) and answer truthy if needed, else falsey
if (!error && (data.status === 'success')) {
return;
}
if (!error) { // Create a message from the data
error = data.status + ': ' + data.error;
}
if (typeof(error) === 'string') { // Make a proper Error object
error = new Error(error);
}
error.message += ' in ' + url; // Include the url.
cb(error);
return true;
}
function resolveUrl(url) {
return (url.indexOf('/') === 0) ? (addressBarDialog.metaverseServerUrl + url) : url;
}
function makeModelData(data) { // create a new obj from data
// ListModel elements will only ever have those properties that are defined by the first obj that is added.
// So here we make sure that we have all the properties we need, regardless of whether it is a place data or user story.
var name = data.place_name,
tags = data.tags || [data.action, data.username],
description = data.description || "",
thumbnail_url = data.thumbnail_url || "";
return {
place_name: name,
username: data.username || "",
path: data.path || "",
created_at: data.created_at || "",
action: data.action || "",
thumbnail_url: resolveUrl(thumbnail_url),
image_url: resolveUrl(data.details.image_url),
metaverseId: (data.id || "").toString(), // Some are strings from server while others are numbers. Model objects require uniformity.
tags: tags,
description: description,
online_users: data.details.concurrency || 0,
drillDownToPlace: false,
searchText: [name].concat(tags, description || []).join(' ').toUpperCase()
}
}
function suggestable(place) {
if (place.action === 'snapshot') {
return true;
}
return (place.place_name !== AddressManager.placename); // Not our entry, but do show other entry points to current domain.
}
property var selectedTab: allTab;
function tabSelect(textButton) {
selectedTab = textButton;
fillDestinations();
}
property var placeMap: ({});
function addToSuggestions(place) {
var collapse = allTab.selected && (place.action !== 'concurrency');
if (collapse) {
var existing = placeMap[place.place_name];
if (existing) {
existing.drillDownToPlace = true;
return;
}
}
suggestions.append(place);
if (collapse) {
placeMap[place.place_name] = suggestions.get(suggestions.count - 1);
} else if (place.action === 'concurrency') {
suggestions.get(suggestions.count - 1).drillDownToPlace = true; // Don't change raw place object (in allStories).
}
}
property int requestId: 0;
function getUserStoryPage(pageNumber, cb) { // cb(error) after all pages of domain data have been added to model
var options = [
'now=' + new Date().toISOString(),
'include_actions=' + selectedTab.includeActions,
'restriction=' + (Account.isLoggedIn() ? 'open,hifi' : 'open'),
'require_online=true',
'protocol=' + encodeURIComponent(AddressManager.protocolVersion()),
'page=' + pageNumber
];
var url = metaverseBase + 'user_stories?' + options.join('&');
var thisRequestId = ++requestId;
getRequest(url, function (error, data) {
if ((thisRequestId !== requestId) || handleError(url, error, data, cb)) {
return;
}
var stories = data.user_stories.map(function (story) { // explicit single-argument function
return makeModelData(story, url);
});
allStories = allStories.concat(stories);
stories.forEach(makeFilteredPlaceProcessor());
if ((data.current_page < data.total_pages) && (data.current_page <= 10)) { // just 10 pages = 100 stories for now
return getUserStoryPage(pageNumber + 1, cb);
}
cb();
});
}
function makeFilteredPlaceProcessor() { // answer a function(placeData) that adds it to suggestions if it matches
var words = addressLine.text.toUpperCase().split(/\s+/).filter(identity),
data = allStories;
function matches(place) {
if (!words.length) {
return suggestable(place);
}
return words.every(function (word) {
return place.searchText.indexOf(word) >= 0;
});
}
return function (place) {
if (matches(place)) {
addToSuggestions(place);
}
};
}
function filterChoicesByText() {
suggestions.clear();
placeMap = {};
allStories.forEach(makeFilteredPlaceProcessor());
}
function fillDestinations() {
allStories = [];
suggestions.clear();
placeMap = {};
getUserStoryPage(1, function (error) {
console.log('user stories query', error || 'ok', allStories.length);
});
}
function updateLocationText(enteringAddress) {
if (enteringAddress) {
notice.text = "Go To a place, @user, path, or network address:";
notice.color = hifiStyleConstants.colors.baseGrayHighlight;
} else {
notice.text = AddressManager.isConnected ? "Your location:" : "Not Connected";
notice.color = AddressManager.isConnected ? hifiStyleConstants.colors.baseGrayHighlight : hifiStyleConstants.colors.redHighlight;
notice.text = AddressManager.isConnected ? "YOUR LOCATION" : "NOT CONNECTED";
notice.color = AddressManager.isConnected ? hifiStyleConstants.colors.blueHighlight : hifiStyleConstants.colors.redHighlight;
// Display hostname, which includes ip address, localhost, and other non-placenames.
location.text = (AddressManager.placename || AddressManager.hostname || '') + (AddressManager.pathname ? AddressManager.pathname.match(/\/[^\/]+/)[0] : '');
}

View file

@ -1,7 +1,9 @@
import QtQuick 2.0
import Hifi 1.0
import QtQuick.Controls 1.4
import HFTabletWebEngineProfile 1.0
import "../../dialogs"
import "../../controls"
Item {
id: tabletRoot
@ -11,6 +13,7 @@ Item {
property var rootMenu;
property var openModal: null;
property var openMessage: null;
property var openBrowser: null;
property string subMenu: ""
signal showDesktop();
property bool shown: true
@ -87,13 +90,18 @@ Item {
loader.item.gotoPreviousApp = true;
}
}
function loadWebBase() {
loader.source = "";
loader.source = "TabletWebView.qml";
}
function returnToPreviousApp() {
tabletApps.remove(currentApp);
var isWebPage = tabletApps.get(currentApp).isWebUrl;
if (isWebPage) {
var webUrl = tabletApps.get(currentApp).appWebUrl;
var scriptUrl = tabletApps.get(currentApp).scriptUrl;
var webUrl = tabletApps.get(currentApp).appWebUrl;
var scriptUrl = tabletApps.get(currentApp).scriptUrl;
loadSource("TabletWebView.qml");
loadWebUrl(webUrl, scriptUrl);
} else {
@ -101,6 +109,16 @@ Item {
}
}
function openBrowserWindow(request, profile) {
var component = Qt.createComponent("../../controls/TabletWebView.qml");
var newWindow = component.createObject(tabletRoot);
newWindow.eventBridge = tabletRoot.eventBridge;
newWindow.remove = true;
newWindow.profile = profile;
request.openIn(newWindow.webView);
tabletRoot.openBrowser = newWindow;
}
function loadWebUrl(url, injectedJavaScriptUrl) {
tabletApps.clear();
loader.item.url = url;
@ -180,6 +198,11 @@ Item {
openModal.destroy();
openModal = null;
}
if (openBrowser) {
openBrowser.destroy();
openBrowser = null;
}
}
}

View file

@ -3,7 +3,7 @@ import QtWebEngine 1.2
import "../../controls" as Controls
Controls.WebView {
Controls.TabletWebScreen {
}

View file

@ -38,6 +38,11 @@ Windows.ScrollingWindow {
loader.source = url;
}
function loadWebBase() {
loader.source = "";
loader.source = "WindowWebView.qml";
}
function loadWebUrl(url, injectedJavaScriptUrl) {
loader.item.url = url;
loader.item.scriptURL = injectedJavaScriptUrl;

View file

@ -0,0 +1,10 @@
import QtQuick 2.0
import QtWebEngine 1.2
import "../../controls" as Controls
Controls.WebView {
}

View file

@ -128,6 +128,7 @@
#include <QmlWebWindowClass.h>
#include <Preferences.h>
#include <display-plugins/CompositorHelper.h>
#include <trackers/EyeTracker.h>
#include "AudioClient.h"
@ -136,8 +137,6 @@
#include "avatar/ScriptAvatar.h"
#include "CrashHandler.h"
#include "devices/DdeFaceTracker.h"
#include "devices/EyeTracker.h"
#include "devices/Faceshift.h"
#include "devices/Leapmotion.h"
#include "DiscoverabilityManager.h"
#include "GLCanvas.h"
@ -480,7 +479,6 @@ bool setupEssentials(int& argc, char** argv) {
DependencyManager::set<ModelCache>();
DependencyManager::set<ScriptCache>();
DependencyManager::set<SoundCache>();
DependencyManager::set<Faceshift>();
DependencyManager::set<DdeFaceTracker>();
DependencyManager::set<EyeTracker>();
DependencyManager::set<AudioClient>();
@ -535,6 +533,7 @@ bool setupEssentials(int& argc, char** argv) {
DependencyManager::set<OctreeStatsProvider>(nullptr, qApp->getOcteeSceneStats());
DependencyManager::set<AvatarBookmarks>();
DependencyManager::set<LocationBookmarks>();
DependencyManager::set<Snapshot>();
return previousSessionCrashed;
}
@ -797,7 +796,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
connect(&domainHandler, SIGNAL(resetting()), SLOT(resettingDomain()));
connect(&domainHandler, SIGNAL(connectedToDomain(const QString&)), SLOT(updateWindowTitle()));
connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(updateWindowTitle()));
connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(clearDomainOctreeDetails()));
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &Application::clearDomainAvatars);
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, [this]() {
getOverlays().deleteOverlay(getTabletScreenID());
getOverlays().deleteOverlay(getTabletHomeButtonID());
@ -1209,10 +1208,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
this->installEventFilter(this);
// initialize our face trackers after loading the menu settings
auto faceshiftTracker = DependencyManager::get<Faceshift>();
faceshiftTracker->init();
connect(faceshiftTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
#ifdef HAVE_DDE
auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
ddeTracker->init();
@ -2052,6 +2047,7 @@ void Application::initializeUi() {
rootContext->setContextProperty("Scene", DependencyManager::get<SceneScriptingInterface>().data());
rootContext->setContextProperty("Render", _renderEngine->getConfiguration().get());
rootContext->setContextProperty("Reticle", getApplicationCompositor().getReticleInterface());
rootContext->setContextProperty("Snapshot", DependencyManager::get<Snapshot>().data());
rootContext->setContextProperty("ApplicationCompositor", &getApplicationCompositor());
@ -3624,20 +3620,13 @@ ivec2 Application::getMouse() const {
}
FaceTracker* Application::getActiveFaceTracker() {
auto faceshift = DependencyManager::get<Faceshift>();
auto dde = DependencyManager::get<DdeFaceTracker>();
return (dde->isActive() ? static_cast<FaceTracker*>(dde.data()) :
(faceshift->isActive() ? static_cast<FaceTracker*>(faceshift.data()) : nullptr));
return dde->isActive() ? static_cast<FaceTracker*>(dde.data()) : nullptr;
}
FaceTracker* Application::getSelectedFaceTracker() {
FaceTracker* faceTracker = nullptr;
#ifdef HAVE_FACESHIFT
if (Menu::getInstance()->isOptionChecked(MenuOption::Faceshift)) {
faceTracker = DependencyManager::get<Faceshift>().data();
}
#endif
#ifdef HAVE_DDE
if (Menu::getInstance()->isOptionChecked(MenuOption::UseCamera)) {
faceTracker = DependencyManager::get<DdeFaceTracker>().data();
@ -3647,15 +3636,8 @@ FaceTracker* Application::getSelectedFaceTracker() {
}
void Application::setActiveFaceTracker() const {
#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
#endif
#ifdef HAVE_FACESHIFT
auto faceshiftTracker = DependencyManager::get<Faceshift>();
faceshiftTracker->setIsMuted(isMuted);
faceshiftTracker->setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !isMuted);
#endif
#ifdef HAVE_DDE
bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
bool isUsingDDE = Menu::getInstance()->isOptionChecked(MenuOption::UseCamera);
Menu::getInstance()->getActionForOption(MenuOption::BinaryEyelidControl)->setVisible(isUsingDDE);
Menu::getInstance()->getActionForOption(MenuOption::CoupleEyelids)->setVisible(isUsingDDE);
@ -4373,7 +4355,13 @@ void Application::update(float deltaTime) {
controller::InputCalibrationData calibrationData = {
myAvatar->getSensorToWorldMatrix(),
createMatFromQuatAndPos(myAvatar->getOrientation(), myAvatar->getPosition()),
myAvatar->getHMDSensorMatrix()
myAvatar->getHMDSensorMatrix(),
myAvatar->getCenterEyeCalibrationMat(),
myAvatar->getHeadCalibrationMat(),
myAvatar->getSpine2CalibrationMat(),
myAvatar->getHipsCalibrationMat(),
myAvatar->getLeftFootCalibrationMat(),
myAvatar->getRightFootCalibrationMat()
};
InputPluginPointer keyboardMousePlugin;
@ -4421,6 +4409,13 @@ void Application::update(float deltaTime) {
controller::Pose rightFootPose = userInputMapper->getPoseState(controller::Action::RIGHT_FOOT);
myAvatar->setFootControllerPosesInSensorFrame(leftFootPose.transform(avatarToSensorMatrix), rightFootPose.transform(avatarToSensorMatrix));
controller::Pose hipsPose = userInputMapper->getPoseState(controller::Action::HIPS);
controller::Pose spine2Pose = userInputMapper->getPoseState(controller::Action::SPINE2);
myAvatar->setSpineControllerPosesInSensorFrame(hipsPose.transform(avatarToSensorMatrix), spine2Pose.transform(avatarToSensorMatrix));
controller::Pose headPose = userInputMapper->getPoseState(controller::Action::HEAD);
myAvatar->setHeadControllerPoseInSensorFrame(headPose.transform(avatarToSensorMatrix));
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
updateDialogs(deltaTime); // update various stats dialogs if present
@ -5131,7 +5126,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
}
void Application::resetSensors(bool andReload) {
DependencyManager::get<Faceshift>()->reset();
DependencyManager::get<DdeFaceTracker>()->reset();
DependencyManager::get<EyeTracker>()->reset();
getActiveDisplayPlugin()->resetSensors();
@ -5177,7 +5171,6 @@ void Application::clearDomainOctreeDetails() {
qCDebug(interfaceapp) << "Clearing domain octree details...";
resetPhysicsReadyInformation();
getMyAvatar()->setAvatarEntityDataChanged(true); // to recreate worn entities
// reset our node to stats and node to jurisdiction maps... since these must be changing...
_entityServerJurisdictions.withWriteLock([&] {
@ -5196,14 +5189,18 @@ void Application::clearDomainOctreeDetails() {
skyStage->setBackgroundMode(model::SunSkyStage::SKY_DEFAULT);
_recentlyClearedDomain = true;
DependencyManager::get<AvatarManager>()->clearOtherAvatars();
DependencyManager::get<AnimationCache>()->clearUnusedResources();
DependencyManager::get<ModelCache>()->clearUnusedResources();
DependencyManager::get<SoundCache>()->clearUnusedResources();
DependencyManager::get<TextureCache>()->clearUnusedResources();
}
void Application::clearDomainAvatars() {
getMyAvatar()->setAvatarEntityDataChanged(true); // to recreate worn entities
DependencyManager::get<AvatarManager>()->clearOtherAvatars();
}
void Application::domainChanged(const QString& domainHostname) {
updateWindowTitle();
// disable physics until we have enough information about our new location to not cause craziness.
@ -5273,33 +5270,8 @@ void Application::nodeKilled(SharedNodePointer node) {
if (node->getType() == NodeType::AudioMixer) {
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "audioMixerKilled");
} else if (node->getType() == NodeType::EntityServer) {
QUuid nodeUUID = node->getUUID();
// see if this is the first we've heard of this node...
_entityServerJurisdictions.withReadLock([&] {
if (_entityServerJurisdictions.find(nodeUUID) == _entityServerJurisdictions.end()) {
return;
}
auto rootCode = _entityServerJurisdictions[nodeUUID].getRootOctalCode();
VoxelPositionSize rootDetails;
voxelDetailsForCode(rootCode.get(), rootDetails);
qCDebug(interfaceapp, "model server going away...... v[%f, %f, %f, %f]",
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
});
// If the model server is going away, remove it from our jurisdiction map so we don't send voxels to a dead server
_entityServerJurisdictions.withWriteLock([&] {
_entityServerJurisdictions.erase(_entityServerJurisdictions.find(nodeUUID));
});
// also clean up scene stats for that server
_octreeServerSceneStats.withWriteLock([&] {
if (_octreeServerSceneStats.find(nodeUUID) != _octreeServerSceneStats.end()) {
_octreeServerSceneStats.erase(nodeUUID);
}
});
// we lost an entity server, clear all of the domain octree details
clearDomainOctreeDetails();
} else if (node->getType() == NodeType::AvatarMixer) {
// our avatar mixer has gone away - clear the hash of avatars
DependencyManager::get<AvatarManager>()->clearOtherAvatars();
@ -5504,6 +5476,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
scriptEngine->registerGlobalObject("Menu", MenuScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("Stats", Stats::getInstance());
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("Snapshot", DependencyManager::get<Snapshot>().data());
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
scriptEngine->registerGlobalObject("AudioScope", DependencyManager::get<AudioScope>().data());
@ -6448,7 +6421,7 @@ void Application::takeSnapshot(bool notify, bool includeAnimated, float aspectRa
// Get a screenshot and save it
QString path = Snapshot::saveSnapshot(getActiveDisplayPlugin()->getScreenshot(aspectRatio));
// If we're not doing an animated snapshot as well...
if (!includeAnimated || !(SnapshotAnimated::alsoTakeAnimatedSnapshot.get())) {
if (!includeAnimated) {
// Tell the dependency manager that the capture of the still snapshot has taken place.
emit DependencyManager::get<WindowScriptingInterface>()->stillSnapshotTaken(path, notify);
} else {

View file

@ -409,6 +409,7 @@ public slots:
private slots:
void showDesktop();
void clearDomainOctreeDetails();
void clearDomainAvatars();
void aboutToQuit();
void resettingDomain();

View file

@ -23,6 +23,8 @@
#include "DiscoverabilityManager.h"
#include "Menu.h"
#include <QThread>
const Discoverability::Mode DEFAULT_DISCOVERABILITY_MODE = Discoverability::Friends;
DiscoverabilityManager::DiscoverabilityManager() :
@ -37,6 +39,13 @@ const QString API_USER_HEARTBEAT_PATH = "/api/v1/user/heartbeat";
const QString SESSION_ID_KEY = "session_id";
void DiscoverabilityManager::updateLocation() {
// since we store the last location and compare it to
// the current one in this function, we need to do this in
// the object's main thread (or use a mutex)
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "updateLocation");
return;
}
auto accountManager = DependencyManager::get<AccountManager>();
auto addressManager = DependencyManager::get<AddressManager>();
auto& domainHandler = DependencyManager::get<NodeList>()->getDomainHandler();
@ -143,7 +152,7 @@ void DiscoverabilityManager::removeLocation() {
void DiscoverabilityManager::setDiscoverabilityMode(Discoverability::Mode discoverabilityMode) {
if (static_cast<Discoverability::Mode>(_mode.get()) != discoverabilityMode) {
// update the setting to the new value
_mode.set(static_cast<int>(discoverabilityMode));
updateLocation(); // update right away
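
The early-return guard added at the top of `updateLocation()` above is the standard Qt thread-affinity pattern: rather than taking a mutex, the call is re-posted to the object's own thread via `QMetaObject::invokeMethod`. A minimal sketch of that pattern in isolation (the class, slot, and member names here are illustrative, not part of this diff):

    #include <QObject>
    #include <QThread>

    class LocationReporter : public QObject {
        Q_OBJECT
    public slots:
        void updateLocation() {
            if (QThread::currentThread() != thread()) {
                // Called from some other thread: queue the call onto this object's
                // thread (AutoConnection becomes a queued call here) and bail out.
                QMetaObject::invokeMethod(this, "updateLocation");
                return;
            }
            // From here on we are guaranteed to be on the object's thread, so the
            // last-reported location can be compared and stored without a mutex.
            // _lastLocation = currentLocation();
        }
    };
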

View file

@ -34,7 +34,6 @@
#include "avatar/AvatarManager.h"
#include "AvatarBookmarks.h"
#include "devices/DdeFaceTracker.h"
#include "devices/Faceshift.h"
#include "MainWindow.h"
#include "render/DrawStatus.h"
#include "scripting/MenuScriptingInterface.h"
@ -156,6 +155,8 @@ Menu::Menu() {
// Audio > Show Level Meter
addCheckableActionToQMenuAndActionHash(audioMenu, MenuOption::AudioTools, 0, false);
addCheckableActionToQMenuAndActionHash(audioMenu, MenuOption::AudioNoiseReduction, 0, true,
audioIO.data(), SLOT(toggleAudioNoiseReduction()));
// Avatar menu ----------------------------------
MenuWrapper* avatarMenu = addMenu("Avatar");
@ -196,6 +197,9 @@ Menu::Menu() {
0, // QML Qt::Key_Apostrophe,
qApp, SLOT(resetSensors()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
avatar.get(), SLOT(updateMotionBehaviorFromMenu()));
// Avatar > AvatarBookmarks related menus -- Note: the AvatarBookmarks class adds its own submenus here.
auto avatarBookmarks = DependencyManager::get<AvatarBookmarks>();
avatarBookmarks->setupMenus(this, avatarMenu);
@ -446,12 +450,6 @@ Menu::Menu() {
qApp, SLOT(setActiveFaceTracker()));
faceTrackerGroup->addAction(noFaceTracker);
#ifdef HAVE_FACESHIFT
QAction* faceshiftFaceTracker = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::Faceshift,
0, false,
qApp, SLOT(setActiveFaceTracker()));
faceTrackerGroup->addAction(faceshiftFaceTracker);
#endif
#ifdef HAVE_DDE
QAction* ddeFaceTracker = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::UseCamera,
0, true,
@ -472,11 +470,10 @@ Menu::Menu() {
QAction* ddeCalibrate = addActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::CalibrateCamera, 0,
DependencyManager::get<DdeFaceTracker>().data(), SLOT(calibrate()));
ddeCalibrate->setVisible(true); // DDE face tracking is on by default
#endif
#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
faceTrackingMenu->addSeparator();
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::MuteFaceTracking,
Qt::CTRL | Qt::SHIFT | Qt::Key_F, true); // DDE face tracking is on by default
[](bool mute) { FaceTracker::setIsMuted(mute); },
Qt::CTRL | Qt::SHIFT | Qt::Key_F, FaceTracker::isMuted());
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::AutoMuteAudio, 0, false);
#endif
@ -532,10 +529,6 @@ Menu::Menu() {
avatar.get(), SLOT(updateMotionBehaviorFromMenu()),
UNSPECIFIED_POSITION, "Developer");
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::EnableCharacterController, 0, true,
avatar.get(), SLOT(updateMotionBehaviorFromMenu()),
UNSPECIFIED_POSITION, "Developer");
// Developer > Hands >>>
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false,
@ -622,8 +615,6 @@ Menu::Menu() {
QString("../../hifi/tablet/TabletAudioPreferences.qml"), "AudioPreferencesDialog");
});
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioNoiseReduction, 0, true,
audioIO.data(), SLOT(toggleAudioNoiseReduction()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoServerAudio, 0, false,
audioIO.data(), SLOT(toggleServerEcho()));
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoLocalAudio, 0, false,

View file

@ -36,7 +36,7 @@ namespace MenuOption {
const QString AssetMigration = "ATP Asset Migration";
const QString AssetServer = "Asset Browser";
const QString Attachments = "Attachments...";
const QString AudioNoiseReduction = "Audio Noise Reduction";
const QString AudioNoiseReduction = "Noise Reduction";
const QString AudioScope = "Show Scope";
const QString AudioScopeFiftyFrames = "Fifty";
const QString AudioScopeFiveFrames = "Five";
@ -96,7 +96,7 @@ namespace MenuOption {
const QString DontRenderEntitiesAsScene = "Don't Render Entities as Scene";
const QString EchoLocalAudio = "Echo Local Audio";
const QString EchoServerAudio = "Echo Server Audio";
const QString EnableCharacterController = "Enable avatar collisions";
const QString EnableCharacterController = "Collide with world";
const QString EnableInverseKinematics = "Enable Inverse Kinematics";
const QString EntityScriptServerLog = "Entity Script Server Log";
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
@ -105,7 +105,6 @@ namespace MenuOption {
const QString ExpandPaintGLTiming = "Expand /paintGL";
const QString ExpandPhysicsSimulationTiming = "Expand /physics";
const QString ExpandUpdateTiming = "Expand /update";
const QString Faceshift = "Faceshift";
const QString FirstPerson = "First Person";
const QString FivePointCalibration = "5 Point Calibration";
const QString FixGaze = "Fix Gaze (no saccade)";

View file

@ -13,6 +13,7 @@
#include <NodeList.h>
#include <recording/Deck.h>
#include <trackers/EyeTracker.h>
#include "Application.h"
#include "Avatar.h"
@ -22,8 +23,6 @@
#include "Menu.h"
#include "Util.h"
#include "devices/DdeFaceTracker.h"
#include "devices/EyeTracker.h"
#include "devices/Faceshift.h"
#include <Rig.h>
using namespace std;
@ -209,14 +208,14 @@ void Head::simulate(float deltaTime, bool isMine) {
// use data to update fake Faceshift blendshape coefficients
calculateMouthShapes(deltaTime);
DependencyManager::get<Faceshift>()->updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_blendshapeCoefficients);
FaceTracker::updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_blendshapeCoefficients);
applyEyelidOffset(getOrientation());

View file

@ -41,9 +41,9 @@
#include <recording/Clip.h>
#include <recording/Frame.h>
#include <RecordingScriptingInterface.h>
#include <trackers/FaceTracker.h>
#include "Application.h"
#include "devices/Faceshift.h"
#include "AvatarManager.h"
#include "AvatarActionHold.h"
#include "Menu.h"
@ -82,6 +82,18 @@ const float MyAvatar::ZOOM_MIN = 0.5f;
const float MyAvatar::ZOOM_MAX = 25.0f;
const float MyAvatar::ZOOM_DEFAULT = 1.5f;
// default values, used when avatar is missing joints... (avatar space)
// static const glm::quat DEFAULT_AVATAR_MIDDLE_EYE_ROT { Quaternions::Y_180 };
static const glm::vec3 DEFAULT_AVATAR_MIDDLE_EYE_POS { 0.0f, 0.6f, 0.0f };
static const glm::vec3 DEFAULT_AVATAR_HEAD_POS { 0.0f, 0.53f, 0.0f };
static const glm::vec3 DEFAULT_AVATAR_NECK_POS { 0.0f, 0.445f, 0.025f };
static const glm::vec3 DEFAULT_AVATAR_SPINE2_POS { 0.0f, 0.32f, 0.02f };
static const glm::vec3 DEFAULT_AVATAR_HIPS_POS { 0.0f, 0.0f, 0.0f };
static const glm::vec3 DEFAULT_AVATAR_LEFTFOOT_POS { -0.08f, -0.96f, 0.029f};
static const glm::quat DEFAULT_AVATAR_LEFTFOOT_ROT { -0.40167322754859924f, 0.9154590368270874f, -0.005437685176730156f, -0.023744143545627594f };
static const glm::vec3 DEFAULT_AVATAR_RIGHTFOOT_POS { 0.08f, -0.96f, 0.029f };
static const glm::quat DEFAULT_AVATAR_RIGHTFOOT_ROT { -0.4016716778278351f, 0.9154615998268127f, 0.0053307069465518f, 0.023696165531873703f };
MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
Avatar(thread, rig),
_wasPushing(false),
@ -650,18 +662,13 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
}
FaceTracker* tracker = qApp->getActiveFaceTracker();
bool inFacetracker = tracker && !tracker->isMuted();
bool inFacetracker = tracker && !FaceTracker::isMuted();
if (inHmd) {
estimatedPosition = extractTranslation(getHMDSensorMatrix());
estimatedPosition.x *= -1.0f;
_trackedHeadPosition = estimatedPosition;
const float OCULUS_LEAN_SCALE = 0.05f;
estimatedPosition /= OCULUS_LEAN_SCALE;
} else if (inFacetracker) {
estimatedPosition = tracker->getHeadTranslation();
_trackedHeadPosition = estimatedPosition;
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
}
@ -1378,6 +1385,65 @@ controller::Pose MyAvatar::getRightFootControllerPoseInAvatarFrame() const {
return getRightFootControllerPoseInWorldFrame().transform(invAvatarMatrix);
}
void MyAvatar::setSpineControllerPosesInSensorFrame(const controller::Pose& hips, const controller::Pose& spine2) {
if (controller::InputDevice::getLowVelocityFilter()) {
auto oldHipsPose = getHipsControllerPoseInSensorFrame();
auto oldSpine2Pose = getSpine2ControllerPoseInSensorFrame();
_hipsControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldHipsPose, hips));
_spine2ControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldSpine2Pose, spine2));
} else {
_hipsControllerPoseInSensorFrameCache.set(hips);
_spine2ControllerPoseInSensorFrameCache.set(spine2);
}
}
controller::Pose MyAvatar::getHipsControllerPoseInSensorFrame() const {
return _hipsControllerPoseInSensorFrameCache.get();
}
controller::Pose MyAvatar::getSpine2ControllerPoseInSensorFrame() const {
return _spine2ControllerPoseInSensorFrameCache.get();
}
controller::Pose MyAvatar::getHipsControllerPoseInWorldFrame() const {
return _hipsControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
}
controller::Pose MyAvatar::getSpine2ControllerPoseInWorldFrame() const {
return _spine2ControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
}
controller::Pose MyAvatar::getHipsControllerPoseInAvatarFrame() const {
glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
return getHipsControllerPoseInWorldFrame().transform(invAvatarMatrix);
}
controller::Pose MyAvatar::getSpine2ControllerPoseInAvatarFrame() const {
glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
return getSpine2ControllerPoseInWorldFrame().transform(invAvatarMatrix);
}
void MyAvatar::setHeadControllerPoseInSensorFrame(const controller::Pose& head) {
if (controller::InputDevice::getLowVelocityFilter()) {
auto oldHeadPose = getHeadControllerPoseInSensorFrame();
_headControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldHeadPose, head));
} else {
_headControllerPoseInSensorFrameCache.set(head);
}
}
controller::Pose MyAvatar::getHeadControllerPoseInSensorFrame() const {
return _headControllerPoseInSensorFrameCache.get();
}
controller::Pose MyAvatar::getHeadControllerPoseInWorldFrame() const {
return _headControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
}
controller::Pose MyAvatar::getHeadControllerPoseInAvatarFrame() const {
glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
return getHeadControllerPoseInWorldFrame().transform(invAvatarMatrix);
}
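
The head, hips, and spine2 accessors added above follow the same convention as the existing hand and foot poses: each pose is cached once in sensor space, and the world- and avatar-frame variants are derived on read. A short usage sketch tying this to the `Application::update` hunk earlier in this diff (variable names as they appear there):

    // Write side: the mapper pose is avatar-relative, so it is moved into sensor
    // space before being cached.
    controller::Pose headPose = userInputMapper->getPoseState(controller::Action::HEAD);
    myAvatar->setHeadControllerPoseInSensorFrame(headPose.transform(avatarToSensorMatrix));

    // Read side: the one cached sensor-frame pose, re-expressed per consumer.
    controller::Pose headInWorld  = myAvatar->getHeadControllerPoseInWorldFrame();   // sensor -> world
    controller::Pose headInAvatar = myAvatar->getHeadControllerPoseInAvatarFrame();  // world -> avatar
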
void MyAvatar::updateMotors() {
_characterController.clearMotors();
@ -2220,22 +2286,17 @@ glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
const glm::quat hmdOrientation = getHMDSensorOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
// 2 meter tall dude (in rig coordinates)
const glm::vec3 DEFAULT_RIG_MIDDLE_EYE_POS(0.0f, 0.9f, 0.0f);
const glm::vec3 DEFAULT_RIG_NECK_POS(0.0f, 0.70f, 0.0f);
const glm::vec3 DEFAULT_RIG_HIPS_POS(0.0f, 0.05f, 0.0f);
int rightEyeIndex = _rig->indexOfJoint("RightEye");
int leftEyeIndex = _rig->indexOfJoint("LeftEye");
int neckIndex = _rig->indexOfJoint("Neck");
int hipsIndex = _rig->indexOfJoint("Hips");
glm::vec3 rigMiddleEyePos = DEFAULT_RIG_MIDDLE_EYE_POS;
glm::vec3 rigMiddleEyePos = DEFAULT_AVATAR_MIDDLE_EYE_POS;
if (leftEyeIndex >= 0 && rightEyeIndex >= 0) {
rigMiddleEyePos = (_rig->getAbsoluteDefaultPose(leftEyeIndex).trans() + _rig->getAbsoluteDefaultPose(rightEyeIndex).trans()) / 2.0f;
}
glm::vec3 rigNeckPos = neckIndex != -1 ? _rig->getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_RIG_NECK_POS;
glm::vec3 rigHipsPos = hipsIndex != -1 ? _rig->getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_RIG_HIPS_POS;
glm::vec3 rigNeckPos = neckIndex != -1 ? _rig->getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_AVATAR_NECK_POS;
glm::vec3 rigHipsPos = hipsIndex != -1 ? _rig->getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_AVATAR_HIPS_POS;
glm::vec3 localEyes = (rigMiddleEyePos - rigHipsPos);
glm::vec3 localNeck = (rigNeckPos - rigHipsPos);
@ -2599,6 +2660,79 @@ glm::vec3 MyAvatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
}
}
glm::mat4 MyAvatar::getCenterEyeCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int rightEyeIndex = _rig->indexOfJoint("RightEye");
int leftEyeIndex = _rig->indexOfJoint("LeftEye");
if (rightEyeIndex >= 0 && leftEyeIndex >= 0) {
auto centerEyePos = (getAbsoluteDefaultJointTranslationInObjectFrame(rightEyeIndex) + getAbsoluteDefaultJointTranslationInObjectFrame(leftEyeIndex)) * 0.5f;
auto centerEyeRot = Quaternions::Y_180;
return createMatFromQuatAndPos(centerEyeRot, centerEyePos);
} else {
return createMatFromQuatAndPos(DEFAULT_AVATAR_MIDDLE_EYE_POS, DEFAULT_AVATAR_MIDDLE_EYE_POS);
}
}
glm::mat4 MyAvatar::getHeadCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int headIndex = _rig->indexOfJoint("Head");
if (headIndex >= 0) {
auto headPos = getAbsoluteDefaultJointTranslationInObjectFrame(headIndex);
auto headRot = getAbsoluteDefaultJointRotationInObjectFrame(headIndex);
return createMatFromQuatAndPos(headRot, headPos);
} else {
return createMatFromQuatAndPos(DEFAULT_AVATAR_HEAD_POS, DEFAULT_AVATAR_HEAD_POS);
}
}
glm::mat4 MyAvatar::getSpine2CalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int spine2Index = _rig->indexOfJoint("Spine2");
if (spine2Index >= 0) {
auto spine2Pos = getAbsoluteDefaultJointTranslationInObjectFrame(spine2Index);
auto spine2Rot = getAbsoluteDefaultJointRotationInObjectFrame(spine2Index);
return createMatFromQuatAndPos(spine2Rot, spine2Pos);
} else {
return createMatFromQuatAndPos(DEFAULT_AVATAR_SPINE2_POS, DEFAULT_AVATAR_SPINE2_POS);
}
}
glm::mat4 MyAvatar::getHipsCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int hipsIndex = _rig->indexOfJoint("Hips");
if (hipsIndex >= 0) {
auto hipsPos = getAbsoluteDefaultJointTranslationInObjectFrame(hipsIndex);
auto hipsRot = getAbsoluteDefaultJointRotationInObjectFrame(hipsIndex);
return createMatFromQuatAndPos(hipsRot, hipsPos);
} else {
return createMatFromQuatAndPos(DEFAULT_AVATAR_HIPS_POS, DEFAULT_AVATAR_HIPS_POS);
}
}
glm::mat4 MyAvatar::getLeftFootCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int leftFootIndex = _rig->indexOfJoint("LeftFoot");
if (leftFootIndex >= 0) {
auto leftFootPos = getAbsoluteDefaultJointTranslationInObjectFrame(leftFootIndex);
auto leftFootRot = getAbsoluteDefaultJointRotationInObjectFrame(leftFootIndex);
return createMatFromQuatAndPos(leftFootRot, leftFootPos);
} else {
return createMatFromQuatAndPos(DEFAULT_AVATAR_LEFTFOOT_POS, DEFAULT_AVATAR_LEFTFOOT_POS);
}
}
glm::mat4 MyAvatar::getRightFootCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int rightFootIndex = _rig->indexOfJoint("RightFoot");
if (rightFootIndex >= 0) {
auto rightFootPos = getAbsoluteDefaultJointTranslationInObjectFrame(rightFootIndex);
auto rightFootRot = getAbsoluteDefaultJointRotationInObjectFrame(rightFootIndex);
return createMatFromQuatAndPos(rightFootRot, rightFootPos);
} else {
return createMatFromQuatAndPos(DEFAULT_AVATAR_RIGHTFOOT_POS, DEFAULT_AVATAR_RIGHTFOOT_POS);
}
}
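
All six calibration accessors above repeat the same caching TODO. One possible shape for that optimization, sketched for the head joint only; the `_headCalibrationMat` / `_headCalibrationMatValid` members and the model-change invalidation hook are hypothetical, not part of this diff:

    // Hypothetical cached variant of getHeadCalibrationMat(); the cache members would be
    // declared mutable (or the matrix recomputed eagerly whenever the avatar model changes,
    // which is when _headCalibrationMatValid would be cleared).
    glm::mat4 MyAvatar::getHeadCalibrationMat() const {
        if (!_headCalibrationMatValid) {
            int headIndex = _rig->indexOfJoint("Head");
            if (headIndex >= 0) {
                _headCalibrationMat = createMatFromQuatAndPos(
                    getAbsoluteDefaultJointRotationInObjectFrame(headIndex),
                    getAbsoluteDefaultJointTranslationInObjectFrame(headIndex));
            } else {
                _headCalibrationMat = createMatFromQuatAndPos(DEFAULT_AVATAR_HEAD_POS, DEFAULT_AVATAR_HEAD_POS);
            }
            _headCalibrationMatValid = true;
        }
        return _headCalibrationMat;
    }
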
bool MyAvatar::pinJoint(int index, const glm::vec3& position, const glm::quat& orientation) {
auto hipsIndex = getJointIndex("Hips");
if (index != hipsIndex) {

View file

@ -353,7 +353,6 @@ public:
eyeContactTarget getEyeContactTarget();
Q_INVOKABLE glm::vec3 getTrackedHeadPosition() const { return _trackedHeadPosition; }
Q_INVOKABLE glm::vec3 getHeadPosition() const { return getHead()->getPosition(); }
Q_INVOKABLE float getHeadFinalYaw() const { return getHead()->getFinalYaw(); }
Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); }
@ -453,6 +452,19 @@ public:
controller::Pose getLeftFootControllerPoseInAvatarFrame() const;
controller::Pose getRightFootControllerPoseInAvatarFrame() const;
void setSpineControllerPosesInSensorFrame(const controller::Pose& hips, const controller::Pose& spine2);
controller::Pose getHipsControllerPoseInSensorFrame() const;
controller::Pose getSpine2ControllerPoseInSensorFrame() const;
controller::Pose getHipsControllerPoseInWorldFrame() const;
controller::Pose getSpine2ControllerPoseInWorldFrame() const;
controller::Pose getHipsControllerPoseInAvatarFrame() const;
controller::Pose getSpine2ControllerPoseInAvatarFrame() const;
void setHeadControllerPoseInSensorFrame(const controller::Pose& head);
controller::Pose getHeadControllerPoseInSensorFrame() const;
controller::Pose getHeadControllerPoseInWorldFrame() const;
controller::Pose getHeadControllerPoseInAvatarFrame() const;
bool hasDriveInput() const;
Q_INVOKABLE void setCharacterControllerEnabled(bool enabled);
@ -461,10 +473,22 @@ public:
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
// all calibration matrices are in absolute avatar space.
glm::mat4 getCenterEyeCalibrationMat() const;
glm::mat4 getHeadCalibrationMat() const;
glm::mat4 getSpine2CalibrationMat() const;
glm::mat4 getHipsCalibrationMat() const;
glm::mat4 getLeftFootCalibrationMat() const;
glm::mat4 getRightFootCalibrationMat() const;
void addHoldAction(AvatarActionHold* holdAction); // thread-safe
void removeHoldAction(AvatarActionHold* holdAction); // thread-safe
void updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose& postUpdatePose);
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in HMD frame
glm::mat4 deriveBodyFromHMDSensor() const;
public slots:
void increaseSize();
void decreaseSize();
@ -553,9 +577,7 @@ private:
void setVisibleInSceneIfReady(Model* model, const render::ScenePointer& scene, bool visiblity);
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in HMD frame
glm::mat4 deriveBodyFromHMDSensor() const;
private:
virtual void updatePalms() override {}
void lateUpdatePalms();
@ -691,9 +713,11 @@ private:
// These are stored in SENSOR frame
ThreadSafeValueCache<controller::Pose> _leftHandControllerPoseInSensorFrameCache { controller::Pose() };
ThreadSafeValueCache<controller::Pose> _rightHandControllerPoseInSensorFrameCache { controller::Pose() };
ThreadSafeValueCache<controller::Pose> _leftFootControllerPoseInSensorFrameCache{ controller::Pose() };
ThreadSafeValueCache<controller::Pose> _rightFootControllerPoseInSensorFrameCache{ controller::Pose() };
ThreadSafeValueCache<controller::Pose> _hipsControllerPoseInSensorFrameCache{ controller::Pose() };
ThreadSafeValueCache<controller::Pose> _spine2ControllerPoseInSensorFrameCache{ controller::Pose() };
ThreadSafeValueCache<controller::Pose> _headControllerPoseInSensorFrameCache{ controller::Pose() };
bool _hmdLeanRecenterEnabled = true;

View file

@ -107,27 +107,49 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Rig::HeadParameters headParams;
if (qApp->isHMDMode()) {
headParams.isInHMD = true;
// get HMD position from sensor space into world space, and back into rig space
glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
glm::mat4 worldToRig = glm::inverse(rigToWorld);
glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
headParams.rigHeadPosition = extractTranslation(rigHMDMat);
headParams.rigHeadOrientation = extractRotation(rigHMDMat);
headParams.worldHeadOrientation = extractRotation(worldHMDMat);
// input action is the highest priority source for head orientation.
auto avatarHeadPose = myAvatar->getHeadControllerPoseInAvatarFrame();
if (avatarHeadPose.isValid()) {
glm::mat4 rigHeadMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
headParams.rigHeadPosition = extractTranslation(rigHeadMat);
headParams.rigHeadOrientation = glmExtractRotation(rigHeadMat);
headParams.headEnabled = true;
} else {
headParams.isInHMD = false;
// We don't have a valid localHeadPosition.
headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame();
headParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
if (qApp->isHMDMode()) {
// get HMD position from sensor space into world space, and back into rig space
glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
glm::mat4 worldToRig = glm::inverse(rigToWorld);
glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
_rig->computeHeadFromHMD(AnimPose(rigHMDMat), headParams.rigHeadPosition, headParams.rigHeadOrientation);
headParams.headEnabled = true;
} else {
// even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and down in desktop mode.
// preMult 180 is necessary to convert from avatar to rig coordinates.
// postMult 180 is necessary to convert head from -z forward to z forward.
headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
headParams.headEnabled = false;
}
}
auto avatarHipsPose = myAvatar->getHipsControllerPoseInAvatarFrame();
if (avatarHipsPose.isValid()) {
glm::mat4 rigHipsMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHipsPose.getRotation(), avatarHipsPose.getTranslation());
headParams.hipsMatrix = rigHipsMat;
headParams.hipsEnabled = true;
} else {
headParams.hipsEnabled = false;
}
auto avatarSpine2Pose = myAvatar->getSpine2ControllerPoseInAvatarFrame();
if (avatarSpine2Pose.isValid()) {
glm::mat4 rigSpine2Mat = Matrices::Y_180 * createMatFromQuatAndPos(avatarSpine2Pose.getRotation(), avatarSpine2Pose.getTranslation());
headParams.spine2Matrix = rigSpine2Mat;
headParams.spine2Enabled = true;
} else {
headParams.spine2Enabled = false;
}
headParams.neckJointIndex = geometry.neckJointIndex;
headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;
_rig->updateFromHeadParameters(headParams, deltaTime);
@ -187,7 +209,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Model::updateRig(deltaTime, parentTransform);
Rig::EyeParameters eyeParams;
eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
eyeParams.eyeLookAt = lookAt;
eyeParams.eyeSaccade = head->getSaccade();
eyeParams.modelRotation = getRotation();
@ -219,7 +240,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
head->setBaseRoll(glm::degrees(-eulers.z));
Rig::EyeParameters eyeParams;
eyeParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
eyeParams.eyeLookAt = lookAt;
eyeParams.eyeSaccade = glm::vec3(0.0f);
eyeParams.modelRotation = getRotation();

View file

@ -22,7 +22,7 @@
#include <DependencyManager.h>
#include <ui/overlays/TextOverlay.h>
#include "FaceTracker.h"
#include <trackers/FaceTracker.h>
class DdeFaceTracker : public FaceTracker, public Dependency {
Q_OBJECT

View file

@ -1,310 +0,0 @@
//
// Faceshift.cpp
// interface/src/devices
//
// Created by Andrzej Kapolka on 9/3/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QTimer>
#include <GLMHelpers.h>
#include <NumericalConstants.h>
#include <PerfStat.h>
#include "Faceshift.h"
#include "Menu.h"
#include "Util.h"
#include "InterfaceLogging.h"
#ifdef HAVE_FACESHIFT
using namespace fs;
#endif
using namespace std;
const QString DEFAULT_FACESHIFT_HOSTNAME = "localhost";
const quint16 FACESHIFT_PORT = 33433;
Faceshift::Faceshift() :
_hostname("faceshiftHostname", DEFAULT_FACESHIFT_HOSTNAME)
{
#ifdef HAVE_FACESHIFT
connect(&_tcpSocket, SIGNAL(connected()), SLOT(noteConnected()));
connect(&_tcpSocket, SIGNAL(error(QAbstractSocket::SocketError)), SLOT(noteError(QAbstractSocket::SocketError)));
connect(&_tcpSocket, SIGNAL(readyRead()), SLOT(readFromSocket()));
connect(&_tcpSocket, SIGNAL(stateChanged(QAbstractSocket::SocketState)), SIGNAL(connectionStateChanged()));
connect(&_tcpSocket, SIGNAL(disconnected()), SLOT(noteDisconnected()));
connect(&_udpSocket, SIGNAL(readyRead()), SLOT(readPendingDatagrams()));
_udpSocket.bind(FACESHIFT_PORT);
#endif
}
#ifdef HAVE_FACESHIFT
void Faceshift::init() {
FaceTracker::init();
setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !_isMuted);
}
void Faceshift::update(float deltaTime) {
if (!isActive()) {
return;
}
FaceTracker::update(deltaTime);
// get the euler angles relative to the window
glm::vec3 eulers = glm::degrees(safeEulerAngles(_headRotation * glm::quat(glm::radians(glm::vec3(
(_eyeGazeLeftPitch + _eyeGazeRightPitch) / 2.0f, (_eyeGazeLeftYaw + _eyeGazeRightYaw) / 2.0f, 0.0f)))));
// compute and subtract the long term average
const float LONG_TERM_AVERAGE_SMOOTHING = 0.999f;
if (!_longTermAverageInitialized) {
_longTermAverageEyePitch = eulers.x;
_longTermAverageEyeYaw = eulers.y;
_longTermAverageInitialized = true;
} else {
_longTermAverageEyePitch = glm::mix(eulers.x, _longTermAverageEyePitch, LONG_TERM_AVERAGE_SMOOTHING);
_longTermAverageEyeYaw = glm::mix(eulers.y, _longTermAverageEyeYaw, LONG_TERM_AVERAGE_SMOOTHING);
}
_estimatedEyePitch = eulers.x - _longTermAverageEyePitch;
_estimatedEyeYaw = eulers.y - _longTermAverageEyeYaw;
}
void Faceshift::reset() {
if (_tcpSocket.state() == QAbstractSocket::ConnectedState) {
qCDebug(interfaceapp, "Faceshift: Reset");
FaceTracker::reset();
string message;
fsBinaryStream::encode_message(message, fsMsgCalibrateNeutral());
send(message);
}
_longTermAverageInitialized = false;
}
bool Faceshift::isActive() const {
const quint64 ACTIVE_TIMEOUT_USECS = 1000000;
return (usecTimestampNow() - _lastReceiveTimestamp) < ACTIVE_TIMEOUT_USECS;
}
bool Faceshift::isTracking() const {
return isActive() && _tracking;
}
#endif
bool Faceshift::isConnectedOrConnecting() const {
return _tcpSocket.state() == QAbstractSocket::ConnectedState ||
(_tcpRetryCount == 0 && _tcpSocket.state() != QAbstractSocket::UnconnectedState);
}
void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float browUp,
float jawOpen, float mouth2, float mouth3, float mouth4, QVector<float>& coefficients) const {
const int MMMM_BLENDSHAPE = 34;
const int FUNNEL_BLENDSHAPE = 40;
const int SMILE_LEFT_BLENDSHAPE = 28;
const int SMILE_RIGHT_BLENDSHAPE = 29;
const int MAX_FAKE_BLENDSHAPE = 40; // Largest modified blendshape from above and below
coefficients.resize(max((int)coefficients.size(), MAX_FAKE_BLENDSHAPE + 1));
qFill(coefficients.begin(), coefficients.end(), 0.0f);
coefficients[_leftBlinkIndex] = leftBlink;
coefficients[_rightBlinkIndex] = rightBlink;
coefficients[_browUpCenterIndex] = browUp;
coefficients[_browUpLeftIndex] = browUp;
coefficients[_browUpRightIndex] = browUp;
coefficients[_jawOpenIndex] = jawOpen;
coefficients[SMILE_LEFT_BLENDSHAPE] = coefficients[SMILE_RIGHT_BLENDSHAPE] = mouth4;
coefficients[MMMM_BLENDSHAPE] = mouth2;
coefficients[FUNNEL_BLENDSHAPE] = mouth3;
}
void Faceshift::setEnabled(bool enabled) {
// Don't enable until we have explicitly initialized
if (!_isInitialized) {
return;
}
#ifdef HAVE_FACESHIFT
if ((_tcpEnabled = enabled)) {
connectSocket();
} else {
qCDebug(interfaceapp, "Faceshift: Disconnecting...");
_tcpSocket.disconnectFromHost();
}
#endif
}
void Faceshift::connectSocket() {
if (_tcpEnabled) {
if (!_tcpRetryCount) {
qCDebug(interfaceapp, "Faceshift: Connecting...");
}
_tcpSocket.connectToHost(_hostname.get(), FACESHIFT_PORT);
_tracking = false;
}
}
void Faceshift::noteConnected() {
#ifdef HAVE_FACESHIFT
qCDebug(interfaceapp, "Faceshift: Connected");
// request the list of blendshape names
string message;
fsBinaryStream::encode_message(message, fsMsgSendBlendshapeNames());
send(message);
#endif
}
void Faceshift::noteDisconnected() {
#ifdef HAVE_FACESHIFT
qCDebug(interfaceapp, "Faceshift: Disconnected");
#endif
}
void Faceshift::noteError(QAbstractSocket::SocketError error) {
if (!_tcpRetryCount) {
// Only spam log with fail to connect the first time, so that we can keep waiting for server
qCWarning(interfaceapp) << "Faceshift: " << _tcpSocket.errorString();
}
// retry connection after a 2 second delay
if (_tcpEnabled) {
_tcpRetryCount++;
QTimer::singleShot(2000, this, SLOT(connectSocket()));
}
}
void Faceshift::readPendingDatagrams() {
QByteArray buffer;
while (_udpSocket.hasPendingDatagrams()) {
buffer.resize(_udpSocket.pendingDatagramSize());
_udpSocket.readDatagram(buffer.data(), buffer.size());
receive(buffer);
}
}
void Faceshift::readFromSocket() {
receive(_tcpSocket.readAll());
}
void Faceshift::send(const std::string& message) {
_tcpSocket.write(message.data(), message.size());
}
void Faceshift::receive(const QByteArray& buffer) {
#ifdef HAVE_FACESHIFT
_lastReceiveTimestamp = usecTimestampNow();
_stream.received(buffer.size(), buffer.constData());
fsMsgPtr msg;
for (fsMsgPtr msg; (msg = _stream.get_message()); ) {
switch (msg->id()) {
case fsMsg::MSG_OUT_TRACKING_STATE: {
const fsTrackingData& data = static_pointer_cast<fsMsgTrackingState>(msg)->tracking_data();
if ((_tracking = data.m_trackingSuccessful)) {
glm::quat newRotation = glm::quat(data.m_headRotation.w, -data.m_headRotation.x,
data.m_headRotation.y, -data.m_headRotation.z);
// Compute angular velocity of the head
glm::quat r = glm::normalize(newRotation * glm::inverse(_headRotation));
float theta = 2 * acos(r.w);
if (theta > EPSILON) {
float rMag = glm::length(glm::vec3(r.x, r.y, r.z));
_headAngularVelocity = theta / _averageFrameTime * glm::vec3(r.x, r.y, r.z) / rMag;
} else {
_headAngularVelocity = glm::vec3(0,0,0);
}
const float ANGULAR_VELOCITY_FILTER_STRENGTH = 0.3f;
_headRotation = safeMix(_headRotation, newRotation, glm::clamp(glm::length(_headAngularVelocity) *
ANGULAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f));
const float TRANSLATION_SCALE = 0.02f;
glm::vec3 newHeadTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y,
-data.m_headTranslation.z) * TRANSLATION_SCALE;
_headLinearVelocity = (newHeadTranslation - _lastHeadTranslation) / _averageFrameTime;
const float LINEAR_VELOCITY_FILTER_STRENGTH = 0.3f;
float velocityFilter = glm::clamp(1.0f - glm::length(_headLinearVelocity) *
LINEAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
_filteredHeadTranslation = velocityFilter * _filteredHeadTranslation + (1.0f - velocityFilter) * newHeadTranslation;
_lastHeadTranslation = newHeadTranslation;
_headTranslation = _filteredHeadTranslation;
_eyeGazeLeftPitch = -data.m_eyeGazeLeftPitch;
_eyeGazeLeftYaw = data.m_eyeGazeLeftYaw;
_eyeGazeRightPitch = -data.m_eyeGazeRightPitch;
_eyeGazeRightYaw = data.m_eyeGazeRightYaw;
_blendshapeCoefficients = QVector<float>::fromStdVector(data.m_coeffs);
const float FRAME_AVERAGING_FACTOR = 0.99f;
quint64 usecsNow = usecTimestampNow();
if (_lastMessageReceived != 0) {
_averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime +
(1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastMessageReceived) / 1000000.0f;
}
_lastMessageReceived = usecsNow;
}
break;
}
case fsMsg::MSG_OUT_BLENDSHAPE_NAMES: {
const vector<string>& names = static_pointer_cast<fsMsgBlendshapeNames>(msg)->blendshape_names();
for (int i = 0; i < (int)names.size(); i++) {
if (names[i] == "EyeBlink_L") {
_leftBlinkIndex = i;
} else if (names[i] == "EyeBlink_R") {
_rightBlinkIndex = i;
} else if (names[i] == "EyeOpen_L") {
_leftEyeOpenIndex = i;
} else if (names[i] == "EyeOpen_R") {
_rightEyeOpenIndex = i;
} else if (names[i] == "BrowsD_L") {
_browDownLeftIndex = i;
} else if (names[i] == "BrowsD_R") {
_browDownRightIndex = i;
} else if (names[i] == "BrowsU_C") {
_browUpCenterIndex = i;
} else if (names[i] == "BrowsU_L") {
_browUpLeftIndex = i;
} else if (names[i] == "BrowsU_R") {
_browUpRightIndex = i;
} else if (names[i] == "JawOpen") {
_jawOpenIndex = i;
} else if (names[i] == "MouthSmile_L") {
_mouthSmileLeftIndex = i;
} else if (names[i] == "MouthSmile_R") {
_mouthSmileRightIndex = i;
}
}
break;
}
default:
break;
}
}
#endif
FaceTracker::countFrame();
}
void Faceshift::setHostname(const QString& hostname) {
_hostname.set(hostname);
}

View file

@ -1,155 +0,0 @@
//
// Faceshift.h
// interface/src/devices
//
// Created by Andrzej Kapolka on 9/3/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Faceshift_h
#define hifi_Faceshift_h
#include <QTcpSocket>
#include <QUdpSocket>
#ifdef HAVE_FACESHIFT
#include <fsbinarystream.h>
#endif
#include <DependencyManager.h>
#include <SettingHandle.h>
#include "FaceTracker.h"
const float STARTING_FACESHIFT_FRAME_TIME = 0.033f;
/// Handles interaction with the Faceshift software, which provides head position/orientation and facial features.
class Faceshift : public FaceTracker, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
#ifdef HAVE_FACESHIFT
// If we don't have faceshift, use the base class' methods
virtual void init() override;
virtual void update(float deltaTime) override;
virtual void reset() override;
virtual bool isActive() const override;
virtual bool isTracking() const override;
#endif
bool isConnectedOrConnecting() const;
const glm::vec3& getHeadAngularVelocity() const { return _headAngularVelocity; }
// these pitch/yaw angles are in degrees
float getEyeGazeLeftPitch() const { return _eyeGazeLeftPitch; }
float getEyeGazeLeftYaw() const { return _eyeGazeLeftYaw; }
float getEyeGazeRightPitch() const { return _eyeGazeRightPitch; }
float getEyeGazeRightYaw() const { return _eyeGazeRightYaw; }
float getLeftBlink() const { return getBlendshapeCoefficient(_leftBlinkIndex); }
float getRightBlink() const { return getBlendshapeCoefficient(_rightBlinkIndex); }
float getLeftEyeOpen() const { return getBlendshapeCoefficient(_leftEyeOpenIndex); }
float getRightEyeOpen() const { return getBlendshapeCoefficient(_rightEyeOpenIndex); }
float getBrowDownLeft() const { return getBlendshapeCoefficient(_browDownLeftIndex); }
float getBrowDownRight() const { return getBlendshapeCoefficient(_browDownRightIndex); }
float getBrowUpCenter() const { return getBlendshapeCoefficient(_browUpCenterIndex); }
float getBrowUpLeft() const { return getBlendshapeCoefficient(_browUpLeftIndex); }
float getBrowUpRight() const { return getBlendshapeCoefficient(_browUpRightIndex); }
float getMouthSize() const { return getBlendshapeCoefficient(_jawOpenIndex); }
float getMouthSmileLeft() const { return getBlendshapeCoefficient(_mouthSmileLeftIndex); }
float getMouthSmileRight() const { return getBlendshapeCoefficient(_mouthSmileRightIndex); }
QString getHostname() { return _hostname.get(); }
void setHostname(const QString& hostname);
void updateFakeCoefficients(float leftBlink,
float rightBlink,
float browUp,
float jawOpen,
float mouth2,
float mouth3,
float mouth4,
QVector<float>& coefficients) const;
signals:
void connectionStateChanged();
public slots:
void setEnabled(bool enabled) override;
private slots:
void connectSocket();
void noteConnected();
void noteError(QAbstractSocket::SocketError error);
void readPendingDatagrams();
void readFromSocket();
void noteDisconnected();
private:
Faceshift();
virtual ~Faceshift() {}
void send(const std::string& message);
void receive(const QByteArray& buffer);
QTcpSocket _tcpSocket;
QUdpSocket _udpSocket;
#ifdef HAVE_FACESHIFT
fs::fsBinaryStream _stream;
#endif
bool _tcpEnabled = true;
int _tcpRetryCount = 0;
bool _tracking = false;
quint64 _lastReceiveTimestamp = 0;
quint64 _lastMessageReceived = 0;
float _averageFrameTime = STARTING_FACESHIFT_FRAME_TIME;
glm::vec3 _headAngularVelocity = glm::vec3(0.0f);
glm::vec3 _headLinearVelocity = glm::vec3(0.0f);
glm::vec3 _lastHeadTranslation = glm::vec3(0.0f);
glm::vec3 _filteredHeadTranslation = glm::vec3(0.0f);
// degrees
float _eyeGazeLeftPitch = 0.0f;
float _eyeGazeLeftYaw = 0.0f;
float _eyeGazeRightPitch = 0.0f;
float _eyeGazeRightYaw = 0.0f;
// degrees
float _longTermAverageEyePitch = 0.0f;
float _longTermAverageEyeYaw = 0.0f;
bool _longTermAverageInitialized = false;
Setting::Handle<QString> _hostname;
// see http://support.faceshift.com/support/articles/35129-export-of-blendshapes
int _leftBlinkIndex = 0;
int _rightBlinkIndex = 1;
int _leftEyeOpenIndex = 8;
int _rightEyeOpenIndex = 9;
// Brows
int _browDownLeftIndex = 14;
int _browDownRightIndex = 15;
int _browUpCenterIndex = 16;
int _browUpLeftIndex = 17;
int _browUpRightIndex = 18;
int _mouthSmileLeftIndex = 28;
int _mouthSmileRightIndex = 29;
int _jawOpenIndex = 21;
};
#endif // hifi_Faceshift_h

View file

@ -14,7 +14,7 @@
#include <QDateTime>
#include "MotionTracker.h"
#include <trackers/MotionTracker.h>
#ifdef HAVE_LEAPMOTION
#include <Leap.h>

View file

@ -17,7 +17,7 @@
#include <plugins/PluginManager.h>
#include "Application.h"
#include "devices/MotionTracker.h"
#include <trackers/MotionTracker.h>
void ControllerScriptingInterface::handleMetaEvent(HFMetaEvent* event) {
if (event->type() == HFActionEvent::startType()) {

View file

@ -168,6 +168,28 @@ void WindowScriptingInterface::ensureReticleVisible() const {
}
}
/// Display a "browse to directory" dialog. If `directory` is an invalid file or directory the browser will start at the current
/// working directory.
/// \param const QString& title title of the window
/// \param const QString& directory directory to start the file browser at
/// \return QScriptValue directory path as a string if one was selected, otherwise `QScriptValue::NullValue`
QScriptValue WindowScriptingInterface::browseDir(const QString& title, const QString& directory) {
ensureReticleVisible();
QString path = directory;
if (path.isEmpty()) {
path = getPreviousBrowseLocation();
}
#ifndef Q_OS_WIN
path = fixupPathForMac(directory);
#endif
QString result = OffscreenUi::getExistingDirectory(nullptr, title, path);
if (!result.isEmpty()) {
setPreviousBrowseLocation(QFileInfo(result).absolutePath());
}
return result.isEmpty() ? QScriptValue::NullValue : QScriptValue(result);
}
/// Display an open file dialog. If `directory` is an invalid file or directory the browser will start at the current
/// working directory.
/// \param const QString& title title of the window
@ -278,6 +300,10 @@ void WindowScriptingInterface::makeConnection(bool success, const QString& userN
}
}
void WindowScriptingInterface::displayAnnouncement(const QString& message) {
emit announcement(message);
}
bool WindowScriptingInterface::isPhysicsEnabled() {
return qApp->isPhysicsEnabled();
}

View file

@ -51,6 +51,7 @@ public slots:
QScriptValue confirm(const QString& message = "");
QScriptValue prompt(const QString& message = "", const QString& defaultText = "");
CustomPromptResult customPrompt(const QVariant& config);
QScriptValue browseDir(const QString& title = "", const QString& directory = "");
QScriptValue browse(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
QScriptValue save(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
QScriptValue browseAssets(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
@ -58,6 +59,7 @@ public slots:
void copyToClipboard(const QString& text);
void takeSnapshot(bool notify = true, bool includeAnimated = false, float aspectRatio = 0.0f);
void makeConnection(bool success, const QString& userNameOrError);
void displayAnnouncement(const QString& message);
void shareSnapshot(const QString& path, const QUrl& href = QUrl(""));
bool isPhysicsEnabled();
@ -73,12 +75,13 @@ signals:
void svoImportRequested(const QString& url);
void domainConnectionRefused(const QString& reasonMessage, int reasonCode, const QString& extraInfo);
void stillSnapshotTaken(const QString& pathStillSnapshot, bool notify);
void snapshotShared(const QString& error);
void snapshotShared(bool isError, const QString& reply);
void processingGifStarted(const QString& pathStillSnapshot);
void processingGifCompleted(const QString& pathAnimatedSnapshot);
void connectionAdded(const QString& connectionName);
void connectionError(const QString& errorString);
void announcement(const QString& message);
void messageBoxClosed(int id, int button);

View file

@ -11,9 +11,9 @@
#include <AudioClient.h>
#include <SettingHandle.h>
#include <trackers/FaceTracker.h>
#include "Application.h"
#include "devices/FaceTracker.h"
#include "Menu.h"
HIFI_QML_DEF(AvatarInputs)

View file

@ -11,7 +11,6 @@
#include <AudioClient.h>
#include <avatar/AvatarManager.h>
#include <devices/DdeFaceTracker.h>
#include <devices/Faceshift.h>
#include <NetworkingConstants.h>
#include <ScriptEngines.h>
#include <OffscreenUi.h>
@ -116,11 +115,6 @@ void setupPreferences() {
auto preference = new BrowsePreference(SNAPSHOTS, "Put my snapshots here", getter, setter);
preferences->addPreference(preference);
}
{
auto getter = []()->bool { return SnapshotAnimated::alsoTakeAnimatedSnapshot.get(); };
auto setter = [](bool value) { SnapshotAnimated::alsoTakeAnimatedSnapshot.set(value); };
preferences->addPreference(new CheckPreference(SNAPSHOTS, "Take Animated GIF Snapshot", getter, setter));
}
{
auto getter = []()->float { return SnapshotAnimated::snapshotAnimatedDuration.get(); };
auto setter = [](float value) { SnapshotAnimated::snapshotAnimatedDuration.set(value); };
@ -207,13 +201,6 @@ void setupPreferences() {
auto setter = [](float value) { FaceTracker::setEyeDeflection(value); };
preferences->addPreference(new SliderPreference(AVATAR_TUNING, "Face tracker eye deflection", getter, setter));
}
{
auto getter = []()->QString { return DependencyManager::get<Faceshift>()->getHostname(); };
auto setter = [](const QString& value) { DependencyManager::get<Faceshift>()->setHostname(value); };
auto preference = new EditPreference(AVATAR_TUNING, "Faceshift hostname", getter, setter);
preference->setPlaceholderText("localhost");
preferences->addPreference(preference);
}
{
auto getter = [=]()->QString { return myAvatar->getAnimGraphOverrideUrl().toString(); };
auto setter = [=](const QString& value) { myAvatar->setAnimGraphOverrideUrl(QUrl(value)); };

View file

@ -194,3 +194,10 @@ void Snapshot::uploadSnapshot(const QString& filename, const QUrl& href) {
multiPart);
}
QString Snapshot::getSnapshotsLocation() {
return snapshotsLocation.get("");
}
void Snapshot::setSnapshotsLocation(const QString& location) {
snapshotsLocation.set(location);
}

View file

@ -18,6 +18,7 @@
#include <QStandardPaths>
#include <SettingHandle.h>
#include <DependencyManager.h>
class QFile;
class QTemporaryFile;
@ -32,7 +33,9 @@ private:
QUrl _URL;
};
class Snapshot {
class Snapshot : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
static QString saveSnapshot(QImage image);
static QTemporaryFile* saveTempSnapshot(QImage image);
@ -40,6 +43,10 @@ public:
static Setting::Handle<QString> snapshotsLocation;
static void uploadSnapshot(const QString& filename, const QUrl& href = QUrl(""));
public slots:
Q_INVOKABLE QString getSnapshotsLocation();
Q_INVOKABLE void setSnapshotsLocation(const QString& location);
private:
static QFile* savedFileForSnapshot(QImage & image, bool isTemporary);
};

View file

@ -49,6 +49,7 @@ void SnapshotUploader::uploadSuccess(QNetworkReply& reply) {
userStoryObject.insert("place_name", placeName);
userStoryObject.insert("path", currentPath);
userStoryObject.insert("action", "snapshot");
userStoryObject.insert("audience", "for_url");
rootObject.insert("user_story", userStoryObject);
auto accountManager = DependencyManager::get<AccountManager>();
@ -61,7 +62,7 @@ void SnapshotUploader::uploadSuccess(QNetworkReply& reply) {
QJsonDocument(rootObject).toJson());
} else {
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(contents);
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(true, contents);
delete this;
}
}
@ -72,12 +73,13 @@ void SnapshotUploader::uploadFailure(QNetworkReply& reply) {
if (replyString.size() == 0) {
replyString = reply.errorString();
}
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(replyString); // maybe someday include _inWorldLocation, _filename?
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(true, replyString); // maybe someday include _inWorldLocation, _filename?
delete this;
}
void SnapshotUploader::createStorySuccess(QNetworkReply& reply) {
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(QString());
QString replyString = reply.readAll();
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(false, replyString);
delete this;
}
@ -87,7 +89,7 @@ void SnapshotUploader::createStoryFailure(QNetworkReply& reply) {
if (replyString.size() == 0) {
replyString = reply.errorString();
}
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(replyString);
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(true, replyString);
delete this;
}

View file

@ -51,6 +51,7 @@
#include "ui/AvatarInputs.h"
#include "avatar/AvatarManager.h"
#include "scripting/GlobalServicesScriptingInterface.h"
#include "ui/Snapshot.h"
static const float DPI = 30.47f;
static const float INCHES_TO_METERS = 1.0f / 39.3701f;
@ -177,6 +178,7 @@ void Web3DOverlay::loadSourceURL() {
_webSurface->getRootContext()->setContextProperty("Quat", new Quat());
_webSurface->getRootContext()->setContextProperty("MyAvatar", DependencyManager::get<AvatarManager>()->getMyAvatar().get());
_webSurface->getRootContext()->setContextProperty("Entities", DependencyManager::get<EntityScriptingInterface>().data());
_webSurface->getRootContext()->setContextProperty("Snapshot", DependencyManager::get<Snapshot>().data());
if (_webSurface->getRootItem() && _webSurface->getRootItem()->objectName() == "tabletRoot") {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();

View file

@ -86,7 +86,9 @@ void AnimInverseKinematics::setTargetVars(
void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::vector<IKTarget>& targets, const AnimPoseVec& underPoses) {
// build a list of valid targets from _targetVarVec and animVars
_maxTargetIndex = -1;
_hipsTargetIndex = -1;
bool removeUnfoundJoints = false;
for (auto& targetVar : _targetVarVec) {
if (targetVar.jointIndex == -1) {
// this targetVar hasn't been validated yet...
@ -105,15 +107,18 @@ void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::
AnimPose defaultPose = _skeleton->getAbsolutePose(targetVar.jointIndex, underPoses);
glm::quat rotation = animVars.lookupRigToGeometry(targetVar.rotationVar, defaultPose.rot());
glm::vec3 translation = animVars.lookupRigToGeometry(targetVar.positionVar, defaultPose.trans());
if (target.getType() == IKTarget::Type::HipsRelativeRotationAndPosition) {
translation += _hipsOffset;
}
target.setPose(rotation, translation);
target.setIndex(targetVar.jointIndex);
targets.push_back(target);
if (targetVar.jointIndex > _maxTargetIndex) {
_maxTargetIndex = targetVar.jointIndex;
}
// record the index of the hips ik target.
if (target.getIndex() == _hipsIndex) {
_hipsTargetIndex = (int)targets.size() - 1;
}
}
}
}
@ -242,18 +247,21 @@ int AnimInverseKinematics::solveTargetWithCCD(const IKTarget& target, AnimPoseVe
// the tip's parent-relative as we proceed up the chain
glm::quat tipParentOrientation = absolutePoses[pivotIndex].rot();
// NOTE: if this code is removed, the head will remain rigid, causing the spine/hips to thrust forward and backward
// as the head is nodded.
if (targetType == IKTarget::Type::HmdHead) {
// rotate tip directly to target orientation
tipOrientation = target.getRotation();
glm::quat tipRelativeRotation = glm::normalize(tipOrientation * glm::inverse(tipParentOrientation));
glm::quat tipRelativeRotation = glm::inverse(tipParentOrientation) * tipOrientation;
// enforce tip's constraint
// then enforce tip's constraint
RotationConstraint* constraint = getConstraint(tipIndex);
if (constraint) {
bool constrained = constraint->apply(tipRelativeRotation);
if (constrained) {
tipOrientation = glm::normalize(tipRelativeRotation * tipParentOrientation);
tipRelativeRotation = glm::normalize(tipOrientation * glm::inverse(tipParentOrientation));
tipOrientation = tipParentOrientation * tipRelativeRotation;
tipRelativeRotation = tipRelativeRotation;
}
}
// store the relative rotation change in the accumulator
@ -277,7 +285,9 @@ int AnimInverseKinematics::solveTargetWithCCD(const IKTarget& target, AnimPoseVe
const float MIN_AXIS_LENGTH = 1.0e-4f;
RotationConstraint* constraint = getConstraint(pivotIndex);
if (constraint && constraint->isLowerSpine() && tipIndex != _headIndex) {
// only allow swing on lowerSpine if there is a hips IK target.
if (_hipsTargetIndex < 0 && constraint && constraint->isLowerSpine() && tipIndex != _headIndex) {
// for these types of targets we only allow twist at the lower-spine
// (this prevents the hand targets from bending the spine too much and thereby driving the hips too far)
glm::vec3 twistAxis = absolutePoses[pivotIndex].trans() - absolutePoses[pivotsParentIndex].trans();
@ -420,13 +430,13 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
_relativePoses[i].trans() = underPoses[i].trans();
}
if (!_relativePoses.empty()) {
if (!underPoses.empty()) {
// Sometimes the underpose itself can violate the constraints. Rather than
// clamp the animation we dynamically expand each constraint to accommodate it.
std::map<int, RotationConstraint*>::iterator constraintItr = _constraints.begin();
while (constraintItr != _constraints.end()) {
int index = constraintItr->first;
constraintItr->second->dynamicallyAdjustLimits(_relativePoses[index].rot());
constraintItr->second->dynamicallyAdjustLimits(underPoses[index].rot());
++constraintItr;
}
}
@ -441,64 +451,76 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
computeTargets(animVars, targets, underPoses);
}
// debug render ik targets
if (context.getEnableDebugDrawIKTargets()) {
const vec4 WHITE(1.0f);
glm::mat4 rigToAvatarMat = createMatFromQuatAndPos(Quaternions::Y_180, glm::vec3());
for (auto& target : targets) {
glm::mat4 geomTargetMat = createMatFromQuatAndPos(target.getRotation(), target.getTranslation());
glm::mat4 avatarTargetMat = rigToAvatarMat * context.getGeometryToRigMatrix() * geomTargetMat;
QString name = QString("ikTarget%1").arg(target.getIndex());
DebugDraw::getInstance().addMyAvatarMarker(name, glmExtractRotation(avatarTargetMat), extractTranslation(avatarTargetMat), WHITE);
}
} else if (context.getEnableDebugDrawIKTargets() != _previousEnableDebugIKTargets) {
// remove markers if they were added last frame.
for (auto& target : targets) {
QString name = QString("ikTarget%1").arg(target.getIndex());
DebugDraw::getInstance().removeMyAvatarMarker(name);
}
}
_previousEnableDebugIKTargets = context.getEnableDebugDrawIKTargets();
if (targets.empty()) {
// no IK targets but still need to enforce constraints
std::map<int, RotationConstraint*>::iterator constraintItr = _constraints.begin();
while (constraintItr != _constraints.end()) {
int index = constraintItr->first;
glm::quat rotation = _relativePoses[index].rot();
constraintItr->second->apply(rotation);
_relativePoses[index].rot() = rotation;
++constraintItr;
}
_relativePoses = underPoses;
} else {
{
PROFILE_RANGE_EX(simulation_animation, "ik/shiftHips", 0xffff00ff, 0);
// shift hips according to the _hipsOffset from the previous frame
float offsetLength = glm::length(_hipsOffset);
const float MIN_HIPS_OFFSET_LENGTH = 0.03f;
if (offsetLength > MIN_HIPS_OFFSET_LENGTH && _hipsIndex >= 0) {
// but only if offset is long enough
float scaleFactor = ((offsetLength - MIN_HIPS_OFFSET_LENGTH) / offsetLength);
if (_hipsParentIndex == -1) {
// the hips are the root so _hipsOffset is in the correct frame
_relativePoses[_hipsIndex].trans() = underPoses[_hipsIndex].trans() + scaleFactor * _hipsOffset;
if (_hipsTargetIndex >= 0 && _hipsTargetIndex < (int)targets.size()) {
// slam the hips to match the _hipsTarget
AnimPose absPose = targets[_hipsTargetIndex].getPose();
int parentIndex = _skeleton->getParentIndex(targets[_hipsTargetIndex].getIndex());
if (parentIndex != -1) {
_relativePoses[_hipsIndex] = _skeleton->getAbsolutePose(parentIndex, _relativePoses).inverse() * absPose;
} else {
// the hips are NOT the root so we need to transform _hipsOffset into hips local-frame
glm::quat hipsFrameRotation = _relativePoses[_hipsParentIndex].rot();
int index = _skeleton->getParentIndex(_hipsParentIndex);
while (index != -1) {
hipsFrameRotation *= _relativePoses[index].rot();
index = _skeleton->getParentIndex(index);
_relativePoses[_hipsIndex] = absPose;
}
} else {
// if there is no hips target, shift hips according to the _hipsOffset from the previous frame
float offsetLength = glm::length(_hipsOffset);
const float MIN_HIPS_OFFSET_LENGTH = 0.03f;
if (offsetLength > MIN_HIPS_OFFSET_LENGTH && _hipsIndex >= 0) {
float scaleFactor = ((offsetLength - MIN_HIPS_OFFSET_LENGTH) / offsetLength);
glm::vec3 hipsOffset = scaleFactor * _hipsOffset;
if (_hipsParentIndex == -1) {
_relativePoses[_hipsIndex].trans() = underPoses[_hipsIndex].trans() + hipsOffset;
} else {
auto absHipsPose = _skeleton->getAbsolutePose(_hipsIndex, underPoses);
absHipsPose.trans() += hipsOffset;
_relativePoses[_hipsIndex] = _skeleton->getAbsolutePose(_hipsParentIndex, _relativePoses).inverse() * absHipsPose;
}
_relativePoses[_hipsIndex].trans() = underPoses[_hipsIndex].trans()
+ glm::inverse(glm::normalize(hipsFrameRotation)) * (scaleFactor * _hipsOffset);
}
}
// update all HipsRelative targets to account for the hips shift/ik target.
auto shiftedHipsAbsPose = _skeleton->getAbsolutePose(_hipsIndex, _relativePoses);
auto underHipsAbsPose = _skeleton->getAbsolutePose(_hipsIndex, underPoses);
auto absHipsOffset = shiftedHipsAbsPose.trans() - underHipsAbsPose.trans();
for (auto& target: targets) {
if (target.getType() == IKTarget::Type::HipsRelativeRotationAndPosition) {
auto pose = target.getPose();
pose.trans() = pose.trans() + absHipsOffset;
target.setPose(pose.rot(), pose.trans());
}
}
}
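The hips slam above converts the absolute (geometry-space) IK target pose into a parent-relative pose by pre-multiplying with the inverse of the parent's absolute pose. A standalone glm sketch of that step, using made-up example matrices:

    #include <cstdio>
    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    int main() {
        // made-up absolute poses: the hips' parent and the hips IK target
        glm::mat4 parentAbs = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 0.9f, 0.0f));
        glm::mat4 targetAbs = glm::translate(glm::mat4(1.0f), glm::vec3(0.1f, 1.0f, 0.0f));
        glm::mat4 relative = glm::inverse(parentAbs) * targetAbs; // hips pose in its parent's frame
        glm::vec3 t(relative[3]);
        std::printf("relative translation: (%g, %g, %g)\n", t.x, t.y, t.z); // (0.1, 0.1, 0)
        return 0;
    }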
{
PROFILE_RANGE_EX(simulation_animation, "ik/debugDraw", 0xffff00ff, 0);
// debug render ik targets
if (context.getEnableDebugDrawIKTargets()) {
const vec4 WHITE(1.0f);
glm::mat4 rigToAvatarMat = createMatFromQuatAndPos(Quaternions::Y_180, glm::vec3());
for (auto& target : targets) {
glm::mat4 geomTargetMat = createMatFromQuatAndPos(target.getRotation(), target.getTranslation());
glm::mat4 avatarTargetMat = rigToAvatarMat * context.getGeometryToRigMatrix() * geomTargetMat;
QString name = QString("ikTarget%1").arg(target.getIndex());
DebugDraw::getInstance().addMyAvatarMarker(name, glmExtractRotation(avatarTargetMat), extractTranslation(avatarTargetMat), WHITE);
}
} else if (context.getEnableDebugDrawIKTargets() != _previousEnableDebugIKTargets) {
// remove markers if they were added last frame.
for (auto& target : targets) {
QString name = QString("ikTarget%1").arg(target.getIndex());
DebugDraw::getInstance().removeMyAvatarMarker(name);
}
}
_previousEnableDebugIKTargets = context.getEnableDebugDrawIKTargets();
}
{
@ -506,64 +528,70 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
solveWithCyclicCoordinateDescent(targets);
}
{
if (_hipsTargetIndex < 0) {
PROFILE_RANGE_EX(simulation_animation, "ik/measureHipsOffset", 0xffff00ff, 0);
// measure new _hipsOffset for next frame
// by looking for discrepancies between where a targeted endEffector is
// and where it wants to be (after IK solutions are done)
glm::vec3 newHipsOffset = Vectors::ZERO;
for (auto& target: targets) {
int targetIndex = target.getIndex();
if (targetIndex == _headIndex && _headIndex != -1) {
// special handling for headTarget
if (target.getType() == IKTarget::Type::RotationOnly) {
// we want to shift the hips to bring the underPose closer
// to where the head happens to be (overpose)
glm::vec3 under = _skeleton->getAbsolutePose(_headIndex, underPoses).trans();
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans();
const float HEAD_OFFSET_SLAVE_FACTOR = 0.65f;
newHipsOffset += HEAD_OFFSET_SLAVE_FACTOR * (actual - under);
} else if (target.getType() == IKTarget::Type::HmdHead) {
// we want to shift the hips to bring the head to its designated position
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans();
_hipsOffset += target.getTranslation() - actual;
// and ignore all other targets
newHipsOffset = _hipsOffset;
break;
} else if (target.getType() == IKTarget::Type::RotationAndPosition) {
glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans();
glm::vec3 targetPosition = target.getTranslation();
newHipsOffset += targetPosition - actualPosition;
// Add downward pressure on the hips
newHipsOffset *= 0.95f;
newHipsOffset -= 1.0f;
}
} else if (target.getType() == IKTarget::Type::RotationAndPosition) {
glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans();
glm::vec3 targetPosition = target.getTranslation();
newHipsOffset += targetPosition - actualPosition;
}
}
// smooth transitions by relaxing _hipsOffset toward the new value
const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.10f;
float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
_hipsOffset += (newHipsOffset - _hipsOffset) * tau;
// clamp the hips offset
float hipsOffsetLength = glm::length(_hipsOffset);
if (hipsOffsetLength > _maxHipsOffsetLength) {
_hipsOffset *= _maxHipsOffsetLength / hipsOffsetLength;
}
computeHipsOffset(targets, underPoses, dt);
} else {
_hipsOffset = Vectors::ZERO;
}
}
}
return _relativePoses;
}
void AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt) {
// measure new _hipsOffset for next frame
// by looking for discrepancies between where a targeted endEffector is
// and where it wants to be (after IK solutions are done)
glm::vec3 newHipsOffset = Vectors::ZERO;
for (auto& target: targets) {
int targetIndex = target.getIndex();
if (targetIndex == _headIndex && _headIndex != -1) {
// special handling for headTarget
if (target.getType() == IKTarget::Type::RotationOnly) {
// we want to shift the hips to bring the underPose closer
// to where the head happens to be (overpose)
glm::vec3 under = _skeleton->getAbsolutePose(_headIndex, underPoses).trans();
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans();
const float HEAD_OFFSET_SLAVE_FACTOR = 0.65f;
newHipsOffset += HEAD_OFFSET_SLAVE_FACTOR * (actual - under);
} else if (target.getType() == IKTarget::Type::HmdHead) {
// we want to shift the hips to bring the head to its designated position
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans();
_hipsOffset += target.getTranslation() - actual;
// and ignore all other targets
newHipsOffset = _hipsOffset;
break;
} else if (target.getType() == IKTarget::Type::RotationAndPosition) {
glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans();
glm::vec3 targetPosition = target.getTranslation();
newHipsOffset += targetPosition - actualPosition;
// Add downward pressure on the hips
const float PRESSURE_SCALE_FACTOR = 0.95f;
const float PRESSURE_TRANSLATION_OFFSET = 1.0f;
newHipsOffset *= PRESSURE_SCALE_FACTOR;
newHipsOffset -= PRESSURE_TRANSLATION_OFFSET;
}
} else if (target.getType() == IKTarget::Type::RotationAndPosition) {
glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans();
glm::vec3 targetPosition = target.getTranslation();
newHipsOffset += targetPosition - actualPosition;
}
}
// smooth transitions by relaxing _hipsOffset toward the new value
const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.10f;
float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
_hipsOffset += (newHipsOffset - _hipsOffset) * tau;
// clamp the hips offset
float hipsOffsetLength = glm::length(_hipsOffset);
if (hipsOffsetLength > _maxHipsOffsetLength) {
_hipsOffset *= _maxHipsOffsetLength / hipsOffsetLength;
}
}
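The tau blend above is a first-order low-pass: each frame the offset moves dt / HIPS_OFFSET_SLAVE_TIMESCALE of the remaining distance toward the new measurement (clamped to 1). A standalone sketch with an assumed 90 Hz frame time shows roughly 11% convergence per frame:

    #include <cstdio>

    int main() {
        const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.10f;
        const float dt = 1.0f / 90.0f;                      // assumed 90 Hz frame time
        float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
        float offset = 0.0f;
        const float target = 0.10f;                         // metres
        for (int frame = 0; frame < 5; frame++) {
            offset += (target - offset) * tau;              // same relaxation step as _hipsOffset
            std::printf("frame %d: offset %.4f m\n", frame, offset);
        }
        return 0;
    }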
void AnimInverseKinematics::setMaxHipsOffsetLength(float maxLength) {
// manually adjust scale here
const float METERS_TO_CENTIMETERS = 100.0f;
@ -594,6 +622,22 @@ void AnimInverseKinematics::clearConstraints() {
_constraints.clear();
}
// set up swing limits around a swingTwistConstraint in an ellipse, where lateralSwingTheta is the swing limit for lateral swings (side to side)
// anteriorSwingTheta is the swing limit for forward and backward swings (where the x-axis of the reference rotation is sideways and the -z-axis is forward)
static void setEllipticalSwingLimits(SwingTwistConstraint* stConstraint, float lateralSwingTheta, float anteriorSwingTheta) {
assert(stConstraint);
const int NUM_SUBDIVISIONS = 8;
std::vector<float> minDots;
minDots.reserve(NUM_SUBDIVISIONS);
float dTheta = TWO_PI / NUM_SUBDIVISIONS;
float theta = 0.0f;
for (int i = 0; i < NUM_SUBDIVISIONS; i++) {
minDots.push_back(cosf(glm::length(glm::vec2(anteriorSwingTheta * cosf(theta), lateralSwingTheta * sinf(theta)))));
theta += dTheta;
}
stConstraint->setSwingLimits(minDots);
}
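To see what setEllipticalSwingLimits produces, the sketch below evaluates the same ellipse with the Spine limits used further down (PI/30 lateral, PI/20 anterior). It is standalone and only assumes the standard C math header:

    #include <cmath>
    #include <cstdio>

    int main() {
        const float PI = 3.14159265f;
        const float TWO_PI = 2.0f * PI;
        const float lateralSwingTheta = PI / 30.0f;   // Spine lateral limit used below
        const float anteriorSwingTheta = PI / 20.0f;  // Spine anterior limit used below
        const int NUM_SUBDIVISIONS = 8;
        float theta = 0.0f;
        const float dTheta = TWO_PI / NUM_SUBDIVISIONS;
        for (int i = 0; i < NUM_SUBDIVISIONS; i++) {
            // the swing limit interpolates around the ellipse between the two extremes
            float swing = std::hypot(anteriorSwingTheta * std::cos(theta), lateralSwingTheta * std::sin(theta));
            std::printf("subdivision %d: swing %.3f rad, minDot %.4f\n", i, swing, std::cos(swing));
            theta += dTheta;
        }
        return 0;
    }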
void AnimInverseKinematics::initConstraints() {
if (!_skeleton) {
return;
@ -783,41 +827,31 @@ void AnimInverseKinematics::initConstraints() {
} else if (baseName.startsWith("Spine", Qt::CaseSensitive)) {
SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
stConstraint->setReferenceRotation(_defaultRelativePoses[i].rot());
const float MAX_SPINE_TWIST = PI / 12.0f;
const float MAX_SPINE_TWIST = PI / 20.0f;
stConstraint->setTwistLimits(-MAX_SPINE_TWIST, MAX_SPINE_TWIST);
std::vector<float> minDots;
const float MAX_SPINE_SWING = PI / 10.0f;
minDots.push_back(cosf(MAX_SPINE_SWING));
stConstraint->setSwingLimits(minDots);
// limit lateral swings more than forward-backward swings
const float MAX_SPINE_LATERAL_SWING = PI / 30.0f;
const float MAX_SPINE_ANTERIOR_SWING = PI / 20.0f;
setEllipticalSwingLimits(stConstraint, MAX_SPINE_LATERAL_SWING, MAX_SPINE_ANTERIOR_SWING);
if (0 == baseName.compare("Spine1", Qt::CaseSensitive)
|| 0 == baseName.compare("Spine", Qt::CaseSensitive)) {
stConstraint->setLowerSpine(true);
}
constraint = static_cast<RotationConstraint*>(stConstraint);
} else if (baseName.startsWith("Hips2", Qt::CaseSensitive)) {
SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
stConstraint->setReferenceRotation(_defaultRelativePoses[i].rot());
const float MAX_SPINE_TWIST = PI / 8.0f;
stConstraint->setTwistLimits(-MAX_SPINE_TWIST, MAX_SPINE_TWIST);
std::vector<float> minDots;
const float MAX_SPINE_SWING = PI / 14.0f;
minDots.push_back(cosf(MAX_SPINE_SWING));
stConstraint->setSwingLimits(minDots);
constraint = static_cast<RotationConstraint*>(stConstraint);
} else if (0 == baseName.compare("Neck", Qt::CaseSensitive)) {
SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
stConstraint->setReferenceRotation(_defaultRelativePoses[i].rot());
const float MAX_NECK_TWIST = PI / 9.0f;
const float MAX_NECK_TWIST = PI / 10.0f;
stConstraint->setTwistLimits(-MAX_NECK_TWIST, MAX_NECK_TWIST);
std::vector<float> minDots;
const float MAX_NECK_SWING = PI / 8.0f;
minDots.push_back(cosf(MAX_NECK_SWING));
stConstraint->setSwingLimits(minDots);
// limit lateral swings more than forward-backward swings
const float MAX_NECK_LATERAL_SWING = PI / 10.0f;
const float MAX_NECK_ANTERIOR_SWING = PI / 8.0f;
setEllipticalSwingLimits(stConstraint, MAX_NECK_LATERAL_SWING, MAX_NECK_ANTERIOR_SWING);
constraint = static_cast<RotationConstraint*>(stConstraint);
} else if (0 == baseName.compare("Head", Qt::CaseSensitive)) {
@ -872,7 +906,7 @@ void AnimInverseKinematics::initConstraints() {
// we determine the max/min angles by rotating the swing limit lines from parent- to child-frame
// then measure the angles to swing the yAxis into alignment
const float MIN_KNEE_ANGLE = 0.0f;
const float MIN_KNEE_ANGLE = 0.097f; // ~5 deg
const float MAX_KNEE_ANGLE = 7.0f * PI / 8.0f;
glm::quat invReferenceRotation = glm::inverse(referenceRotation);
glm::vec3 minSwingAxis = invReferenceRotation * glm::angleAxis(MIN_KNEE_ANGLE, hingeAxis) * Vectors::UNIT_Y;

View file

@ -55,6 +55,7 @@ protected:
RotationConstraint* getConstraint(int index);
void clearConstraints();
void initConstraints();
void computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt);
// no copies
AnimInverseKinematics(const AnimInverseKinematics&) = delete;
@ -91,6 +92,7 @@ protected:
int _headIndex { -1 };
int _hipsIndex { -1 };
int _hipsParentIndex { -1 };
int _hipsTargetIndex { -1 };
// _maxTargetIndex is tracked to help optimize the recalculation of absolute poses
// during the cyclic coordinate descent algorithm

View file

@ -12,6 +12,16 @@
#include "AnimUtil.h"
#include "AnimationLogging.h"
AnimManipulator::JointVar::JointVar(const QString& jointNameIn, Type rotationTypeIn, Type translationTypeIn,
const QString& rotationVarIn, const QString& translationVarIn) :
jointName(jointNameIn),
rotationType(rotationTypeIn),
translationType(translationTypeIn),
rotationVar(rotationVarIn),
translationVar(translationVarIn),
jointIndex(-1),
hasPerformedJointLookup(false) {}
AnimManipulator::AnimManipulator(const QString& id, float alpha) :
AnimNode(AnimNode::Type::Manipulator, id),
_alpha(alpha) {
@ -36,7 +46,10 @@ const AnimPoseVec& AnimManipulator::overlay(const AnimVariantMap& animVars, cons
}
for (auto& jointVar : _jointVars) {
if (!jointVar.hasPerformedJointLookup) {
// map from joint name to joint index and cache the result.
jointVar.jointIndex = _skeleton->nameToJointIndex(jointVar.jointName);
if (jointVar.jointIndex < 0) {
qCWarning(animation) << "AnimManipulator could not find jointName" << jointVar.jointName << "in skeleton";
@ -100,34 +113,62 @@ AnimPose AnimManipulator::computeRelativePoseFromJointVar(const AnimVariantMap&
AnimPose defaultAbsPose = _skeleton->getAbsolutePose(jointVar.jointIndex, underPoses);
if (jointVar.type == JointVar::Type::AbsoluteRotation || jointVar.type == JointVar::Type::AbsolutePosition) {
// compute relative translation
glm::vec3 relTrans;
switch (jointVar.translationType) {
case JointVar::Type::Absolute: {
glm::vec3 absTrans = animVars.lookupRigToGeometry(jointVar.translationVar, defaultAbsPose.trans());
if (jointVar.type == JointVar::Type::AbsoluteRotation) {
defaultAbsPose.rot() = animVars.lookupRigToGeometry(jointVar.var, defaultAbsPose.rot());
} else if (jointVar.type == JointVar::Type::AbsolutePosition) {
defaultAbsPose.trans() = animVars.lookupRigToGeometry(jointVar.var, defaultAbsPose.trans());
// convert from absolute to relative.
AnimPose parentAbsPose;
int parentIndex = _skeleton->getParentIndex(jointVar.jointIndex);
if (parentIndex >= 0) {
parentAbsPose = _skeleton->getAbsolutePose(parentIndex, underPoses);
}
// convert from absolute to relative
relTrans = transformPoint(parentAbsPose.inverse(), absTrans);
break;
}
// because jointVar is absolute, we must use an absolute parent frame to convert into a relative pose.
AnimPose parentAbsPose = AnimPose::identity;
int parentIndex = _skeleton->getParentIndex(jointVar.jointIndex);
if (parentIndex >= 0) {
parentAbsPose = _skeleton->getAbsolutePose(parentIndex, underPoses);
}
// convert from absolute to relative
return parentAbsPose.inverse() * defaultAbsPose;
} else {
// override the default rel pose
AnimPose relPose = defaultRelPose;
if (jointVar.type == JointVar::Type::RelativeRotation) {
relPose.rot() = animVars.lookupRigToGeometry(jointVar.var, defaultRelPose.rot());
} else if (jointVar.type == JointVar::Type::RelativePosition) {
relPose.trans() = animVars.lookupRigToGeometry(jointVar.var, defaultRelPose.trans());
}
return relPose;
case JointVar::Type::Relative:
relTrans = animVars.lookupRigToGeometryVector(jointVar.translationVar, defaultRelPose.trans());
break;
case JointVar::Type::UnderPose:
relTrans = underPoses[jointVar.jointIndex].trans();
break;
case JointVar::Type::Default:
default:
relTrans = defaultRelPose.trans();
break;
}
glm::quat relRot;
switch (jointVar.rotationType) {
case JointVar::Type::Absolute: {
glm::quat absRot = animVars.lookupRigToGeometry(jointVar.translationVar, defaultAbsPose.rot());
// convert from absolute to relative.
AnimPose parentAbsPose;
int parentIndex = _skeleton->getParentIndex(jointVar.jointIndex);
if (parentIndex >= 0) {
parentAbsPose = _skeleton->getAbsolutePose(parentIndex, underPoses);
}
// convert from absolute to relative
relRot = glm::inverse(parentAbsPose.rot()) * absRot;
break;
}
case JointVar::Type::Relative:
relRot = animVars.lookupRigToGeometry(jointVar.translationVar, defaultRelPose.rot());
break;
case JointVar::Type::UnderPose:
relRot = underPoses[jointVar.jointIndex].rot();
break;
case JointVar::Type::Default:
default:
relRot = defaultRelPose.rot();
break;
}
return AnimPose(glm::vec3(1), relRot, relTrans);
}

View file

@ -31,17 +31,20 @@ public:
struct JointVar {
enum class Type {
AbsoluteRotation = 0,
AbsolutePosition,
RelativeRotation,
RelativePosition,
Absolute,
Relative,
UnderPose,
Default,
NumTypes
};
JointVar(const QString& varIn, const QString& jointNameIn, Type typeIn) : var(varIn), jointName(jointNameIn), type(typeIn), jointIndex(-1), hasPerformedJointLookup(false) {}
QString var = "";
JointVar(const QString& jointNameIn, Type rotationType, Type translationType, const QString& rotationVarIn, const QString& translationVarIn);
QString jointName = "";
Type type = Type::AbsoluteRotation;
Type rotationType = Type::Absolute;
Type translationType = Type::Absolute;
QString rotationVar = "";
QString translationVar = "";
int jointIndex = -1;
bool hasPerformedJointLookup = false;
bool isRelative = false;

View file

@ -79,10 +79,10 @@ static AnimStateMachine::InterpType stringToInterpType(const QString& str) {
static const char* animManipulatorJointVarTypeToString(AnimManipulator::JointVar::Type type) {
switch (type) {
case AnimManipulator::JointVar::Type::AbsoluteRotation: return "absoluteRotation";
case AnimManipulator::JointVar::Type::AbsolutePosition: return "absolutePosition";
case AnimManipulator::JointVar::Type::RelativeRotation: return "relativeRotation";
case AnimManipulator::JointVar::Type::RelativePosition: return "relativePosition";
case AnimManipulator::JointVar::Type::Absolute: return "absolute";
case AnimManipulator::JointVar::Type::Relative: return "relative";
case AnimManipulator::JointVar::Type::UnderPose: return "underPose";
case AnimManipulator::JointVar::Type::Default: return "default";
case AnimManipulator::JointVar::Type::NumTypes: return nullptr;
};
return nullptr;
@ -339,7 +339,8 @@ static const char* boneSetStrings[AnimOverlay::NumBoneSets] = {
"spineOnly",
"empty",
"leftHand",
"rightHand"
"rightHand",
"hipsOnly"
};
static AnimOverlay::BoneSet stringToBoneSetEnum(const QString& str) {
@ -406,17 +407,25 @@ static AnimNode::Pointer loadManipulatorNode(const QJsonObject& jsonObj, const Q
}
auto jointObj = jointValue.toObject();
READ_STRING(type, jointObj, id, jsonUrl, nullptr);
READ_STRING(jointName, jointObj, id, jsonUrl, nullptr);
READ_STRING(var, jointObj, id, jsonUrl, nullptr);
READ_STRING(rotationType, jointObj, id, jsonUrl, nullptr);
READ_STRING(translationType, jointObj, id, jsonUrl, nullptr);
READ_STRING(rotationVar, jointObj, id, jsonUrl, nullptr);
READ_STRING(translationVar, jointObj, id, jsonUrl, nullptr);
AnimManipulator::JointVar::Type jointVarType = stringToAnimManipulatorJointVarType(type);
if (jointVarType == AnimManipulator::JointVar::Type::NumTypes) {
qCCritical(animation) << "AnimNodeLoader, bad type in \"joints\", id =" << id << ", url =" << jsonUrl.toDisplayString();
return nullptr;
AnimManipulator::JointVar::Type jointVarRotationType = stringToAnimManipulatorJointVarType(rotationType);
if (jointVarRotationType == AnimManipulator::JointVar::Type::NumTypes) {
qCWarning(animation) << "AnimNodeLoader, bad rotationType in \"joints\", id =" << id << ", url =" << jsonUrl.toDisplayString();
jointVarRotationType = AnimManipulator::JointVar::Type::Default;
}
AnimManipulator::JointVar jointVar(var, jointName, jointVarType);
AnimManipulator::JointVar::Type jointVarTranslationType = stringToAnimManipulatorJointVarType(translationType);
if (jointVarTranslationType == AnimManipulator::JointVar::Type::NumTypes) {
qCWarning(animation) << "AnimNodeLoader, bad translationType in \"joints\", id =" << id << ", url =" << jsonUrl.toDisplayString();
jointVarTranslationType = AnimManipulator::JointVar::Type::Default;
}
AnimManipulator::JointVar jointVar(jointName, jointVarRotationType, jointVarTranslationType, rotationVar, translationVar);
node->addJointVar(jointVar);
};

View file

@ -34,6 +34,7 @@ void AnimOverlay::buildBoneSet(BoneSet boneSet) {
case SpineOnlyBoneSet: buildSpineOnlyBoneSet(); break;
case LeftHandBoneSet: buildLeftHandBoneSet(); break;
case RightHandBoneSet: buildRightHandBoneSet(); break;
case HipsOnlyBoneSet: buildHipsOnlyBoneSet(); break;
default:
case EmptyBoneSet: buildEmptyBoneSet(); break;
}
@ -188,6 +189,13 @@ void AnimOverlay::buildRightHandBoneSet() {
});
}
void AnimOverlay::buildHipsOnlyBoneSet() {
assert(_skeleton);
buildEmptyBoneSet();
int hipsJoint = _skeleton->nameToJointIndex("Hips");
_boneSetVec[hipsJoint] = 1.0f;
}
// for AnimDebugDraw rendering
const AnimPoseVec& AnimOverlay::getPosesInternal() const {
return _poses;

View file

@ -37,6 +37,7 @@ public:
EmptyBoneSet,
LeftHandBoneSet,
RightHandBoneSet,
HipsOnlyBoneSet,
NumBoneSets
};
@ -75,6 +76,7 @@ public:
void buildEmptyBoneSet();
void buildLeftHandBoneSet();
void buildRightHandBoneSet();
void buildHipsOnlyBoneSet();
// no copies
AnimOverlay(const AnimOverlay&) = delete;

View file

@ -165,6 +165,15 @@ public:
}
}
glm::vec3 lookupRigToGeometryVector(const QString& key, const glm::vec3& defaultValue) const {
if (key.isEmpty()) {
return defaultValue;
} else {
auto iter = _map.find(key);
return iter != _map.end() ? transformVectorFast(_rigToGeometryMat, iter->second.getVec3()) : defaultValue;
}
}
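lookupRigToGeometryVector transforms the looked-up value as a direction (rotation and scale only, no translation), unlike the point transform used for positions. A standalone glm sketch of the difference, with a made-up rig-to-geometry matrix:

    #include <cstdio>
    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    int main() {
        // made-up rig-to-geometry matrix: a pure translation of -1 on y
        glm::mat4 rigToGeometry = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, -1.0f, 0.0f));
        glm::vec3 v(0.0f, 0.5f, 0.0f);
        glm::vec3 asVector = glm::vec3(rigToGeometry * glm::vec4(v, 0.0f)); // direction: (0, 0.5, 0)
        glm::vec3 asPoint  = glm::vec3(rigToGeometry * glm::vec4(v, 1.0f)); // point:     (0, -0.5, 0)
        std::printf("vector (%g %g %g)  point (%g %g %g)\n",
                    asVector.x, asVector.y, asVector.z, asPoint.x, asPoint.y, asPoint.z);
        return 0;
    }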
const glm::quat& lookupRaw(const QString& key, const glm::quat& defaultValue) const {
if (key.isEmpty()) {
return defaultValue;

View file

@ -21,13 +21,14 @@ public:
RotationOnly,
HmdHead,
HipsRelativeRotationAndPosition,
Unknown,
Unknown
};
IKTarget() {}
const glm::vec3& getTranslation() const { return _pose.trans(); }
const glm::quat& getRotation() const { return _pose.rot(); }
const AnimPose& getPose() const { return _pose; }
int getIndex() const { return _index; }
Type getType() const { return _type; }

View file

@ -46,7 +46,6 @@ static bool isEqual(const glm::quat& p, const glm::quat& q) {
const glm::vec3 DEFAULT_RIGHT_EYE_POS(-0.3f, 0.9f, 0.0f);
const glm::vec3 DEFAULT_LEFT_EYE_POS(0.3f, 0.9f, 0.0f);
const glm::vec3 DEFAULT_HEAD_POS(0.0f, 0.75f, 0.0f);
const glm::vec3 DEFAULT_NECK_POS(0.0f, 0.70f, 0.0f);
void Rig::overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame) {
@ -1020,98 +1019,81 @@ glm::quat Rig::getJointDefaultRotationInParentFrame(int jointIndex) {
}
void Rig::updateFromHeadParameters(const HeadParameters& params, float dt) {
updateNeckJoint(params.neckJointIndex, params);
updateHeadAnimVars(params);
_animVars.set("isTalking", params.isTalking);
_animVars.set("notIsTalking", !params.isTalking);
if (params.hipsEnabled) {
_animVars.set("hipsType", (int)IKTarget::Type::RotationAndPosition);
_animVars.set("hipsPosition", extractTranslation(params.hipsMatrix));
_animVars.set("hipsRotation", glmExtractRotation(params.hipsMatrix));
} else {
_animVars.set("hipsType", (int)IKTarget::Type::Unknown);
}
if (params.spine2Enabled) {
_animVars.set("spine2Type", (int)IKTarget::Type::RotationAndPosition);
_animVars.set("spine2Position", extractTranslation(params.spine2Matrix));
_animVars.set("spine2Rotation", glmExtractRotation(params.spine2Matrix));
} else {
_animVars.set("spine2Type", (int)IKTarget::Type::Unknown);
}
}
void Rig::updateFromEyeParameters(const EyeParameters& params) {
updateEyeJoint(params.leftEyeJointIndex, params.modelTranslation, params.modelRotation,
params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoint(params.rightEyeJointIndex, params.modelTranslation, params.modelRotation,
params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoint(params.leftEyeJointIndex, params.modelTranslation, params.modelRotation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoint(params.rightEyeJointIndex, params.modelTranslation, params.modelRotation, params.eyeLookAt, params.eyeSaccade);
}
void Rig::computeHeadNeckAnimVars(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut,
glm::vec3& neckPositionOut, glm::quat& neckOrientationOut) const {
void Rig::computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const {
// the input hmd values are in avatar/rig space
const glm::vec3& hmdPosition = hmdPose.trans();
const glm::quat& hmdOrientation = hmdPose.rot();
// the HMD looks down the negative z axis, but the head bone looks down the z axis, so apply a 180 degree rotation.
const glm::quat& hmdOrientation = hmdPose.rot() * Quaternions::Y_180;
// TODO: cache jointIndices
int rightEyeIndex = indexOfJoint("RightEye");
int leftEyeIndex = indexOfJoint("LeftEye");
int headIndex = indexOfJoint("Head");
int neckIndex = indexOfJoint("Neck");
glm::vec3 absRightEyePos = rightEyeIndex != -1 ? getAbsoluteDefaultPose(rightEyeIndex).trans() : DEFAULT_RIGHT_EYE_POS;
glm::vec3 absLeftEyePos = leftEyeIndex != -1 ? getAbsoluteDefaultPose(leftEyeIndex).trans() : DEFAULT_LEFT_EYE_POS;
glm::vec3 absHeadPos = headIndex != -1 ? getAbsoluteDefaultPose(headIndex).trans() : DEFAULT_HEAD_POS;
glm::vec3 absNeckPos = neckIndex != -1 ? getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_NECK_POS;
glm::vec3 absCenterEyePos = (absRightEyePos + absLeftEyePos) / 2.0f;
glm::vec3 eyeOffset = absCenterEyePos - absHeadPos;
glm::vec3 headOffset = absHeadPos - absNeckPos;
// apply simplistic head/neck model
// head
headPositionOut = hmdPosition - hmdOrientation * eyeOffset;
headOrientationOut = hmdOrientation;
// neck
neckPositionOut = hmdPosition - hmdOrientation * (headOffset + eyeOffset);
// slerp between default orientation and hmdOrientation
neckOrientationOut = safeMix(hmdOrientation, _animSkeleton->getRelativeDefaultPose(neckIndex).rot(), 0.5f);
}
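computeHeadFromHMD places the head joint behind the HMD (eye-center) position by the default-pose eye offset, rotated into the current HMD frame. A standalone glm sketch of that offset model, with made-up default-pose numbers in rig space:

    #include <cstdio>
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    int main() {
        // made-up default-pose positions in rig space
        glm::vec3 defaultLeftEye(0.03f, 1.68f, -0.08f);
        glm::vec3 defaultRightEye(-0.03f, 1.68f, -0.08f);
        glm::vec3 defaultHead(0.0f, 1.60f, 0.0f);
        glm::vec3 eyeOffset = 0.5f * (defaultLeftEye + defaultRightEye) - defaultHead;

        // current HMD pose in rig space (identity orientation for simplicity)
        glm::vec3 hmdPosition(0.0f, 1.75f, 0.2f);
        glm::quat hmdOrientation(1.0f, 0.0f, 0.0f, 0.0f);

        glm::vec3 headPosition = hmdPosition - hmdOrientation * eyeOffset;
        std::printf("head joint at (%g, %g, %g)\n", headPosition.x, headPosition.y, headPosition.z);
        return 0;
    }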
void Rig::updateNeckJoint(int index, const HeadParameters& params) {
if (_animSkeleton && index >= 0 && index < _animSkeleton->getNumJoints()) {
glm::quat yFlip180 = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
if (params.isInHMD) {
glm::vec3 headPos, neckPos;
glm::quat headRot, neckRot;
AnimPose hmdPose(glm::vec3(1.0f), params.rigHeadOrientation * yFlip180, params.rigHeadPosition);
computeHeadNeckAnimVars(hmdPose, headPos, headRot, neckPos, neckRot);
// debug rendering
#ifdef DEBUG_RENDERING
const glm::vec4 red(1.0f, 0.0f, 0.0f, 1.0f);
const glm::vec4 green(0.0f, 1.0f, 0.0f, 1.0f);
// transform from bone into avatar space
AnimPose headPose(glm::vec3(1), headRot, headPos);
DebugDraw::getInstance().addMyAvatarMarker("headTarget", headPose.rot, headPose.trans, red);
// transform from bone into avatar space
AnimPose neckPose(glm::vec3(1), neckRot, neckPos);
DebugDraw::getInstance().addMyAvatarMarker("neckTarget", neckPose.rot, neckPose.trans, green);
#endif
_animVars.set("headPosition", headPos);
_animVars.set("headRotation", headRot);
_animVars.set("headType", (int)IKTarget::Type::HmdHead);
_animVars.set("neckPosition", neckPos);
_animVars.set("neckRotation", neckRot);
_animVars.set("neckType", (int)IKTarget::Type::Unknown); // 'Unknown' disables the target
void Rig::updateHeadAnimVars(const HeadParameters& params) {
if (_animSkeleton) {
if (params.headEnabled) {
_animVars.set("headPosition", params.rigHeadPosition);
_animVars.set("headRotation", params.rigHeadOrientation);
if (params.hipsEnabled) {
// Since there is an explicit hips ik target, switch the head to use the more generic RotationAndPosition IK chain type.
// this will allow the spine to bend more, ensuring that it can reach the head target position.
_animVars.set("headType", (int)IKTarget::Type::RotationAndPosition);
} else {
// When there is no hips IK target, use the HmdHead IK chain type. This will make the spine very stiff,
// but because the IK _hipsOffset is enabled, the hips will naturally follow underneath the head.
_animVars.set("headType", (int)IKTarget::Type::HmdHead);
}
} else {
_animVars.unset("headPosition");
_animVars.set("headRotation", params.rigHeadOrientation * yFlip180);
_animVars.set("headAndNeckType", (int)IKTarget::Type::RotationOnly);
_animVars.set("headRotation", params.rigHeadOrientation);
_animVars.set("headType", (int)IKTarget::Type::RotationOnly);
_animVars.unset("neckPosition");
_animVars.unset("neckRotation");
_animVars.set("neckType", (int)IKTarget::Type::RotationOnly);
}
}
}
void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
// TODO: does not properly handle avatar scale.
@ -1161,13 +1143,19 @@ void Rig::updateFromHandAndFeetParameters(const HandAndFeetParameters& params, f
const glm::vec3 bodyCapsuleStart = bodyCapsuleCenter - glm::vec3(0, params.bodyCapsuleHalfHeight, 0);
const glm::vec3 bodyCapsuleEnd = bodyCapsuleCenter + glm::vec3(0, params.bodyCapsuleHalfHeight, 0);
// TODO: add isHipsEnabled
bool bodySensorTrackingEnabled = params.isLeftFootEnabled || params.isRightFootEnabled;
if (params.isLeftEnabled) {
// prevent the hand IK targets from intersecting the body capsule
glm::vec3 handPosition = params.leftPosition;
glm::vec3 displacement;
if (findSphereCapsulePenetration(handPosition, HAND_RADIUS, bodyCapsuleStart, bodyCapsuleEnd, bodyCapsuleRadius, displacement)) {
handPosition -= displacement;
if (!bodySensorTrackingEnabled) {
// prevent the hand IK targets from intersecting the body capsule
glm::vec3 displacement;
if (findSphereCapsulePenetration(handPosition, HAND_RADIUS, bodyCapsuleStart, bodyCapsuleEnd, bodyCapsuleRadius, displacement)) {
handPosition -= displacement;
}
}
_animVars.set("leftHandPosition", handPosition);
@ -1181,11 +1169,14 @@ void Rig::updateFromHandAndFeetParameters(const HandAndFeetParameters& params, f
if (params.isRightEnabled) {
// prevent the hand IK targets from intersecting the body capsule
glm::vec3 handPosition = params.rightPosition;
glm::vec3 displacement;
if (findSphereCapsulePenetration(handPosition, HAND_RADIUS, bodyCapsuleStart, bodyCapsuleEnd, bodyCapsuleRadius, displacement)) {
handPosition -= displacement;
if (!bodySensorTrackingEnabled) {
// prevent the hand IK targets from intersecting the body capsule
glm::vec3 displacement;
if (findSphereCapsulePenetration(handPosition, HAND_RADIUS, bodyCapsuleStart, bodyCapsuleEnd, bodyCapsuleRadius, displacement)) {
handPosition -= displacement;
}
}
_animVars.set("rightHandPosition", handPosition);

View file

@ -42,16 +42,17 @@ public:
};
struct HeadParameters {
glm::quat worldHeadOrientation = glm::quat(); // world space (-z forward)
glm::quat rigHeadOrientation = glm::quat(); // rig space (-z forward)
glm::vec3 rigHeadPosition = glm::vec3(); // rig space
bool isInHMD = false;
int neckJointIndex = -1;
glm::mat4 hipsMatrix = glm::mat4(); // rig space
glm::mat4 spine2Matrix = glm::mat4(); // rig space
glm::quat rigHeadOrientation = glm::quat(); // rig space (-z forward)
glm::vec3 rigHeadPosition = glm::vec3(); // rig space
bool hipsEnabled = false;
bool headEnabled = false;
bool spine2Enabled = false;
bool isTalking = false;
};
struct EyeParameters {
glm::quat worldHeadOrientation = glm::quat();
glm::vec3 eyeLookAt = glm::vec3(); // world space
glm::vec3 eyeSaccade = glm::vec3(); // world space
glm::vec3 modelTranslation = glm::vec3();
@ -228,6 +229,9 @@ public:
void setEnableDebugDrawIKTargets(bool enableDebugDrawIKTargets) { _enableDebugDrawIKTargets = enableDebugDrawIKTargets; }
// input assumed to be in rig space
void computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const;
signals:
void onLoadComplete();
@ -237,10 +241,9 @@ protected:
void applyOverridePoses();
void buildAbsoluteRigPoses(const AnimPoseVec& relativePoses, AnimPoseVec& absolutePosesOut);
void updateNeckJoint(int index, const HeadParameters& params);
void computeHeadNeckAnimVars(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut,
glm::vec3& neckPositionOut, glm::quat& neckOrientationOut) const;
void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
void updateHeadAnimVars(const HeadParameters& params);
void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::vec3& lookAt, const glm::vec3& saccade);
void calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds, float* alphaOut) const;
AnimPose _modelOffset; // model to rig space

View file

@ -393,9 +393,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
if (isFingerPointing) {
setAtBit(flags, HAND_STATE_FINGER_POINTING_BIT);
}
// faceshift state
// face tracker state
if (_headData->_isFaceTrackerConnected) {
setAtBit(flags, IS_FACESHIFT_CONNECTED);
setAtBit(flags, IS_FACE_TRACKER_CONNECTED);
}
// eye tracker state
if (_headData->_isEyeTrackerConnected) {
@ -883,7 +883,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
auto newHandState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
+ (oneAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
auto newFaceTrackerConnected = oneAtBit(bitItems, IS_FACESHIFT_CONNECTED);
auto newFaceTrackerConnected = oneAtBit(bitItems, IS_FACE_TRACKER_CONNECTED);
auto newEyeTrackerConnected = oneAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);
bool keyStateChanged = (_keyState != newKeyState);

View file

@ -99,7 +99,7 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
// Referential Data - R is found in the 7th bit
const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits
const int IS_FACESHIFT_CONNECTED = 4; // 5th bit
const int IS_FACE_TRACKER_CONNECTED = 4; // 5th bit
const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
const int HAS_REFERENTIAL = 6; // 7th bit
const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit
@ -218,7 +218,7 @@ namespace AvatarDataPacket {
} PACKED_END;
const size_t AVATAR_LOCAL_POSITION_SIZE = 12;
// only present if IS_FACESHIFT_CONNECTED flag is set in AvatarInfo.flags
// only present if IS_FACE_TRACKER_CONNECTED flag is set in AvatarInfo.flags
PACKED_BEGIN struct FaceTrackerInfo {
float leftEyeBlink;
float rightEyeBlink;

View file

@ -53,6 +53,9 @@ namespace controller {
makePosePair(Action::RIGHT_HAND, "RightHand"),
makePosePair(Action::LEFT_FOOT, "LeftFoot"),
makePosePair(Action::RIGHT_FOOT, "RightFoot"),
makePosePair(Action::HIPS, "Hips"),
makePosePair(Action::SPINE2, "Spine2"),
makePosePair(Action::HEAD, "Head"),
makeButtonPair(Action::LEFT_HAND_CLICK, "LeftHandClick"),
makeButtonPair(Action::RIGHT_HAND_CLICK, "RightHandClick"),

View file

@ -44,6 +44,9 @@ enum class Action {
RIGHT_HAND,
LEFT_FOOT,
RIGHT_FOOT,
HIPS,
SPINE2,
HEAD,
LEFT_HAND_CLICK,
RIGHT_HAND_CLICK,

View file

@ -16,9 +16,15 @@
namespace controller {
struct InputCalibrationData {
glm::mat4 sensorToWorldMat;
glm::mat4 avatarMat;
glm::mat4 hmdSensorMat;
glm::mat4 sensorToWorldMat; // sensor to world
glm::mat4 avatarMat; // avatar to world
glm::mat4 hmdSensorMat; // hmd pos and orientation in sensor space
glm::mat4 defaultCenterEyeMat; // default pose for the center of the eyes in avatar space.
glm::mat4 defaultHeadMat; // default pose for head joint in avatar space
glm::mat4 defaultSpine2; // default pose for spine2 joint in avatar space
glm::mat4 defaultHips; // default pose for hips joint in avatar space
glm::mat4 defaultLeftFoot; // default pose for leftFoot joint in avatar space
glm::mat4 defaultRightFoot; // default pose for rightFoot joint in avatar space
};
enum class ChannelType {

View file

@ -69,5 +69,23 @@ namespace controller {
pose.valid = valid;
return pose;
}
Pose Pose::postTransform(const glm::mat4& mat) const {
glm::mat4 original = ::createMatFromQuatAndPos(rotation, translation);
glm::mat4 result = original * mat;
auto translationOut = ::extractTranslation(result);
auto rotationOut = ::glmExtractRotation(result);
auto velocityOut = velocity + glm::cross(angularVelocity, translationOut - translation); // warning: this may be completely wrong
auto angularVelocityOut = angularVelocity;
Pose pose(translationOut,
rotationOut,
velocityOut,
angularVelocityOut);
pose.valid = valid;
return pose;
}
}
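Pose::postTransform composes the offset in the pose's local frame (pose * mat), whereas the existing transform composes it in the destination frame (mat * pose). A standalone glm sketch showing how the two differ once the pose carries a rotation:

    #include <cstdio>
    #include <glm/glm.hpp>
    #include <glm/gtc/constants.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    int main() {
        // pose: rotated 90 degrees about +y, translated to x = 1
        glm::mat4 pose = glm::translate(glm::mat4(1.0f), glm::vec3(1.0f, 0.0f, 0.0f)) *
                         glm::rotate(glm::mat4(1.0f), glm::half_pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
        glm::mat4 offset = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 0.0f, -1.0f));
        glm::vec3 pre((offset * pose)[3]);   // transform():     offset in world axes -> (1, 0, -1)
        glm::vec3 post((pose * offset)[3]);  // postTransform(): offset along the pose's local -z -> (0, 0, 0)
        std::printf("pre  (%g %g %g)\npost (%g %g %g)\n", pre.x, pre.y, pre.z, post.x, post.y, post.z);
        return 0;
    }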

View file

@ -41,6 +41,7 @@ namespace controller {
vec3 getAngularVelocity() const { return angularVelocity; }
Pose transform(const glm::mat4& mat) const;
Pose postTransform(const glm::mat4& mat) const;
static QScriptValue toScriptValue(QScriptEngine* engine, const Pose& event);
static void fromScriptValue(const QScriptValue& object, Pose& event);

View file

@ -104,6 +104,9 @@ Input::NamedVector StandardController::getAvailableInputs() const {
makePair(RIGHT_HAND, "RightHand"),
makePair(LEFT_FOOT, "LeftFoot"),
makePair(RIGHT_FOOT, "RightFoot"),
makePair(HIPS, "Hips"),
makePair(SPINE2, "Spine2"),
makePair(HEAD, "Head"),
// Aliases, PlayStation style names
makePair(LB, "L1"),

View file

@ -20,6 +20,8 @@
#include <PathUtils.h>
#include <NumericalConstants.h>
#include <StreamUtils.h>
#include "StandardController.h"
#include "StateController.h"
#include "InputRecorder.h"
@ -563,7 +565,18 @@ bool UserInputMapper::applyRoute(const Route::Pointer& route, bool force) {
if (source->isPose()) {
Pose value = getPose(source, route->peek);
static const Pose IDENTITY_POSE { vec3(), quat() };
if (debugRoutes && route->debug) {
qCDebug(controllers) << "Value was t:" << value.translation << "r:" << value.rotation;
}
// Apply each of the filters.
for (const auto& filter : route->filters) {
value = filter->apply(value);
}
if (debugRoutes && route->debug) {
qCDebug(controllers) << "Filtered value was t:" << value.translation << "r:" << value.rotation;
if (!value.valid) {
qCDebug(controllers) << "Applying invalid pose";
} else if (value == IDENTITY_POSE) {

View file

@ -24,6 +24,10 @@
#include "filters/InvertFilter.h"
#include "filters/PulseFilter.h"
#include "filters/ScaleFilter.h"
#include "filters/TranslateFilter.h"
#include "filters/TransformFilter.h"
#include "filters/PostTransformFilter.h"
#include "filters/RotateFilter.h"
using namespace controller;
@ -37,6 +41,10 @@ REGISTER_FILTER_CLASS_INSTANCE(HysteresisFilter, "hysteresis")
REGISTER_FILTER_CLASS_INSTANCE(InvertFilter, "invert")
REGISTER_FILTER_CLASS_INSTANCE(ScaleFilter, "scale")
REGISTER_FILTER_CLASS_INSTANCE(PulseFilter, "pulse")
REGISTER_FILTER_CLASS_INSTANCE(TranslateFilter, "translate")
REGISTER_FILTER_CLASS_INSTANCE(TransformFilter, "transform")
REGISTER_FILTER_CLASS_INSTANCE(PostTransformFilter, "postTransform")
REGISTER_FILTER_CLASS_INSTANCE(RotateFilter, "rotate")
const QString JSON_FILTER_TYPE = QStringLiteral("type");
const QString JSON_FILTER_PARAMS = QStringLiteral("params");
@ -76,7 +84,6 @@ bool Filter::parseSingleFloatParameter(const QJsonValue& parameters, const QStri
return true;
}
} else if (parameters.isObject()) {
static const QString JSON_MIN = QStringLiteral("interval");
auto objectParameters = parameters.toObject();
if (objectParameters.contains(name)) {
output = objectParameters[name].toDouble();
@ -86,6 +93,92 @@ bool Filter::parseSingleFloatParameter(const QJsonValue& parameters, const QStri
return false;
}
bool Filter::parseVec3Parameter(const QJsonValue& parameters, glm::vec3& output) {
if (parameters.isDouble()) {
output = glm::vec3(parameters.toDouble());
return true;
} else if (parameters.isArray()) {
auto arrayParameters = parameters.toArray();
if (arrayParameters.size() == 3) {
output = glm::vec3(arrayParameters[0].toDouble(),
arrayParameters[1].toDouble(),
arrayParameters[2].toDouble());
return true;
}
} else if (parameters.isObject()) {
auto objectParameters = parameters.toObject();
if (objectParameters.contains("x") && objectParameters.contains("y") && objectParameters.contains("z")) {
output = glm::vec3(objectParameters["x"].toDouble(),
objectParameters["y"].toDouble(),
objectParameters["z"].toDouble());
return true;
}
}
return false;
}
bool Filter::parseMat4Parameter(const QJsonValue& parameters, glm::mat4& output) {
if (parameters.isObject()) {
auto objectParameters = parameters.toObject();
if (objectParameters.contains("r0c0") &&
objectParameters.contains("r1c0") &&
objectParameters.contains("r2c0") &&
objectParameters.contains("r3c0") &&
objectParameters.contains("r0c1") &&
objectParameters.contains("r1c1") &&
objectParameters.contains("r2c1") &&
objectParameters.contains("r3c1") &&
objectParameters.contains("r0c2") &&
objectParameters.contains("r1c2") &&
objectParameters.contains("r2c2") &&
objectParameters.contains("r3c2") &&
objectParameters.contains("r0c3") &&
objectParameters.contains("r1c3") &&
objectParameters.contains("r2c3") &&
objectParameters.contains("r3c3")) {
output[0][0] = objectParameters["r0c0"].toDouble();
output[0][1] = objectParameters["r1c0"].toDouble();
output[0][2] = objectParameters["r2c0"].toDouble();
output[0][3] = objectParameters["r3c0"].toDouble();
output[1][0] = objectParameters["r0c1"].toDouble();
output[1][1] = objectParameters["r1c1"].toDouble();
output[1][2] = objectParameters["r2c1"].toDouble();
output[1][3] = objectParameters["r3c1"].toDouble();
output[2][0] = objectParameters["r0c2"].toDouble();
output[2][1] = objectParameters["r1c2"].toDouble();
output[2][2] = objectParameters["r2c2"].toDouble();
output[2][3] = objectParameters["r3c2"].toDouble();
output[3][0] = objectParameters["r0c3"].toDouble();
output[3][1] = objectParameters["r1c3"].toDouble();
output[3][2] = objectParameters["r2c3"].toDouble();
output[3][3] = objectParameters["r3c3"].toDouble();
return true;
}
}
return false;
}
bool Filter::parseQuatParameter(const QJsonValue& parameters, glm::quat& output) {
if (parameters.isObject()) {
auto objectParameters = parameters.toObject();
if (objectParameters.contains("w") &&
objectParameters.contains("x") &&
objectParameters.contains("y") &&
objectParameters.contains("z")) {
output = glm::quat(objectParameters["w"].toDouble(),
objectParameters["x"].toDouble(),
objectParameters["y"].toDouble(),
objectParameters["z"].toDouble());
return true;
}
}
return false;
}
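The parsers above accept several JSON shapes for filter parameters: a vec3 may be a single number, a three-element array, or an {x, y, z} object; a quat must be a {w, x, y, z} object; a mat4 uses r0c0 through r3c3 keys. A standalone Qt sketch exercising those shapes (the key names in the sample object are illustrative only):

    #include <QDebug>
    #include <QJsonDocument>
    #include <QJsonObject>
    #include <QJsonValue>

    int main() {
        // key names ("scalarVec3" etc.) are illustrative only
        const char* json =
            "{ \"scalarVec3\": 0.5,"
            "  \"arrayVec3\": [1, 2, 3],"
            "  \"objectVec3\": { \"x\": 1, \"y\": 2, \"z\": 3 },"
            "  \"quat\": { \"w\": 1, \"x\": 0, \"y\": 0, \"z\": 0 } }";
        QJsonObject params = QJsonDocument::fromJson(json).object();
        qDebug() << params.value("scalarVec3").isDouble()  // true -> uniform vec3
                 << params.value("arrayVec3").isArray()    // true -> parsed as [x, y, z]
                 << params.value("objectVec3").isObject()  // true -> parsed via x/y/z keys
                 << params.value("quat").isObject();       // true -> parsed via w/x/y/z keys
        return 0;
    }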
#if 0

View file

@ -21,6 +21,8 @@
#include <QtCore/QEasingCurve>
#include "../Pose.h"
class QJsonValue;
namespace controller {
@ -34,6 +36,8 @@ namespace controller {
using Factory = hifi::SimpleFactory<Filter, QString>;
virtual float apply(float value) const = 0;
virtual Pose apply(Pose value) const = 0;
// Factory features
virtual bool parseParameters(const QJsonValue& parameters) { return true; }
@ -42,6 +46,9 @@ namespace controller {
static Factory& getFactory() { return _factory; }
static bool parseSingleFloatParameter(const QJsonValue& parameters, const QString& name, float& output);
static bool parseVec3Parameter(const QJsonValue& parameters, glm::vec3& output);
static bool parseQuatParameter(const QJsonValue& parameters, glm::quat& output);
static bool parseMat4Parameter(const QJsonValue& parameters, glm::mat4& output);
protected:
static Factory _factory;
};

View file

@ -26,6 +26,10 @@
#include "filters/InvertFilter.h"
#include "filters/PulseFilter.h"
#include "filters/ScaleFilter.h"
#include "filters/TranslateFilter.h"
#include "filters/TransformFilter.h"
#include "filters/PostTransformFilter.h"
#include "filters/RotateFilter.h"
#include "conditionals/AndConditional.h"
using namespace controller;
@ -103,6 +107,26 @@ QObject* RouteBuilderProxy::deadZone(float min) {
return this;
}
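// Like the filters above, each of these methods returns `this` so calls can be chained fluently while building a route.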
QObject* RouteBuilderProxy::translate(glm::vec3 translate) {
addFilter(std::make_shared<TranslateFilter>(translate));
return this;
}
QObject* RouteBuilderProxy::transform(glm::mat4 transform) {
addFilter(std::make_shared<TransformFilter>(transform));
return this;
}
QObject* RouteBuilderProxy::postTransform(glm::mat4 transform) {
addFilter(std::make_shared<PostTransformFilter>(transform));
return this;
}
QObject* RouteBuilderProxy::rotate(glm::quat rotation) {
addFilter(std::make_shared<RotateFilter>(rotation));
return this;
}
QObject* RouteBuilderProxy::constrainToInteger() {
addFilter(std::make_shared<ConstrainToIntegerFilter>());
return this;

View file

@ -48,6 +48,10 @@ class RouteBuilderProxy : public QObject {
Q_INVOKABLE QObject* deadZone(float min);
Q_INVOKABLE QObject* constrainToInteger();
Q_INVOKABLE QObject* constrainToPositiveInteger();
Q_INVOKABLE QObject* translate(glm::vec3 translate);
Q_INVOKABLE QObject* transform(glm::mat4 transform);
Q_INVOKABLE QObject* postTransform(glm::mat4 transform);
Q_INVOKABLE QObject* rotate(glm::quat rotation);
private:
void to(const Endpoint::Pointer& destination);

View file

@ -21,6 +21,9 @@ public:
virtual float apply(float value) const override {
return glm::clamp(value, _min, _max);
}
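// scalar-only filters leave Pose values untouched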
virtual Pose apply(Pose value) const override { return value; }
virtual bool parseParameters(const QJsonValue& parameters) override;
protected:
float _min = 0.0f;

View file

@ -22,6 +22,9 @@ public:
virtual float apply(float value) const override {
return glm::sign(value);
}
virtual Pose apply(Pose value) const override { return value; }
protected:
};

View file

@ -22,6 +22,9 @@ public:
virtual float apply(float value) const override {
return (value <= 0.0f) ? 0.0f : 1.0f;
}
virtual Pose apply(Pose value) const override { return value; }
protected:
};

View file

@ -20,6 +20,9 @@ public:
DeadZoneFilter(float min = 0.0) : _min(min) {};
virtual float apply(float value) const override;
virtual Pose apply(Pose value) const override { return value; }
virtual bool parseParameters(const QJsonValue& parameters) override;
protected:
float _min = 0.0f;

View file

@ -19,6 +19,9 @@ class HysteresisFilter : public Filter {
public:
HysteresisFilter(float min = 0.25, float max = 0.75);
virtual float apply(float value) const override;
virtual Pose apply(Pose value) const override { return value; }
virtual bool parseParameters(const QJsonValue& parameters) override;
protected:
float _min;

View file

@ -0,0 +1,33 @@
//
// Created by Brad Hefta-Gaub 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Controllers_Filters_PostTransform_h
#define hifi_Controllers_Filters_PostTransform_h
#include <glm/gtx/transform.hpp>
#include "../Filter.h"
namespace controller {
class PostTransformFilter : public Filter {
REGISTER_FILTER_CLASS(PostTransformFilter);
public:
PostTransformFilter() { }
PostTransformFilter(glm::mat4 transform) : _transform(transform) {}
virtual float apply(float value) const override { return value; }
virtual Pose apply(Pose value) const override { return value.postTransform(_transform); }
virtual bool parseParameters(const QJsonValue& parameters) override { return parseMat4Parameter(parameters, _transform); }
private:
glm::mat4 _transform;
};
}
#endif

View file

@ -23,6 +23,8 @@ public:
virtual float apply(float value) const override;
virtual Pose apply(Pose value) const override { return value; }
virtual bool parseParameters(const QJsonValue& parameters) override;
private:

View file

@ -0,0 +1,39 @@
//
// Created by Brad Hefta-Gaub 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Controllers_Filters_Rotate_h
#define hifi_Controllers_Filters_Rotate_h
#include <glm/gtx/transform.hpp>
#include "../Filter.h"
namespace controller {
class RotateFilter : public Filter {
REGISTER_FILTER_CLASS(RotateFilter);
public:
RotateFilter() { }
RotateFilter(glm::quat rotation) : _rotation(rotation) {}
virtual float apply(float value) const override { return value; }
virtual Pose apply(Pose value) const override {
return value.transform(glm::mat4(glm::quat(_rotation)));
}
virtual bool parseParameters(const QJsonValue& parameters) override { return parseQuatParameter(parameters, _rotation); }
private:
glm::quat _rotation;
};
}
#endif

View file

@ -10,6 +10,8 @@
#ifndef hifi_Controllers_Filters_Scale_h
#define hifi_Controllers_Filters_Scale_h
#include <glm/gtc/matrix_transform.hpp>
#include "../Filter.h"
namespace controller {
@ -23,6 +25,11 @@ public:
virtual float apply(float value) const override {
return value * _scale;
}
virtual Pose apply(Pose value) const override {
return value.transform(glm::scale(glm::mat4(), glm::vec3(_scale)));
}
virtual bool parseParameters(const QJsonValue& parameters) override;
private:

View file

@ -0,0 +1,35 @@
//
// Created by Brad Hefta-Gaub 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Controllers_Filters_Transform_h
#define hifi_Controllers_Filters_Transform_h
#include <glm/gtx/transform.hpp>
#include "../Filter.h"
namespace controller {
class TransformFilter : public Filter {
REGISTER_FILTER_CLASS(TransformFilter);
public:
TransformFilter() { }
TransformFilter(glm::mat4 transform) : _transform(transform) {}
virtual float apply(float value) const override { return value; }
virtual Pose apply(Pose value) const override { return value.transform(_transform); }
virtual bool parseParameters(const QJsonValue& parameters) override { return parseMat4Parameter(parameters, _transform); }
private:
glm::mat4 _transform;
};
}
#endif

View file

@ -0,0 +1,35 @@
//
// Created by Brad Hefta-Gaub 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Controllers_Filters_Translate_h
#define hifi_Controllers_Filters_Translate_h
#include <glm/gtx/transform.hpp>
#include "../Filter.h"
namespace controller {
class TranslateFilter : public Filter {
REGISTER_FILTER_CLASS(TranslateFilter);
public:
TranslateFilter() { }
TranslateFilter(glm::vec3 translate) : _translate(translate) {}
virtual float apply(float value) const override { return value; }
virtual Pose apply(Pose value) const override { return value.transform(glm::translate(_translate)); }
virtual bool parseParameters(const QJsonValue& parameters) override { return parseVec3Parameter(parameters, _translate); }
private:
glm::vec3 _translate { 0.0f };
};
}
#endif

View file

@ -49,20 +49,20 @@
An dynamic is a callback which is registered with bullet. An dynamic is called-back every physics
A dynamic is a callback which is registered with bullet. A dynamic is called-back every physics
simulation step and can do whatever it wants with the various data structures it has available. A
dynamic, for example, can pull an EntityItem toward a point as if that EntityItem were connected to that
point by a spring.
In this system, an dynamic is a property of an EntityItem (rather, an EntityItem has a property which
In this system, a dynamic is a property of an EntityItem (rather, an EntityItem has a property which
encodes a list of dynamics). Each dynamic has a type and some arguments. Dynamics can be created by a
script or when receiving information via an EntityTree data-stream (either over the network or from an
svo file).
In the interface, if an EntityItem has dynamics, this EntityItem will have pointers to ObjectDynamic
subclass (like ObjectDynamicSpring) instantiations. Code in the entities library affects an dynamic-object
subclass (like ObjectDynamicSpring) instantiations. Code in the entities library affects a dynamic-object
via the EntityDynamicInterface (which knows nothing about bullet). When the ObjectDynamic subclass
instance is created, it is registered as an dynamic with bullet. Bullet will call into code in this
instance is created, it is registered as a dynamic with bullet. Bullet will call into code in this
instance with the btDynamicInterface every physics-simulation step.
Because the dynamic can exist next to the interface's EntityTree or the entity-server's EntityTree,

View file

@ -50,7 +50,8 @@ Q_LOGGING_CATEGORY(trace_resource_parse_image_ktx, "trace.resource.parse.image.k
const std::string TextureCache::KTX_DIRNAME { "ktx_cache" };
const std::string TextureCache::KTX_EXT { "ktx" };
static const int SKYBOX_LOAD_PRIORITY { 10 }; // Make sure skybox loads first
static const float SKYBOX_LOAD_PRIORITY { 10.0f }; // Make sure skybox loads first
static const float HIGH_MIPS_LOAD_PRIORITY { 9.0f }; // Make sure high mips loads after skybox but before models
TextureCache::TextureCache() :
_ktxCache(KTX_DIRNAME, KTX_EXT) {
@ -261,9 +262,6 @@ QSharedPointer<Resource> TextureCache::createResource(const QUrl& url, const QSh
auto content = textureExtra ? textureExtra->content : QByteArray();
auto maxNumPixels = textureExtra ? textureExtra->maxNumPixels : ABSOLUTE_MAX_TEXTURE_NUM_PIXELS;
NetworkTexture* texture = new NetworkTexture(url, type, content, maxNumPixels);
if (type == image::TextureUsage::CUBE_TEXTURE) {
texture->setLoadPriority(this, SKYBOX_LOAD_PRIORITY);
}
return QSharedPointer<Resource>(texture, &Resource::deleter);
}
@ -276,6 +274,12 @@ NetworkTexture::NetworkTexture(const QUrl& url, image::TextureUsage::Type type,
_textureSource = std::make_shared<gpu::TextureSource>();
_lowestRequestedMipLevel = 0;
if (type == image::TextureUsage::CUBE_TEXTURE) {
setLoadPriority(this, SKYBOX_LOAD_PRIORITY);
} else if (_sourceIsKTX) {
setLoadPriority(this, HIGH_MIPS_LOAD_PRIORITY);
}
if (!url.isValid()) {
_loaded = true;
}
@ -397,7 +401,8 @@ void NetworkTexture::startRequestForNextMipLevel() {
_ktxResourceState = PENDING_MIP_REQUEST;
init();
setLoadPriority(this, -static_cast<int>(_originalKtxDescriptor->header.numberOfMipmapLevels) + _lowestKnownPopulatedMip);
float priority = -(float)_originalKtxDescriptor->header.numberOfMipmapLevels + (float)_lowestKnownPopulatedMip;
setLoadPriority(this, priority);
_url.setFragment(QString::number(_lowestKnownPopulatedMip - 1));
TextureCache::attemptRequest(_self);
}

View file

@ -348,18 +348,19 @@ void AssetClient::handleAssetGetReply(QSharedPointer<ReceivedMessage> message, S
// Store message in case we need to disconnect from it later.
callbacks.message = message;
auto weakNode = senderNode.toWeakRef();
connect(message.data(), &ReceivedMessage::progress, this, [this, weakNode, messageID, length](qint64 size) {
handleProgressCallback(weakNode, messageID, size, length);
});
connect(message.data(), &ReceivedMessage::completed, this, [this, weakNode, messageID]() {
handleCompleteCallback(weakNode, messageID);
});
if (message->isComplete()) {
disconnect(message.data(), nullptr, this, nullptr);
callbacks.completeCallback(true, error, message->readAll());
messageCallbackMap.erase(requestIt);
} else {
auto weakNode = senderNode.toWeakRef();
connect(message.data(), &ReceivedMessage::progress, this, [this, weakNode, messageID, length](qint64 size) {
handleProgressCallback(weakNode, messageID, size, length);
});
connect(message.data(), &ReceivedMessage::completed, this, [this, weakNode, messageID]() {
handleCompleteCallback(weakNode, messageID);
});
}
}

View file

@ -77,7 +77,7 @@ void AssetRequest::start() {
_assetRequestID = assetClient->getAsset(_hash, _byteRange.fromInclusive, _byteRange.toExclusive,
[this, that, hash](bool responseReceived, AssetServerError serverError, const QByteArray& data) {
if (!that) {
qCWarning(asset_client) << "Got reply for dead asset request " << hash << "- error code" << _error;
// If the request is dead, return
return;
@ -113,8 +113,10 @@ void AssetRequest::start() {
_data = data;
_totalReceived += data.size();
emit progress(_totalReceived, data.size());
saveToCache(getUrl(), data);
if (!_byteRange.isSet()) {
saveToCache(getUrl(), data);
}
}
}

View file

@ -39,7 +39,7 @@ const QSet<PacketType> NON_SOURCED_PACKETS = QSet<PacketType>()
<< PacketType::ICEServerPeerInformation << PacketType::ICEServerQuery << PacketType::ICEServerHeartbeat
<< PacketType::ICEServerHeartbeatACK << PacketType::ICEPing << PacketType::ICEPingReply
<< PacketType::ICEServerHeartbeatDenied << PacketType::AssignmentClientStatus << PacketType::StopNode
<< PacketType::DomainServerRemovedNode << PacketType::UsernameFromIDReply;
<< PacketType::DomainServerRemovedNode << PacketType::UsernameFromIDReply << PacketType::OctreeFileReplacement;
PacketVersion versionForPacketType(PacketType packetType) {
switch (packetType) {

View file

@ -113,7 +113,8 @@ public:
EntityPhysics,
EntityServerScriptLog,
AdjustAvatarSorting,
LAST_PACKET_TYPE = AdjustAvatarSorting
OctreeFileReplacement,
LAST_PACKET_TYPE = OctreeFileReplacement
};
};

View file

@ -33,6 +33,7 @@
#include "OctreePersistThread.h"
const int OctreePersistThread::DEFAULT_PERSIST_INTERVAL = 1000 * 30; // every 30 seconds
const QString OctreePersistThread::REPLACEMENT_FILE_EXTENSION = ".replace";
OctreePersistThread::OctreePersistThread(OctreePointer tree, const QString& filename, const QString& backupDirectory, int persistInterval,
bool wantBackup, const QJsonObject& settings, bool debugTimestampNow,
@ -131,10 +132,47 @@ quint64 OctreePersistThread::getMostRecentBackupTimeInUsecs(const QString& forma
return mostRecentBackupInUsecs;
}
void OctreePersistThread::possiblyReplaceContent() {
// before we load the normal file, check if there's a pending replacement file
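// (e.g. a models.json.gz persist file would be replaced by a pending models.json.gz.replace)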
auto replacementFileName = _filename + REPLACEMENT_FILE_EXTENSION;
QFile replacementFile { replacementFileName };
if (replacementFile.exists()) {
// we have a replacement file to process
qDebug() << "Replacing models file with" << replacementFileName;
// first take the current models file and move it to a different filename, appended with the timestamp
QFile currentFile { _filename };
if (currentFile.exists()) {
static const QString FILENAME_TIMESTAMP_FORMAT = "yyyyMMdd-hhmmss";
auto backupFileName = _filename + ".backup." + QDateTime::currentDateTime().toString(FILENAME_TIMESTAMP_FORMAT);
if (currentFile.rename(backupFileName)) {
qDebug() << "Moved previous models file to" << backupFileName;
} else {
qWarning() << "Could not backup previous models file to" << backupFileName << "- removing replacement models file";
if (!replacementFile.remove()) {
qWarning() << "Could not remove replacement models file from" << replacementFileName
<< "- replacement will be re-attempted on next server restart";
return;
}
}
}
// rename the replacement file to match what the persist thread is just about to read
if (!replacementFile.rename(_filename)) {
qWarning() << "Could not replace models file with" << replacementFileName << "- starting with empty models file";
}
}
}
bool OctreePersistThread::process() {
if (!_initialLoadComplete) {
possiblyReplaceContent();
quint64 loadStarted = usecTimestampNow();
qCDebug(octree) << "loading Octrees from file: " << _filename << "...";

View file

@ -32,6 +32,7 @@ public:
};
static const int DEFAULT_PERSIST_INTERVAL;
static const QString REPLACEMENT_FILE_EXTENSION;
OctreePersistThread(OctreePointer tree, const QString& filename, const QString& backupDirectory,
int persistInterval = DEFAULT_PERSIST_INTERVAL, bool wantBackup = false,
@ -60,6 +61,7 @@ protected:
bool getMostRecentBackup(const QString& format, QString& mostRecentBackupFileName, QDateTime& mostRecentBackupTime);
quint64 getMostRecentBackupTimeInUsecs(const QString& format);
void parseSettings(const QJsonObject& settings);
void possiblyReplaceContent();
private:
OctreePointer _tree;

View file

@ -166,6 +166,12 @@ void Deck::processFrames() {
if (!overLimit) {
auto nextFrameTime = nextClip->positionFrameTime();
nextInterval = (int)Frame::frameTimeToMilliseconds(nextFrameTime - _position);
if (nextInterval < 0) {
qCWarning(recordingLog) << "Unexpected nextInterval < 0 nextFrameTime:" << nextFrameTime
<< "_position:" << _position << "-- setting nextInterval to 0";
nextInterval = 0;
}
#ifdef WANT_RECORDING_DEBUG
qCDebug(recordingLog) << "Now " << _position;
qCDebug(recordingLog) << "Next frame time " << nextInterval;

View file

@ -122,3 +122,10 @@ bool Quat::equal(const glm::quat& q1, const glm::quat& q2) {
return q1 == q2;
}
glm::quat Quat::cancelOutRollAndPitch(const glm::quat& q) {
return ::cancelOutRollAndPitch(q);
}
glm::quat Quat::cancelOutRoll(const glm::quat& q) {
return ::cancelOutRoll(q);
}

View file

@ -60,6 +60,8 @@ public slots:
float dot(const glm::quat& q1, const glm::quat& q2);
void print(const QString& label, const glm::quat& q);
bool equal(const glm::quat& q1, const glm::quat& q2);
glm::quat cancelOutRollAndPitch(const glm::quat& q);
glm::quat cancelOutRoll(const glm::quat& q);
};
#endif // hifi_Quat_h

View file

@ -508,7 +508,7 @@ void TabletProxy::gotoWebScreen(const QString& url, const QString& injectedJavaS
if (root) {
removeButtonsFromHomeScreen();
QMetaObject::invokeMethod(root, "loadSource", Q_ARG(const QVariant&, QVariant(WEB_VIEW_SOURCE_URL)));
QMetaObject::invokeMethod(root, "loadWebBase");
QMetaObject::invokeMethod(root, "setShown", Q_ARG(const QVariant&, QVariant(true)));
QMetaObject::invokeMethod(root, "loadWebUrl", Q_ARG(const QVariant&, QVariant(url)), Q_ARG(const QVariant&, QVariant(injectedJavaScriptUrl)));
}

View file

@ -38,6 +38,11 @@ const quat Quaternions::X_180{ 0.0f, 1.0f, 0.0f, 0.0f };
const quat Quaternions::Y_180{ 0.0f, 0.0f, 1.0f, 0.0f };
const quat Quaternions::Z_180{ 0.0f, 0.0f, 0.0f, 1.0f };
const mat4 Matrices::IDENTITY { glm::mat4() };
const mat4 Matrices::X_180 { createMatFromQuatAndPos(Quaternions::X_180, Vectors::ZERO) };
const mat4 Matrices::Y_180 { createMatFromQuatAndPos(Quaternions::Y_180, Vectors::ZERO) };
const mat4 Matrices::Z_180 { createMatFromQuatAndPos(Quaternions::Z_180, Vectors::ZERO) };
// Safe version of glm::mix; based on the code in Nick Bobick's article,
// http://www.gamasutra.com/features/19980703/quaternions_01.htm (via Clyde,
// https://github.com/threerings/clyde/blob/master/src/main/java/com/threerings/math/Quaternion.java)

Some files were not shown because too many files have changed in this diff.