Merge branch 'master' of https://github.com/highfidelity/hifi into ambient-bis

samcake 2017-05-02 09:52:09 -07:00
commit e561a33b01
81 changed files with 1757 additions and 1309 deletions

View file

@ -1,47 +0,0 @@
set(EXTERNAL_NAME faceshift)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://hifi-public.s3.amazonaws.com/dependencies/faceshift.zip
CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
LOG_DOWNLOAD 1
LOG_CONFIGURE 1
LOG_BUILD 1
)
# URL_MD5 1bdcb8a0b8d5b1ede434cc41efade41d
# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${INSTALL_DIR}/include CACHE FILEPATH "Path to Faceshift include directory")
set(LIBRARY_DEBUG_PATH "lib/Debug")
set(LIBRARY_RELEASE_PATH "lib/Release")
if (WIN32)
set(LIBRARY_PREFIX "")
set(LIBRARY_EXT "lib")
# use selected configuration in release path when building on Windows
set(LIBRARY_RELEASE_PATH "$<$<CONFIG:RelWithDebInfo>:build/RelWithDebInfo>")
set(LIBRARY_RELEASE_PATH "${LIBRARY_RELEASE_PATH}$<$<CONFIG:MinSizeRel>:build/MinSizeRel>")
set(LIBRARY_RELEASE_PATH "${LIBRARY_RELEASE_PATH}$<$<OR:$<CONFIG:Release>,$<CONFIG:Debug>>:lib/Release>")
elseif (APPLE)
set(LIBRARY_EXT "a")
set(LIBRARY_PREFIX "lib")
if (CMAKE_GENERATOR STREQUAL "Unix Makefiles")
set(LIBRARY_DEBUG_PATH "build")
set(LIBRARY_RELEASE_PATH "build")
endif ()
endif()
set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG
${INSTALL_DIR}/${LIBRARY_DEBUG_PATH}/${LIBRARY_PREFIX}faceshift.${LIBRARY_EXT} CACHE FILEPATH "Faceshift libraries")
set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE
${INSTALL_DIR}/${LIBRARY_RELEASE_PATH}/${LIBRARY_PREFIX}faceshift.${LIBRARY_EXT} CACHE FILEPATH "Faceshift libraries")

View file

@ -1,14 +0,0 @@
#
# Copyright 2015 High Fidelity, Inc.
# Created by Bradley Austin Davis on 2015/10/10
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
macro(TARGET_FACESHIFT)
add_dependency_external_projects(faceshift)
find_package(Faceshift REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${FACESHIFT_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${FACESHIFT_LIBRARIES})
add_definitions(-DHAVE_FACESHIFT)
endmacro()

View file

@ -1,26 +0,0 @@
#
# FindFaceshift.cmake
#
# Try to find the Faceshift networking library
#
# You must provide a FACESHIFT_ROOT_DIR which contains lib and include directories
#
# Once done this will define
#
# FACESHIFT_FOUND - system found Faceshift
# FACESHIFT_INCLUDE_DIRS - the Faceshift include directory
# FACESHIFT_LIBRARIES - Link this to use Faceshift
#
# Created on 8/30/2013 by Andrzej Kapolka
# Copyright 2013 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
include(SelectLibraryConfigurations)
select_library_configurations(FACESHIFT)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Faceshift DEFAULT_MSG FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES)
mark_as_advanced(FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES FACESHIFT_SEARCH_DIRS)

View file

@ -194,7 +194,7 @@ link_hifi_libraries(
recording fbx networking model-networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer avatars-renderer ui auto-updater
controllers plugins image
controllers plugins image trackers
ui-plugins display-plugins input-plugins
${NON_ANDROID_LIBRARIES}
)
@ -202,7 +202,6 @@ link_hifi_libraries(
# include the binary directory of render-utils for shader includes
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_BINARY_DIR}/libraries/render-utils")
#fixme find a way to express faceshift as a plugin
target_bullet()
target_opengl()
@ -210,10 +209,6 @@ if (NOT ANDROID)
target_glew()
endif ()
if (WIN32 OR APPLE)
target_faceshift()
endif()
# perform standard include and linking for found externals
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})

View file

@ -61,6 +61,11 @@
{ "from": "Standard.RightHand", "to": "Actions.RightHand" },
{ "from": "Standard.LeftFoot", "to": "Actions.LeftFoot" },
{ "from": "Standard.RightFoot", "to": "Actions.RightFoot" }
{ "from": "Standard.RightFoot", "to": "Actions.RightFoot" },
{ "from": "Standard.Hips", "to": "Actions.Hips" },
{ "from": "Standard.Spine2", "to": "Actions.Spine2" },
{ "from": "Standard.Head", "to": "Actions.Head" }
]
}
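
For reference, the same three routes could also be built at runtime from a script instead of JSON. A minimal sketch using the Controller mapping API, assuming the Standard device exposes the new Hips/Spine2/Head channels as the JSON above implies (mapping name illustrative):

var mapping = Controller.newMapping('exampleTrackerRoutes'); // hypothetical name
mapping.from(Controller.Standard.Hips).to(Controller.Actions.Hips);
mapping.from(Controller.Standard.Spine2).to(Controller.Actions.Spine2);
mapping.from(Controller.Standard.Head).to(Controller.Actions.Head);
mapping.enable();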

View file

@ -19,7 +19,7 @@
function shouldRaiseKeyboard() {
var nodeName = document.activeElement.nodeName;
var nodeType = document.activeElement.type;
if (nodeName === "INPUT" && (nodeType === "text" || nodeType === "number" || nodeType === "password")
if (nodeName === "INPUT" && ["email", "number", "password", "tel", "text", "url"].indexOf(nodeType) !== -1
|| document.activeElement.nodeName === "TEXTAREA") {
return true;
} else {
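
The new membership test widens the set of <input> types that summon the keyboard from three to six. Restated as a standalone predicate for clarity (helper and constant names illustrative):

var RAISED_INPUT_TYPES = ["email", "number", "password", "tel", "text", "url"];
function wantsKeyboard(element) {
    return (element.nodeName === "INPUT" && RAISED_INPUT_TYPES.indexOf(element.type) !== -1)
        || element.nodeName === "TEXTAREA";
}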

View file

@ -0,0 +1,132 @@
import QtQuick 2.5
import QtWebEngine 1.1
import QtWebChannel 1.0
import "../controls-uit" as HiFiControls
import HFTabletWebEngineProfile 1.0
Item {
property alias url: root.url
property alias scriptURL: root.userScriptUrl
property alias eventBridge: eventBridgeWrapper.eventBridge
property alias canGoBack: root.canGoBack;
property var goBack: root.goBack;
property alias urlTag: root.urlTag
property bool keyboardEnabled: true // FIXME - Keyboard HMD only: Default to false
property bool keyboardRaised: false
property bool punctuationMode: false
// FIXME - Keyboard HMD only: Make Interface either set keyboardRaised property directly in OffscreenQmlSurface
// or provide HMDinfo object to QML in RenderableWebEntityItem and do the following.
/*
onKeyboardRaisedChanged: {
keyboardEnabled = HMDinfo.active;
}
*/
QtObject {
id: eventBridgeWrapper
WebChannel.id: "eventBridgeWrapper"
property var eventBridge;
}
property alias viewProfile: root.profile
WebEngineView {
id: root
objectName: "webEngineView"
x: 0
y: 0
width: parent.width
height: keyboardEnabled && keyboardRaised ? parent.height - keyboard.height : parent.height
profile: HFTabletWebEngineProfile {
id: webviewProfile
storageName: "qmlTabletWebEngine"
}
property string userScriptUrl: ""
// creates a global EventBridge object.
WebEngineScript {
id: createGlobalEventBridge
sourceCode: eventBridgeJavaScriptToInject
injectionPoint: WebEngineScript.DocumentCreation
worldId: WebEngineScript.MainWorld
}
// detects when to raise and lower virtual keyboard
WebEngineScript {
id: raiseAndLowerKeyboard
injectionPoint: WebEngineScript.Deferred
sourceUrl: resourceDirectoryUrl + "/html/raiseAndLowerKeyboard.js"
worldId: WebEngineScript.MainWorld
}
// User script.
WebEngineScript {
id: userScript
sourceUrl: root.userScriptUrl
injectionPoint: WebEngineScript.DocumentReady // DOM ready but page load may not be finished.
worldId: WebEngineScript.MainWorld
}
property string urlTag: "noDownload=false";
userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard, userScript ]
property string newUrl: ""
webChannel.registeredObjects: [eventBridgeWrapper]
Component.onCompleted: {
// Ensure the JS from the web-engine makes it to our logging
root.javaScriptConsoleMessage.connect(function(level, message, lineNumber, sourceID) {
console.log("Web Entity JS message: " + sourceID + " " + lineNumber + " " + message);
});
root.profile.httpUserAgent = "Mozilla/5.0 Chrome (HighFidelityInterface)";
}
onFeaturePermissionRequested: {
grantFeaturePermission(securityOrigin, feature, true);
}
onLoadingChanged: {
keyboardRaised = false;
punctuationMode = false;
keyboard.resetShiftMode(false);
// Required to support clicking on "hifi://" links
if (WebEngineView.LoadStartedStatus == loadRequest.status) {
var url = loadRequest.url.toString();
url = (url.indexOf("?") >= 0) ? url + urlTag : url + "?" + urlTag;
if (urlHandler.canHandleUrl(url)) {
if (urlHandler.handleUrl(url)) {
root.stop();
}
}
}
}
onNewViewRequested: {
// desktop is not defined for web-entities or tablet
if (typeof desktop !== "undefined") {
desktop.openBrowserWindow(request, profile);
} else {
tabletRoot.openBrowserWindow(request, profile);
}
}
}
HiFiControls.Keyboard {
id: keyboard
raised: parent.keyboardEnabled && parent.keyboardRaised
numeric: parent.punctuationMode
anchors {
left: parent.left
right: parent.right
bottom: parent.bottom
}
}
}
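
Because createGlobalEventBridge runs at DocumentCreation, pages loaded in this view see a global EventBridge object. A sketch of how a page script might use it, with the message payload purely illustrative:

// Page-side sketch; EventBridge comes from the injected script above.
EventBridge.scriptEventReceived.connect(function (message) {
    console.log("from interface: " + message);
});
EventBridge.emitWebEvent(JSON.stringify({ type: "ready" }));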

View file

@ -23,8 +23,13 @@ Item {
property bool keyboardRaised: false
property bool punctuationMode: false
property bool isDesktop: false
property bool removingPage: false
property bool loadingPage: false
property string initialPage: ""
property bool startingUp: true
property alias webView: webview
property alias profile: webview.profile
property bool remove: false
property var urlList: []
property var forwardList: []
property int currentPage: -1 // used as a model for repeater
@ -76,11 +81,21 @@ Item {
id: displayUrl
color: hifi.colors.baseGray
font.pixelSize: 12
verticalAlignment: Text.AlignLeft
anchors {
top: nav.bottom
horizontalCenter: parent.horizontalCenter;
left: parent.left
leftMargin: 20
}
}
MouseArea {
anchors.fill: parent
preventStealing: true
propagateComposedEvents: true
}
}
ListModel {
@ -96,20 +111,24 @@ Item {
}
function goBack() {
if (webview.canGoBack && !isUrlLoaded(webview.url)) {
if (currentPage > 0) {
removingPage = true;
pagesModel.remove(currentPage);
}
if (webview.canGoBack) {
forwardList.push(webview.url);
webview.goBack();
} else if (currentPage > 0) {
removingPage = true;
pagesModel.remove(currentPage);
} else if (web.urlList.length > 0) {
var url = web.urlList.pop();
loadUrl(url);
} else if (web.forwardList.length > 0) {
var url = web.forwardList.pop();
loadUrl(url);
web.forwardList = [];
}
}
function closeWebEngine() {
if (remove) {
web.destroy();
return;
}
if (parentStackItem) {
parentStackItem.pop();
} else {
@ -137,32 +156,42 @@ Item {
view.setEnabled(true);
}
function loadUrl(url) {
webview.url = url
web.url = webview.url;
web.address = webview.url;
}
function onInitialPage(url) {
return (url === webview.url);
}
function urlAppend(url) {
if (removingPage) {
removingPage = false;
return;
}
var lurl = decodeURIComponent(url)
if (lurl[lurl.length - 1] !== "/") {
lurl = lurl + "/"
}
if (currentPage === -1 || (pagesModel.get(currentPage).webUrl !== lurl && !timer.running)) {
timer.start();
pagesModel.append({webUrl: lurl});
}
web.urlList.push(url);
setBackButtonStatus();
}
onCurrentPageChanged: {
if (currentPage >= 0 && currentPage < pagesModel.count) {
timer.start();
webview.url = pagesModel.get(currentPage).webUrl;
web.url = webview.url;
web.address = webview.url;
function setBackButtonStatus() {
if (web.urlList.length > 0 || webview.canGoBack) {
back.enabledColor = hifi.colors.darkGray;
back.enabled = true;
} else {
back.enabledColor = hifi.colors.baseGray;
back.enabled = false;
}
}
onUrlChanged: {
gotoPage(url)
loadUrl(url);
if (startingUp) {
web.initialPage = webview.url;
startingUp = false;
}
}
QtObject {
@ -170,18 +199,7 @@ Item {
WebChannel.id: "eventBridgeWrapper"
property var eventBridge;
}
Timer {
id: timer
interval: 200
running: false
repeat: false
onTriggered: timer.stop();
}
WebEngineView {
id: webview
objectName: "webEngineView"
@ -221,6 +239,7 @@ Item {
worldId: WebEngineScript.MainWorld
}
property string urlTag: "noDownload=false";
userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard, userScript ]
property string newUrl: ""
@ -247,9 +266,7 @@ Item {
keyboard.resetShiftMode(false);
// Required to support clicking on "hifi://" links
if (WebEngineView.LoadStartedStatus == loadRequest.status) {
urlAppend(loadRequest.url.toString());
loadingPage = true;
var url = loadRequest.url.toString();
if (urlHandler.canHandleUrl(url)) {
if (urlHandler.handleUrl(url)) {
root.stop();
@ -260,9 +277,19 @@ Item {
if (WebEngineView.LoadFailedStatus == loadRequest.status) {
console.log(" Tablet WebEngineView failed to laod url: " + loadRequest.url.toString());
}
}
if (WebEngineView.LoadSucceededStatus == loadRequest.status) {
web.address = webview.url;
if (startingUp) {
web.initialPage = webview.url;
startingUp = false;
}
}
}
onNewViewRequested: {
var currentUrl = webview.url;
urlAppend(currentUrl);
request.openIn(webview);
}
}

View file

@ -113,7 +113,7 @@ Item {
if (typeof desktop !== "undefined") {
desktop.openBrowserWindow(request, profile);
} else {
console.log("onNewViewRequested: desktop not defined");
tabletRoot.openBrowserWindow(request, profile);
}
}
}

View file

@ -31,7 +31,7 @@ Item {
property bool drillDownToPlace: false;
property bool showPlace: isConcurrency;
property string messageColor: hifi.colors.blueAccent;
property string messageColor: isAnnouncement ? "white" : hifi.colors.blueAccent;
property string timePhrase: pastTime(timestamp);
property int onlineUsers: 0;
property bool isConcurrency: action === 'concurrency';
@ -71,6 +71,10 @@ Item {
property bool hasGif: imageUrl.indexOf('.gif') === (imageUrl.length - 4);
function pluralize(count, singular, optionalPlural) {
return (count === 1) ? singular : (optionalPlural || (singular + "s"));
}
DropShadow {
visible: isStacked;
anchors.fill: shadow1;
@ -115,7 +119,7 @@ Item {
id: lobby;
visible: !hasGif || (animation.status !== Image.Ready);
width: parent.width - (isConcurrency ? 0 : (2 * smallMargin));
height: parent.height - messageHeight - (isConcurrency ? 0 : smallMargin);
height: parent.height - (isAnnouncement ? smallMargin : messageHeight) - (isConcurrency ? 0 : smallMargin);
source: thumbnail || defaultThumbnail;
fillMode: Image.PreserveAspectCrop;
anchors {
@ -160,7 +164,24 @@ Item {
margins: textPadding;
}
}
Rectangle {
id: lozenge;
visible: isAnnouncement;
color: hifi.colors.redHighlight;
anchors.fill: infoRow;
radius: lozenge.height / 2.0;
border.width: lozengeHot.containsMouse ? 4 : 0;
border.color: "white";
}
Row {
id: infoRow;
Image {
id: icon;
source: isAnnouncement ? "../../images/Announce-Blast.svg" : "../../images/snap-icon.svg";
width: 40;
height: 40;
visible: ((action === 'snapshot') || isAnnouncement) && (messageHeight >= 40);
}
FiraSansRegular {
id: users;
visible: isConcurrency || isAnnouncement;
@ -169,34 +190,42 @@ Item {
color: messageColor;
anchors.verticalCenter: message.verticalCenter;
}
Image {
id: icon;
source: "../../images/snap-icon.svg"
width: 40;
height: 40;
visible: (action === 'snapshot') && (messageHeight >= 40);
}
RalewayRegular {
id: message;
text: isConcurrency ? ((onlineUsers === 1) ? "person" : "people") : (isAnnouncement ? "connections" : (drillDownToPlace ? "snapshots" : ("by " + userName)));
visible: !isAnnouncement;
text: isConcurrency ? pluralize(onlineUsers, "person", "people") : (drillDownToPlace ? "snapshots" : ("by " + userName));
size: textSizeSmall;
color: messageColor;
elide: Text.ElideRight; // requires a width to be specified
width: root.width - textPadding
- (users.visible ? users.width + parent.spacing : 0)
- (icon.visible ? icon.width + parent.spacing : 0)
- (users.visible ? users.width + parent.spacing : 0)
- (actionIcon.width + (2 * smallMargin));
anchors {
bottom: parent.bottom;
bottomMargin: parent.spacing;
}
}
Column {
visible: isAnnouncement;
RalewayRegular {
text: pluralize(onlineUsers, "connection") + " "; // hack padding
size: textSizeSmall;
color: messageColor;
}
RalewayRegular {
text: pluralize(onlineUsers, "is here now", "are here now");
size: textSizeSmall * 0.7;
color: messageColor;
}
}
spacing: textPadding;
height: messageHeight;
anchors {
bottom: parent.bottom;
left: parent.left;
leftMargin: textPadding;
bottomMargin: isAnnouncement ? textPadding : 0;
}
}
// These two can be supplied to provide hover behavior.
@ -214,6 +243,7 @@ Item {
}
StateImage {
id: actionIcon;
visible: !isAnnouncement;
imageURL: "../../images/info-icon-2-state.svg";
size: 30;
buttonState: messageArea.containsMouse ? 1 : 0;
@ -223,13 +253,25 @@ Item {
margins: smallMargin;
}
}
function go() {
goFunction(drillDownToPlace ? ("/places/" + placeName) : ("/user_stories/" + storyId));
}
MouseArea {
id: messageArea;
visible: !isAnnouncement;
width: parent.width;
height: messageHeight;
anchors.top: lobby.bottom;
acceptedButtons: Qt.LeftButton;
onClicked: goFunction(drillDownToPlace ? ("/places/" + placeName) : ("/user_stories/" + storyId));
onClicked: go();
hoverEnabled: true;
}
MouseArea {
id: lozengeHot;
visible: lozenge.visible;
anchors.fill: lozenge;
acceptedButtons: Qt.LeftButton;
onClicked: go();
hoverEnabled: true;
}
}

View file

@ -34,12 +34,13 @@ Column {
property string metaverseServerUrl: '';
property string actions: 'snapshot';
onActionsChanged: fillDestinations();
Component.onCompleted: fillDestinations();
// sendToScript doesn't get wired until after everything gets created. So we have to queue fillDestinations on nextTick.
Component.onCompleted: delay.start();
property string labelText: actions;
property string filter: '';
onFilterChanged: filterChoicesByText();
property var goFunction: null;
property var rpc: null;
HifiConstants { id: hifi }
ListModel { id: suggestions; }
@ -81,6 +82,20 @@ Column {
property var allStories: [];
property var placeMap: ({}); // Used for making stacks.
property int requestId: 0;
function handleError(url, error, data, cb) { // cb(error) and answer truthy if needed, else falsey
if (!error && (data.status === 'success')) {
return;
}
if (!error) { // Create a message from the data
error = data.status + ': ' + data.error;
}
if (typeof(error) === 'string') { // Make a proper Error object
error = new Error(error);
}
error.message += ' in ' + url; // Include the url.
cb(error);
return true;
}
function getUserStoryPage(pageNumber, cb, cb1) { // cb(error) after all pages of domain data have been added to model
// If supplied, cb1 will be run after the first page IFF it is not the last, for responsiveness.
var options = [
@ -93,8 +108,11 @@ Column {
];
var url = metaverseBase + 'user_stories?' + options.join('&');
var thisRequestId = ++requestId;
getRequest(url, function (error, data) {
if ((thisRequestId !== requestId) || handleError(url, error, data, cb)) {
rpc('request', url, function (error, data) {
if (thisRequestId !== requestId) {
error = 'stale';
}
if (handleError(url, error, data, cb)) {
return; // abandon stale requests
}
allStories = allStories.concat(data.user_stories.map(makeModelData));
@ -107,14 +125,21 @@ Column {
cb();
});
}
property var delay: Timer { // No setTimeout or nextTick in QML.
interval: 0;
onTriggered: fillDestinations();
}
function fillDestinations() { // Public
function report(label, error) {
console.log(label, actions, error || 'ok', allStories.length, 'filtered to', suggestions.count);
}
var filter = makeFilteredStoryProcessor(), counter = 0;
allStories = [];
suggestions.clear();
placeMap = {};
getUserStoryPage(1, function (error) {
allStories.slice(counter).forEach(filter);
console.log('user stories query', actions, error || 'ok', allStories.length, 'filtered to', suggestions.count);
report('user stories update', error);
root.visible = !!suggestions.count;
}, function () { // If there's more than a page, put what we have in the model right away, keeping track of how many are processed.
allStories.forEach(function (story) {
@ -122,15 +147,19 @@ Column {
filter(story);
root.visible = !!suggestions.count;
});
report('user stories');
});
}
function identity(x) {
return x;
}
function makeFilteredStoryProcessor() { // answer a function(storyData) that adds it to suggestions if it matches
var words = filter.toUpperCase().split(/\s+/).filter(identity);
function suggestable(story) {
if (story.action === 'snapshot') {
return true;
}
return (story.place_name !== AddressManager.placename); // Not our entry, but do show other entry points to current domain.
return story.place_name !== AddressManager.placename; // Not our entry, but do show other entry points to current domain.
}
function matches(story) {
if (!words.length) {

View file

@ -33,9 +33,27 @@ StackView {
property int cardWidth: 212;
property int cardHeight: 152;
property string metaverseBase: addressBarDialog.metaverseServerUrl + "/api/v1/";
property var tablet: null;
// This version only implements rpc(method, parameters, callback(error, result)) calls initiated from here; it does not support calls initiated from .js, nor "notifications".
property var rpcCalls: ({});
property var rpcCounter: 0;
signal sendToScript(var message);
function rpc(method, parameters, callback) {
rpcCalls[rpcCounter] = callback;
var message = {method: method, params: parameters, id: rpcCounter++, jsonrpc: "2.0"};
sendToScript(message);
}
function fromScript(message) {
var callback = rpcCalls[message.id];
if (!callback) {
console.log('No callback for message fromScript', JSON.stringify(message));
return;
}
delete rpcCalls[message.id];
callback(message.error, message.result);
}
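
The script-side peer is expected to answer each {method, params, id, jsonrpc} envelope with a matching {id, error, result}. A minimal sketch of such a responder, assuming tablet-style fromQml/sendToQml wiring and some request(url, callback) helper (both assumptions, not shown in this diff):

tablet.fromQml.connect(function (message) {
    if (message.method === 'request') {
        request(message.params, function (error, data) { // request() helper assumed
            tablet.sendToQml({ jsonrpc: '2.0', id: message.id, error: error, result: data });
        });
    }
});
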
Component { id: tabletWebView; TabletWebView {} }
Component.onCompleted: {
updateLocationText(false);
@ -54,7 +72,7 @@ StackView {
}
function resetAfterTeleport() {
//storyCardFrame.shown = root.shown = false;
}
function goCard(targetString) {
@ -262,10 +280,10 @@ StackView {
cardHeight: 163 + (2 * 4);
metaverseServerUrl: addressBarDialog.metaverseServerUrl;
labelText: 'HAPPENING NOW';
//actions: 'concurrency,snapshot'; // uncomment this line instead of next to produce fake announcement data for testing.
actions: 'announcement';
filter: addressLine.text;
goFunction: goCard;
rpc: root.rpc;
}
Feed {
id: places;
@ -278,6 +296,7 @@ StackView {
actions: 'concurrency';
filter: addressLine.text;
goFunction: goCard;
rpc: root.rpc;
}
Feed {
id: snapshots;
@ -291,6 +310,7 @@ StackView {
actions: 'snapshot';
filter: addressLine.text;
goFunction: goCard;
rpc: root.rpc;
}
}
}
@ -330,50 +350,6 @@ StackView {
}
function getRequest(url, cb) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects.
// TODO: make available to other .qml.
var request = new XMLHttpRequest();
// QT bug: apparently doesn't handle onload. Workaround using readyState.
request.onreadystatechange = function () {
var READY_STATE_DONE = 4;
var HTTP_OK = 200;
if (request.readyState >= READY_STATE_DONE) {
var error = (request.status !== HTTP_OK) && request.status.toString() + ':' + request.statusText,
response = !error && request.responseText,
contentType = !error && request.getResponseHeader('content-type');
if (!error && contentType.indexOf('application/json') === 0) {
try {
response = JSON.parse(response);
} catch (e) {
error = e;
}
}
cb(error, response);
}
};
request.open("GET", url, true);
request.send();
}
function identity(x) {
return x;
}
function handleError(url, error, data, cb) { // cb(error) and answer truthy if needed, else falsey
if (!error && (data.status === 'success')) {
return;
}
if (!error) { // Create a message from the data
error = data.status + ': ' + data.error;
}
if (typeof(error) === 'string') { // Make a proper Error object
error = new Error(error);
}
error.message += ' in ' + url; // Include the url.
cb(error);
return true;
}
function updateLocationText(enteringAddress) {
if (enteringAddress) {
notice.text = "Go To a place, @user, path, or network address:";

View file

@ -1,7 +1,9 @@
import QtQuick 2.0
import Hifi 1.0
import QtQuick.Controls 1.4
import HFTabletWebEngineProfile 1.0
import "../../dialogs"
import "../../controls"
Item {
id: tabletRoot
@ -11,6 +13,7 @@ Item {
property var rootMenu;
property var openModal: null;
property var openMessage: null;
property var openBrowser: null;
property string subMenu: ""
signal showDesktop();
property bool shown: true
@ -87,13 +90,18 @@ Item {
loader.item.gotoPreviousApp = true;
}
}
function loadWebBase() {
loader.source = "";
loader.source = "TabletWebView.qml";
}
function returnToPreviousApp() {
tabletApps.remove(currentApp);
var isWebPage = tabletApps.get(currentApp).isWebUrl;
if (isWebPage) {
var webUrl = tabletApps.get(currentApp).appWebUrl;
var scriptUrl = tabletApps.get(currentApp).scriptUrl;
loadSource("TabletWebView.qml");
loadWebUrl(webUrl, scriptUrl);
} else {
@ -101,6 +109,16 @@ Item {
}
}
function openBrowserWindow(request, profile) {
var component = Qt.createComponent("../../controls/TabletWebView.qml");
var newWindow = component.createObject(tabletRoot);
newWindow.eventBridge = tabletRoot.eventBridge;
newWindow.remove = true;
newWindow.profile = profile;
request.openIn(newWindow.webView);
tabletRoot.openBrowser = newWindow;
}
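
Qt.createComponent can fail (bad path, QML errors), and createObject on a failed component returns null. A hedged variant of the function above with the status check the QML documentation recommends (function name illustrative):

function openBrowserWindowChecked(request, profile) { // illustrative variant
    var component = Qt.createComponent("../../controls/TabletWebView.qml");
    if (component.status === Component.Error) {
        console.log("TabletWebView failed to load: " + component.errorString());
        return;
    }
    var newWindow = component.createObject(tabletRoot);
    newWindow.eventBridge = tabletRoot.eventBridge;
    newWindow.remove = true;
    newWindow.profile = profile;
    request.openIn(newWindow.webView);
    tabletRoot.openBrowser = newWindow;
}
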
function loadWebUrl(url, injectedJavaScriptUrl) {
tabletApps.clear();
loader.item.url = url;
@ -180,6 +198,11 @@ Item {
openModal.destroy();
openModal = null;
}
if (openBrowser) {
openBrowser.destroy();
openBrowser = null;
}
}
}

View file

@ -3,7 +3,7 @@ import QtWebEngine 1.2
import "../../controls" as Controls
Controls.WebView {
Controls.TabletWebScreen {
}

View file

@ -38,6 +38,11 @@ Windows.ScrollingWindow {
loader.source = url;
}
function loadWebBase() {
loader.source = "";
loader.source = "WindowWebView.qml";
}
function loadWebUrl(url, injectedJavaScriptUrl) {
loader.item.url = url;
loader.item.scriptURL = injectedJavaScriptUrl;

View file

@ -0,0 +1,10 @@
import QtQuick 2.0
import QtWebEngine 1.2
import "../../controls" as Controls
Controls.WebView {
}

View file

@ -128,6 +128,7 @@
#include <QmlWebWindowClass.h>
#include <Preferences.h>
#include <display-plugins/CompositorHelper.h>
#include <trackers/EyeTracker.h>
#include "AudioClient.h"
@ -136,8 +137,6 @@
#include "avatar/ScriptAvatar.h"
#include "CrashHandler.h"
#include "devices/DdeFaceTracker.h"
#include "devices/EyeTracker.h"
#include "devices/Faceshift.h"
#include "devices/Leapmotion.h"
#include "DiscoverabilityManager.h"
#include "GLCanvas.h"
@ -480,7 +479,6 @@ bool setupEssentials(int& argc, char** argv) {
DependencyManager::set<ModelCache>();
DependencyManager::set<ScriptCache>();
DependencyManager::set<SoundCache>();
DependencyManager::set<Faceshift>();
DependencyManager::set<DdeFaceTracker>();
DependencyManager::set<EyeTracker>();
DependencyManager::set<AudioClient>();
@ -1210,10 +1208,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
this->installEventFilter(this);
// initialize our face trackers after loading the menu settings
auto faceshiftTracker = DependencyManager::get<Faceshift>();
faceshiftTracker->init();
connect(faceshiftTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
#ifdef HAVE_DDE
auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
ddeTracker->init();
@ -3626,20 +3620,13 @@ ivec2 Application::getMouse() const {
}
FaceTracker* Application::getActiveFaceTracker() {
auto faceshift = DependencyManager::get<Faceshift>();
auto dde = DependencyManager::get<DdeFaceTracker>();
return (dde->isActive() ? static_cast<FaceTracker*>(dde.data()) :
(faceshift->isActive() ? static_cast<FaceTracker*>(faceshift.data()) : nullptr));
return dde->isActive() ? static_cast<FaceTracker*>(dde.data()) : nullptr;
}
FaceTracker* Application::getSelectedFaceTracker() {
FaceTracker* faceTracker = nullptr;
#ifdef HAVE_FACESHIFT
if (Menu::getInstance()->isOptionChecked(MenuOption::Faceshift)) {
faceTracker = DependencyManager::get<Faceshift>().data();
}
#endif
#ifdef HAVE_DDE
if (Menu::getInstance()->isOptionChecked(MenuOption::UseCamera)) {
faceTracker = DependencyManager::get<DdeFaceTracker>().data();
@ -3649,15 +3636,8 @@ FaceTracker* Application::getSelectedFaceTracker() {
}
void Application::setActiveFaceTracker() const {
#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
#endif
#ifdef HAVE_FACESHIFT
auto faceshiftTracker = DependencyManager::get<Faceshift>();
faceshiftTracker->setIsMuted(isMuted);
faceshiftTracker->setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !isMuted);
#endif
#ifdef HAVE_DDE
bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
bool isUsingDDE = Menu::getInstance()->isOptionChecked(MenuOption::UseCamera);
Menu::getInstance()->getActionForOption(MenuOption::BinaryEyelidControl)->setVisible(isUsingDDE);
Menu::getInstance()->getActionForOption(MenuOption::CoupleEyelids)->setVisible(isUsingDDE);
@ -4375,7 +4355,13 @@ void Application::update(float deltaTime) {
controller::InputCalibrationData calibrationData = {
myAvatar->getSensorToWorldMatrix(),
createMatFromQuatAndPos(myAvatar->getOrientation(), myAvatar->getPosition()),
myAvatar->getHMDSensorMatrix()
myAvatar->getHMDSensorMatrix(),
myAvatar->getCenterEyeCalibrationMat(),
myAvatar->getHeadCalibrationMat(),
myAvatar->getSpine2CalibrationMat(),
myAvatar->getHipsCalibrationMat(),
myAvatar->getLeftFootCalibrationMat(),
myAvatar->getRightFootCalibrationMat()
};
InputPluginPointer keyboardMousePlugin;
@ -4423,6 +4409,13 @@ void Application::update(float deltaTime) {
controller::Pose rightFootPose = userInputMapper->getPoseState(controller::Action::RIGHT_FOOT);
myAvatar->setFootControllerPosesInSensorFrame(leftFootPose.transform(avatarToSensorMatrix), rightFootPose.transform(avatarToSensorMatrix));
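
// Forward the new hips, spine2, and head tracker poses into the avatar in sensor frame, mirroring the feet above.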
controller::Pose hipsPose = userInputMapper->getPoseState(controller::Action::HIPS);
controller::Pose spine2Pose = userInputMapper->getPoseState(controller::Action::SPINE2);
myAvatar->setSpineControllerPosesInSensorFrame(hipsPose.transform(avatarToSensorMatrix), spine2Pose.transform(avatarToSensorMatrix));
controller::Pose headPose = userInputMapper->getPoseState(controller::Action::HEAD);
myAvatar->setHeadControllerPoseInSensorFrame(headPose.transform(avatarToSensorMatrix));
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
updateDialogs(deltaTime); // update various stats dialogs if present
@ -5133,7 +5126,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
}
void Application::resetSensors(bool andReload) {
DependencyManager::get<Faceshift>()->reset();
DependencyManager::get<DdeFaceTracker>()->reset();
DependencyManager::get<EyeTracker>()->reset();
getActiveDisplayPlugin()->resetSensors();

View file

@ -34,7 +34,6 @@
#include "avatar/AvatarManager.h"
#include "AvatarBookmarks.h"
#include "devices/DdeFaceTracker.h"
#include "devices/Faceshift.h"
#include "MainWindow.h"
#include "render/DrawStatus.h"
#include "scripting/MenuScriptingInterface.h"
@ -451,12 +450,6 @@ Menu::Menu() {
qApp, SLOT(setActiveFaceTracker()));
faceTrackerGroup->addAction(noFaceTracker);
#ifdef HAVE_FACESHIFT
QAction* faceshiftFaceTracker = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::Faceshift,
0, false,
qApp, SLOT(setActiveFaceTracker()));
faceTrackerGroup->addAction(faceshiftFaceTracker);
#endif
#ifdef HAVE_DDE
QAction* ddeFaceTracker = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::UseCamera,
0, true,
@ -477,11 +470,10 @@ Menu::Menu() {
QAction* ddeCalibrate = addActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::CalibrateCamera, 0,
DependencyManager::get<DdeFaceTracker>().data(), SLOT(calibrate()));
ddeCalibrate->setVisible(true); // DDE face tracking is on by default
#endif
#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
faceTrackingMenu->addSeparator();
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::MuteFaceTracking,
Qt::CTRL | Qt::SHIFT | Qt::Key_F, true); // DDE face tracking is on by default
[](bool mute) { FaceTracker::setIsMuted(mute); },
Qt::CTRL | Qt::SHIFT | Qt::Key_F, FaceTracker::isMuted());
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::AutoMuteAudio, 0, false);
#endif

View file

@ -105,7 +105,6 @@ namespace MenuOption {
const QString ExpandPaintGLTiming = "Expand /paintGL";
const QString ExpandPhysicsSimulationTiming = "Expand /physics";
const QString ExpandUpdateTiming = "Expand /update";
const QString Faceshift = "Faceshift";
const QString FirstPerson = "First Person";
const QString FivePointCalibration = "5 Point Calibration";
const QString FixGaze = "Fix Gaze (no saccade)";

View file

@ -13,6 +13,7 @@
#include <NodeList.h>
#include <recording/Deck.h>
#include <trackers/EyeTracker.h>
#include "Application.h"
#include "Avatar.h"
@ -22,8 +23,6 @@
#include "Menu.h"
#include "Util.h"
#include "devices/DdeFaceTracker.h"
#include "devices/EyeTracker.h"
#include "devices/Faceshift.h"
#include <Rig.h>
using namespace std;
@ -209,14 +208,14 @@ void Head::simulate(float deltaTime, bool isMine) {
// use data to update fake Faceshift blendshape coefficients
calculateMouthShapes(deltaTime);
DependencyManager::get<Faceshift>()->updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_blendshapeCoefficients);
FaceTracker::updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_blendshapeCoefficients);
applyEyelidOffset(getOrientation());

View file

@ -41,9 +41,9 @@
#include <recording/Clip.h>
#include <recording/Frame.h>
#include <RecordingScriptingInterface.h>
#include <trackers/FaceTracker.h>
#include "Application.h"
#include "devices/Faceshift.h"
#include "AvatarManager.h"
#include "AvatarActionHold.h"
#include "Menu.h"
@ -82,6 +82,18 @@ const float MyAvatar::ZOOM_MIN = 0.5f;
const float MyAvatar::ZOOM_MAX = 25.0f;
const float MyAvatar::ZOOM_DEFAULT = 1.5f;
// default values, used when avatar is missing joints... (avatar space)
// static const glm::quat DEFAULT_AVATAR_MIDDLE_EYE_ROT { Quaternions::Y_180 };
static const glm::vec3 DEFAULT_AVATAR_MIDDLE_EYE_POS { 0.0f, 0.6f, 0.0f };
static const glm::vec3 DEFAULT_AVATAR_HEAD_POS { 0.0f, 0.53f, 0.0f };
static const glm::vec3 DEFAULT_AVATAR_NECK_POS { 0.0f, 0.445f, 0.025f };
static const glm::vec3 DEFAULT_AVATAR_SPINE2_POS { 0.0f, 0.32f, 0.02f };
static const glm::vec3 DEFAULT_AVATAR_HIPS_POS { 0.0f, 0.0f, 0.0f };
static const glm::vec3 DEFAULT_AVATAR_LEFTFOOT_POS { -0.08f, -0.96f, 0.029f};
static const glm::quat DEFAULT_AVATAR_LEFTFOOT_ROT { -0.40167322754859924f, 0.9154590368270874f, -0.005437685176730156f, -0.023744143545627594f };
static const glm::vec3 DEFAULT_AVATAR_RIGHTFOOT_POS { 0.08f, -0.96f, 0.029f };
static const glm::quat DEFAULT_AVATAR_RIGHTFOOT_ROT { -0.4016716778278351f, 0.9154615998268127f, 0.0053307069465518f, 0.023696165531873703f };
MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
Avatar(thread, rig),
_wasPushing(false),
@ -650,18 +662,13 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
}
FaceTracker* tracker = qApp->getActiveFaceTracker();
bool inFacetracker = tracker && !tracker->isMuted();
bool inFacetracker = tracker && !FaceTracker::isMuted();
if (inHmd) {
estimatedPosition = extractTranslation(getHMDSensorMatrix());
estimatedPosition.x *= -1.0f;
_trackedHeadPosition = estimatedPosition;
const float OCULUS_LEAN_SCALE = 0.05f;
estimatedPosition /= OCULUS_LEAN_SCALE;
} else if (inFacetracker) {
estimatedPosition = tracker->getHeadTranslation();
_trackedHeadPosition = estimatedPosition;
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
}
@ -1378,6 +1385,65 @@ controller::Pose MyAvatar::getRightFootControllerPoseInAvatarFrame() const {
return getRightFootControllerPoseInWorldFrame().transform(invAvatarMatrix);
}
void MyAvatar::setSpineControllerPosesInSensorFrame(const controller::Pose& hips, const controller::Pose& spine2) {
if (controller::InputDevice::getLowVelocityFilter()) {
auto oldHipsPose = getHipsControllerPoseInSensorFrame();
auto oldSpine2Pose = getSpine2ControllerPoseInSensorFrame();
_hipsControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldHipsPose, hips));
_spine2ControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldSpine2Pose, spine2));
} else {
_hipsControllerPoseInSensorFrameCache.set(hips);
_spine2ControllerPoseInSensorFrameCache.set(spine2);
}
}
controller::Pose MyAvatar::getHipsControllerPoseInSensorFrame() const {
return _hipsControllerPoseInSensorFrameCache.get();
}
controller::Pose MyAvatar::getSpine2ControllerPoseInSensorFrame() const {
return _spine2ControllerPoseInSensorFrameCache.get();
}
controller::Pose MyAvatar::getHipsControllerPoseInWorldFrame() const {
return _hipsControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
}
controller::Pose MyAvatar::getSpine2ControllerPoseInWorldFrame() const {
return _spine2ControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
}
controller::Pose MyAvatar::getHipsControllerPoseInAvatarFrame() const {
glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
return getHipsControllerPoseInWorldFrame().transform(invAvatarMatrix);
}
controller::Pose MyAvatar::getSpine2ControllerPoseInAvatarFrame() const {
glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
return getSpine2ControllerPoseInWorldFrame().transform(invAvatarMatrix);
}
void MyAvatar::setHeadControllerPoseInSensorFrame(const controller::Pose& head) {
if (controller::InputDevice::getLowVelocityFilter()) {
auto oldHeadPose = getHeadControllerPoseInSensorFrame();
_headControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldHeadPose, head));
} else {
_headControllerPoseInSensorFrameCache.set(head);
}
}
controller::Pose MyAvatar::getHeadControllerPoseInSensorFrame() const {
return _headControllerPoseInSensorFrameCache.get();
}
controller::Pose MyAvatar::getHeadControllerPoseInWorldFrame() const {
return _headControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
}
controller::Pose MyAvatar::getHeadControllerPoseInAvatarFrame() const {
glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
return getHeadControllerPoseInWorldFrame().transform(invAvatarMatrix);
}
void MyAvatar::updateMotors() {
_characterController.clearMotors();
@ -2220,22 +2286,17 @@ glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
const glm::quat hmdOrientation = getHMDSensorOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
// 2 meter tall dude (in rig coordinates)
const glm::vec3 DEFAULT_RIG_MIDDLE_EYE_POS(0.0f, 0.9f, 0.0f);
const glm::vec3 DEFAULT_RIG_NECK_POS(0.0f, 0.70f, 0.0f);
const glm::vec3 DEFAULT_RIG_HIPS_POS(0.0f, 0.05f, 0.0f);
int rightEyeIndex = _rig->indexOfJoint("RightEye");
int leftEyeIndex = _rig->indexOfJoint("LeftEye");
int neckIndex = _rig->indexOfJoint("Neck");
int hipsIndex = _rig->indexOfJoint("Hips");
glm::vec3 rigMiddleEyePos = DEFAULT_RIG_MIDDLE_EYE_POS;
glm::vec3 rigMiddleEyePos = DEFAULT_AVATAR_MIDDLE_EYE_POS;
if (leftEyeIndex >= 0 && rightEyeIndex >= 0) {
rigMiddleEyePos = (_rig->getAbsoluteDefaultPose(leftEyeIndex).trans() + _rig->getAbsoluteDefaultPose(rightEyeIndex).trans()) / 2.0f;
}
glm::vec3 rigNeckPos = neckIndex != -1 ? _rig->getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_RIG_NECK_POS;
glm::vec3 rigHipsPos = hipsIndex != -1 ? _rig->getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_RIG_HIPS_POS;
glm::vec3 rigNeckPos = neckIndex != -1 ? _rig->getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_AVATAR_NECK_POS;
glm::vec3 rigHipsPos = hipsIndex != -1 ? _rig->getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_AVATAR_HIPS_POS;
glm::vec3 localEyes = (rigMiddleEyePos - rigHipsPos);
glm::vec3 localNeck = (rigNeckPos - rigHipsPos);
@ -2599,6 +2660,79 @@ glm::vec3 MyAvatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
}
}
glm::mat4 MyAvatar::getCenterEyeCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int rightEyeIndex = _rig->indexOfJoint("RightEye");
int leftEyeIndex = _rig->indexOfJoint("LeftEye");
if (rightEyeIndex >= 0 && leftEyeIndex >= 0) {
auto centerEyePos = (getAbsoluteDefaultJointTranslationInObjectFrame(rightEyeIndex) + getAbsoluteDefaultJointTranslationInObjectFrame(leftEyeIndex)) * 0.5f;
auto centerEyeRot = Quaternions::Y_180;
return createMatFromQuatAndPos(centerEyeRot, centerEyePos);
} else {
return createMatFromQuatAndPos(Quaternions::Y_180, DEFAULT_AVATAR_MIDDLE_EYE_POS); // Y_180 facing assumed (cf. DEFAULT_AVATAR_MIDDLE_EYE_ROT above); the original passed the position as the quat
}
}
glm::mat4 MyAvatar::getHeadCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int headIndex = _rig->indexOfJoint("Head");
if (headIndex >= 0) {
auto headPos = getAbsoluteDefaultJointTranslationInObjectFrame(headIndex);
auto headRot = getAbsoluteDefaultJointRotationInObjectFrame(headIndex);
return createMatFromQuatAndPos(headRot, headPos);
} else {
return createMatFromQuatAndPos(Quaternions::Y_180, DEFAULT_AVATAR_HEAD_POS); // default Y_180 avatar facing assumed
}
}
glm::mat4 MyAvatar::getSpine2CalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int spine2Index = _rig->indexOfJoint("Spine2");
if (spine2Index >= 0) {
auto spine2Pos = getAbsoluteDefaultJointTranslationInObjectFrame(spine2Index);
auto spine2Rot = getAbsoluteDefaultJointRotationInObjectFrame(spine2Index);
return createMatFromQuatAndPos(spine2Rot, spine2Pos);
} else {
return createMatFromQuatAndPos(Quaternions::Y_180, DEFAULT_AVATAR_SPINE2_POS); // default Y_180 avatar facing assumed
}
}
glm::mat4 MyAvatar::getHipsCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int hipsIndex = _rig->indexOfJoint("Hips");
if (hipsIndex >= 0) {
auto hipsPos = getAbsoluteDefaultJointTranslationInObjectFrame(hipsIndex);
auto hipsRot = getAbsoluteDefaultJointRotationInObjectFrame(hipsIndex);
return createMatFromQuatAndPos(hipsRot, hipsPos);
} else {
return createMatFromQuatAndPos(Quaternions::Y_180, DEFAULT_AVATAR_HIPS_POS); // default Y_180 avatar facing assumed
}
}
glm::mat4 MyAvatar::getLeftFootCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int leftFootIndex = _rig->indexOfJoint("LeftFoot");
if (leftFootIndex >= 0) {
auto leftFootPos = getAbsoluteDefaultJointTranslationInObjectFrame(leftFootIndex);
auto leftFootRot = getAbsoluteDefaultJointRotationInObjectFrame(leftFootIndex);
return createMatFromQuatAndPos(leftFootRot, leftFootPos);
} else {
return createMatFromQuatAndPos(DEFAULT_AVATAR_LEFTFOOT_ROT, DEFAULT_AVATAR_LEFTFOOT_POS);
}
}
glm::mat4 MyAvatar::getRightFootCalibrationMat() const {
// TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
int rightFootIndex = _rig->indexOfJoint("RightFoot");
if (rightFootIndex >= 0) {
auto rightFootPos = getAbsoluteDefaultJointTranslationInObjectFrame(rightFootIndex);
auto rightFootRot = getAbsoluteDefaultJointRotationInObjectFrame(rightFootIndex);
return createMatFromQuatAndPos(rightFootRot, rightFootPos);
} else {
return createMatFromQuatAndPos(DEFAULT_AVATAR_RIGHTFOOT_ROT, DEFAULT_AVATAR_RIGHTFOOT_POS);
}
}
bool MyAvatar::pinJoint(int index, const glm::vec3& position, const glm::quat& orientation) {
auto hipsIndex = getJointIndex("Hips");
if (index != hipsIndex) {

View file

@ -353,7 +353,6 @@ public:
eyeContactTarget getEyeContactTarget();
Q_INVOKABLE glm::vec3 getTrackedHeadPosition() const { return _trackedHeadPosition; }
Q_INVOKABLE glm::vec3 getHeadPosition() const { return getHead()->getPosition(); }
Q_INVOKABLE float getHeadFinalYaw() const { return getHead()->getFinalYaw(); }
Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); }
@ -453,6 +452,19 @@ public:
controller::Pose getLeftFootControllerPoseInAvatarFrame() const;
controller::Pose getRightFootControllerPoseInAvatarFrame() const;
void setSpineControllerPosesInSensorFrame(const controller::Pose& hips, const controller::Pose& spine2);
controller::Pose getHipsControllerPoseInSensorFrame() const;
controller::Pose getSpine2ControllerPoseInSensorFrame() const;
controller::Pose getHipsControllerPoseInWorldFrame() const;
controller::Pose getSpine2ControllerPoseInWorldFrame() const;
controller::Pose getHipsControllerPoseInAvatarFrame() const;
controller::Pose getSpine2ControllerPoseInAvatarFrame() const;
void setHeadControllerPoseInSensorFrame(const controller::Pose& head);
controller::Pose getHeadControllerPoseInSensorFrame() const;
controller::Pose getHeadControllerPoseInWorldFrame() const;
controller::Pose getHeadControllerPoseInAvatarFrame() const;
bool hasDriveInput() const;
Q_INVOKABLE void setCharacterControllerEnabled(bool enabled);
@ -461,6 +473,14 @@ public:
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
// all calibration matrices are in absolute avatar space.
glm::mat4 getCenterEyeCalibrationMat() const;
glm::mat4 getHeadCalibrationMat() const;
glm::mat4 getSpine2CalibrationMat() const;
glm::mat4 getHipsCalibrationMat() const;
glm::mat4 getLeftFootCalibrationMat() const;
glm::mat4 getRightFootCalibrationMat() const;
void addHoldAction(AvatarActionHold* holdAction); // thread-safe
void removeHoldAction(AvatarActionHold* holdAction); // thread-safe
void updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose& postUpdatePose);
@ -693,9 +713,11 @@ private:
// These are stored in SENSOR frame
ThreadSafeValueCache<controller::Pose> _leftHandControllerPoseInSensorFrameCache { controller::Pose() };
ThreadSafeValueCache<controller::Pose> _rightHandControllerPoseInSensorFrameCache { controller::Pose() };
ThreadSafeValueCache<controller::Pose> _leftFootControllerPoseInSensorFrameCache{ controller::Pose() };
ThreadSafeValueCache<controller::Pose> _rightFootControllerPoseInSensorFrameCache{ controller::Pose() };
ThreadSafeValueCache<controller::Pose> _hipsControllerPoseInSensorFrameCache{ controller::Pose() };
ThreadSafeValueCache<controller::Pose> _spine2ControllerPoseInSensorFrameCache{ controller::Pose() };
ThreadSafeValueCache<controller::Pose> _headControllerPoseInSensorFrameCache{ controller::Pose() };
bool _hmdLeanRecenterEnabled = true;

View file

@ -107,33 +107,49 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Rig::HeadParameters headParams;
if (qApp->isHMDMode()) {
headParams.isInHMD = true;
// get HMD position from sensor space into world space, and back into rig space
glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
glm::mat4 worldToRig = glm::inverse(rigToWorld);
glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
headParams.rigHeadPosition = extractTranslation(rigHMDMat);
headParams.rigHeadOrientation = extractRotation(rigHMDMat);
headParams.worldHeadOrientation = extractRotation(worldHMDMat);
// TODO: if hips target sensor is valid.
// Copy it into headParams.hipsMatrix, and set headParams.hipsEnabled to true.
headParams.hipsEnabled = false;
// input action is the highest priority source for head orientation.
auto avatarHeadPose = myAvatar->getHeadControllerPoseInAvatarFrame();
if (avatarHeadPose.isValid()) {
glm::mat4 rigHeadMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
headParams.rigHeadPosition = extractTranslation(rigHeadMat);
headParams.rigHeadOrientation = glmExtractRotation(rigHeadMat);
headParams.headEnabled = true;
} else {
headParams.hipsEnabled = false;
headParams.isInHMD = false;
// We don't have a valid localHeadPosition.
headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame();
headParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
if (qApp->isHMDMode()) {
// get HMD position from sensor space into world space, and back into rig space
glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
glm::mat4 worldToRig = glm::inverse(rigToWorld);
glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
_rig->computeHeadFromHMD(AnimPose(rigHMDMat), headParams.rigHeadPosition, headParams.rigHeadOrientation);
headParams.headEnabled = true;
} else {
// even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and down in desktop mode.
// preMult 180 is necessary to convert from avatar to rig coordinates.
// postMult 180 is necessary to convert head from -z forward to z forward.
headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
headParams.headEnabled = false;
}
}
auto avatarHipsPose = myAvatar->getHipsControllerPoseInAvatarFrame();
if (avatarHipsPose.isValid()) {
glm::mat4 rigHipsMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHipsPose.getRotation(), avatarHipsPose.getTranslation());
headParams.hipsMatrix = rigHipsMat;
headParams.hipsEnabled = true;
} else {
headParams.hipsEnabled = false;
}
auto avatarSpine2Pose = myAvatar->getSpine2ControllerPoseInAvatarFrame();
if (avatarSpine2Pose.isValid()) {
glm::mat4 rigSpine2Mat = Matrices::Y_180 * createMatFromQuatAndPos(avatarSpine2Pose.getRotation(), avatarSpine2Pose.getTranslation());
headParams.spine2Matrix = rigSpine2Mat;
headParams.spine2Enabled = true;
} else {
headParams.spine2Enabled = false;
}
headParams.neckJointIndex = geometry.neckJointIndex;
headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;
_rig->updateFromHeadParameters(headParams, deltaTime);
@ -193,7 +209,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Model::updateRig(deltaTime, parentTransform);
Rig::EyeParameters eyeParams;
eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
eyeParams.eyeLookAt = lookAt;
eyeParams.eyeSaccade = head->getSaccade();
eyeParams.modelRotation = getRotation();
@ -225,7 +240,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
head->setBaseRoll(glm::degrees(-eulers.z));
Rig::EyeParameters eyeParams;
eyeParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
eyeParams.eyeLookAt = lookAt;
eyeParams.eyeSaccade = glm::vec3(0.0f);
eyeParams.modelRotation = getRotation();

View file

@ -22,7 +22,7 @@
#include <DependencyManager.h>
#include <ui/overlays/TextOverlay.h>
#include "FaceTracker.h"
#include <trackers/FaceTracker.h>
class DdeFaceTracker : public FaceTracker, public Dependency {
Q_OBJECT

View file

@ -1,310 +0,0 @@
//
// Faceshift.cpp
// interface/src/devices
//
// Created by Andrzej Kapolka on 9/3/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QTimer>
#include <GLMHelpers.h>
#include <NumericalConstants.h>
#include <PerfStat.h>
#include "Faceshift.h"
#include "Menu.h"
#include "Util.h"
#include "InterfaceLogging.h"
#ifdef HAVE_FACESHIFT
using namespace fs;
#endif
using namespace std;
const QString DEFAULT_FACESHIFT_HOSTNAME = "localhost";
const quint16 FACESHIFT_PORT = 33433;
Faceshift::Faceshift() :
_hostname("faceshiftHostname", DEFAULT_FACESHIFT_HOSTNAME)
{
#ifdef HAVE_FACESHIFT
connect(&_tcpSocket, SIGNAL(connected()), SLOT(noteConnected()));
connect(&_tcpSocket, SIGNAL(error(QAbstractSocket::SocketError)), SLOT(noteError(QAbstractSocket::SocketError)));
connect(&_tcpSocket, SIGNAL(readyRead()), SLOT(readFromSocket()));
connect(&_tcpSocket, SIGNAL(stateChanged(QAbstractSocket::SocketState)), SIGNAL(connectionStateChanged()));
connect(&_tcpSocket, SIGNAL(disconnected()), SLOT(noteDisconnected()));
connect(&_udpSocket, SIGNAL(readyRead()), SLOT(readPendingDatagrams()));
_udpSocket.bind(FACESHIFT_PORT);
#endif
}
#ifdef HAVE_FACESHIFT
void Faceshift::init() {
FaceTracker::init();
setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !_isMuted);
}
void Faceshift::update(float deltaTime) {
if (!isActive()) {
return;
}
FaceTracker::update(deltaTime);
// get the euler angles relative to the window
glm::vec3 eulers = glm::degrees(safeEulerAngles(_headRotation * glm::quat(glm::radians(glm::vec3(
(_eyeGazeLeftPitch + _eyeGazeRightPitch) / 2.0f, (_eyeGazeLeftYaw + _eyeGazeRightYaw) / 2.0f, 0.0f)))));
// compute and subtract the long term average
const float LONG_TERM_AVERAGE_SMOOTHING = 0.999f;
if (!_longTermAverageInitialized) {
_longTermAverageEyePitch = eulers.x;
_longTermAverageEyeYaw = eulers.y;
_longTermAverageInitialized = true;
} else {
_longTermAverageEyePitch = glm::mix(eulers.x, _longTermAverageEyePitch, LONG_TERM_AVERAGE_SMOOTHING);
_longTermAverageEyeYaw = glm::mix(eulers.y, _longTermAverageEyeYaw, LONG_TERM_AVERAGE_SMOOTHING);
}
_estimatedEyePitch = eulers.x - _longTermAverageEyePitch;
_estimatedEyeYaw = eulers.y - _longTermAverageEyeYaw;
}
void Faceshift::reset() {
if (_tcpSocket.state() == QAbstractSocket::ConnectedState) {
qCDebug(interfaceapp, "Faceshift: Reset");
FaceTracker::reset();
string message;
fsBinaryStream::encode_message(message, fsMsgCalibrateNeutral());
send(message);
}
_longTermAverageInitialized = false;
}
bool Faceshift::isActive() const {
const quint64 ACTIVE_TIMEOUT_USECS = 1000000;
return (usecTimestampNow() - _lastReceiveTimestamp) < ACTIVE_TIMEOUT_USECS;
}
bool Faceshift::isTracking() const {
return isActive() && _tracking;
}
#endif
bool Faceshift::isConnectedOrConnecting() const {
return _tcpSocket.state() == QAbstractSocket::ConnectedState ||
(_tcpRetryCount == 0 && _tcpSocket.state() != QAbstractSocket::UnconnectedState);
}
void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float browUp,
float jawOpen, float mouth2, float mouth3, float mouth4, QVector<float>& coefficients) const {
const int MMMM_BLENDSHAPE = 34;
const int FUNNEL_BLENDSHAPE = 40;
const int SMILE_LEFT_BLENDSHAPE = 28;
const int SMILE_RIGHT_BLENDSHAPE = 29;
const int MAX_FAKE_BLENDSHAPE = 40; // Largest modified blendshape from above and below
coefficients.resize(max((int)coefficients.size(), MAX_FAKE_BLENDSHAPE + 1));
qFill(coefficients.begin(), coefficients.end(), 0.0f);
coefficients[_leftBlinkIndex] = leftBlink;
coefficients[_rightBlinkIndex] = rightBlink;
coefficients[_browUpCenterIndex] = browUp;
coefficients[_browUpLeftIndex] = browUp;
coefficients[_browUpRightIndex] = browUp;
coefficients[_jawOpenIndex] = jawOpen;
coefficients[SMILE_LEFT_BLENDSHAPE] = coefficients[SMILE_RIGHT_BLENDSHAPE] = mouth4;
coefficients[MMMM_BLENDSHAPE] = mouth2;
coefficients[FUNNEL_BLENDSHAPE] = mouth3;
}
void Faceshift::setEnabled(bool enabled) {
// Don't enable until we have explicitly initialized
if (!_isInitialized) {
return;
}
#ifdef HAVE_FACESHIFT
if ((_tcpEnabled = enabled)) {
connectSocket();
} else {
qCDebug(interfaceapp, "Faceshift: Disconnecting...");
_tcpSocket.disconnectFromHost();
}
#endif
}
void Faceshift::connectSocket() {
if (_tcpEnabled) {
if (!_tcpRetryCount) {
qCDebug(interfaceapp, "Faceshift: Connecting...");
}
_tcpSocket.connectToHost(_hostname.get(), FACESHIFT_PORT);
_tracking = false;
}
}
void Faceshift::noteConnected() {
#ifdef HAVE_FACESHIFT
qCDebug(interfaceapp, "Faceshift: Connected");
// request the list of blendshape names
string message;
fsBinaryStream::encode_message(message, fsMsgSendBlendshapeNames());
send(message);
#endif
}
void Faceshift::noteDisconnected() {
#ifdef HAVE_FACESHIFT
qCDebug(interfaceapp, "Faceshift: Disconnected");
#endif
}
void Faceshift::noteError(QAbstractSocket::SocketError error) {
if (!_tcpRetryCount) {
// Only log the connection failure the first time, so that we can keep waiting for the server
qCWarning(interfaceapp) << "Faceshift: " << _tcpSocket.errorString();
}
// retry connection after a 2 second delay
if (_tcpEnabled) {
_tcpRetryCount++;
QTimer::singleShot(2000, this, SLOT(connectSocket()));
}
}
void Faceshift::readPendingDatagrams() {
QByteArray buffer;
while (_udpSocket.hasPendingDatagrams()) {
buffer.resize(_udpSocket.pendingDatagramSize());
_udpSocket.readDatagram(buffer.data(), buffer.size());
receive(buffer);
}
}
void Faceshift::readFromSocket() {
receive(_tcpSocket.readAll());
}
void Faceshift::send(const std::string& message) {
_tcpSocket.write(message.data(), message.size());
}
void Faceshift::receive(const QByteArray& buffer) {
#ifdef HAVE_FACESHIFT
_lastReceiveTimestamp = usecTimestampNow();
_stream.received(buffer.size(), buffer.constData());
fsMsgPtr msg;
for (fsMsgPtr msg; (msg = _stream.get_message()); ) {
switch (msg->id()) {
case fsMsg::MSG_OUT_TRACKING_STATE: {
const fsTrackingData& data = static_pointer_cast<fsMsgTrackingState>(msg)->tracking_data();
if ((_tracking = data.m_trackingSuccessful)) {
glm::quat newRotation = glm::quat(data.m_headRotation.w, -data.m_headRotation.x,
data.m_headRotation.y, -data.m_headRotation.z);
// Compute angular velocity of the head
glm::quat r = glm::normalize(newRotation * glm::inverse(_headRotation));
float theta = 2 * acos(r.w);
if (theta > EPSILON) {
float rMag = glm::length(glm::vec3(r.x, r.y, r.z));
_headAngularVelocity = theta / _averageFrameTime * glm::vec3(r.x, r.y, r.z) / rMag;
} else {
_headAngularVelocity = glm::vec3(0,0,0);
}
const float ANGULAR_VELOCITY_FILTER_STRENGTH = 0.3f;
_headRotation = safeMix(_headRotation, newRotation, glm::clamp(glm::length(_headAngularVelocity) *
ANGULAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f));
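// (For reference: r above is the frame-to-frame delta rotation, so theta = 2 * acos(r.w)
// is its rotation angle and the normalized (r.x, r.y, r.z) its axis, giving
// omega = (theta / averageFrameTime) * axis. The safeMix weight then trusts the new
// sample more when the head is moving quickly, and smooths jitter when it is nearly still.)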
const float TRANSLATION_SCALE = 0.02f;
glm::vec3 newHeadTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y,
-data.m_headTranslation.z) * TRANSLATION_SCALE;
_headLinearVelocity = (newHeadTranslation - _lastHeadTranslation) / _averageFrameTime;
const float LINEAR_VELOCITY_FILTER_STRENGTH = 0.3f;
float velocityFilter = glm::clamp(1.0f - glm::length(_headLinearVelocity) *
LINEAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
_filteredHeadTranslation = velocityFilter * _filteredHeadTranslation + (1.0f - velocityFilter) * newHeadTranslation;
_lastHeadTranslation = newHeadTranslation;
_headTranslation = _filteredHeadTranslation;
_eyeGazeLeftPitch = -data.m_eyeGazeLeftPitch;
_eyeGazeLeftYaw = data.m_eyeGazeLeftYaw;
_eyeGazeRightPitch = -data.m_eyeGazeRightPitch;
_eyeGazeRightYaw = data.m_eyeGazeRightYaw;
_blendshapeCoefficients = QVector<float>::fromStdVector(data.m_coeffs);
const float FRAME_AVERAGING_FACTOR = 0.99f;
quint64 usecsNow = usecTimestampNow();
if (_lastMessageReceived != 0) {
_averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime +
(1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastMessageReceived) / 1000000.0f;
}
_lastMessageReceived = usecsNow;
}
break;
}
case fsMsg::MSG_OUT_BLENDSHAPE_NAMES: {
const vector<string>& names = static_pointer_cast<fsMsgBlendshapeNames>(msg)->blendshape_names();
for (int i = 0; i < (int)names.size(); i++) {
if (names[i] == "EyeBlink_L") {
_leftBlinkIndex = i;
} else if (names[i] == "EyeBlink_R") {
_rightBlinkIndex = i;
} else if (names[i] == "EyeOpen_L") {
_leftEyeOpenIndex = i;
} else if (names[i] == "EyeOpen_R") {
_rightEyeOpenIndex = i;
} else if (names[i] == "BrowsD_L") {
_browDownLeftIndex = i;
} else if (names[i] == "BrowsD_R") {
_browDownRightIndex = i;
} else if (names[i] == "BrowsU_C") {
_browUpCenterIndex = i;
} else if (names[i] == "BrowsU_L") {
_browUpLeftIndex = i;
} else if (names[i] == "BrowsU_R") {
_browUpRightIndex = i;
} else if (names[i] == "JawOpen") {
_jawOpenIndex = i;
} else if (names[i] == "MouthSmile_L") {
_mouthSmileLeftIndex = i;
} else if (names[i] == "MouthSmile_R") {
_mouthSmileRightIndex = i;
}
}
break;
}
default:
break;
}
}
#endif
FaceTracker::countFrame();
}
void Faceshift::setHostname(const QString& hostname) {
_hostname.set(hostname);
}

View file

@ -1,155 +0,0 @@
//
// Faceshift.h
// interface/src/devices
//
// Created by Andrzej Kapolka on 9/3/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Faceshift_h
#define hifi_Faceshift_h
#include <QTcpSocket>
#include <QUdpSocket>
#ifdef HAVE_FACESHIFT
#include <fsbinarystream.h>
#endif
#include <DependencyManager.h>
#include <SettingHandle.h>
#include "FaceTracker.h"
const float STARTING_FACESHIFT_FRAME_TIME = 0.033f;
/// Handles interaction with the Faceshift software, which provides head position/orientation and facial features.
class Faceshift : public FaceTracker, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
#ifdef HAVE_FACESHIFT
// If we don't have faceshift, use the base class' methods
virtual void init() override;
virtual void update(float deltaTime) override;
virtual void reset() override;
virtual bool isActive() const override;
virtual bool isTracking() const override;
#endif
bool isConnectedOrConnecting() const;
const glm::vec3& getHeadAngularVelocity() const { return _headAngularVelocity; }
// these pitch/yaw angles are in degrees
float getEyeGazeLeftPitch() const { return _eyeGazeLeftPitch; }
float getEyeGazeLeftYaw() const { return _eyeGazeLeftYaw; }
float getEyeGazeRightPitch() const { return _eyeGazeRightPitch; }
float getEyeGazeRightYaw() const { return _eyeGazeRightYaw; }
float getLeftBlink() const { return getBlendshapeCoefficient(_leftBlinkIndex); }
float getRightBlink() const { return getBlendshapeCoefficient(_rightBlinkIndex); }
float getLeftEyeOpen() const { return getBlendshapeCoefficient(_leftEyeOpenIndex); }
float getRightEyeOpen() const { return getBlendshapeCoefficient(_rightEyeOpenIndex); }
float getBrowDownLeft() const { return getBlendshapeCoefficient(_browDownLeftIndex); }
float getBrowDownRight() const { return getBlendshapeCoefficient(_browDownRightIndex); }
float getBrowUpCenter() const { return getBlendshapeCoefficient(_browUpCenterIndex); }
float getBrowUpLeft() const { return getBlendshapeCoefficient(_browUpLeftIndex); }
float getBrowUpRight() const { return getBlendshapeCoefficient(_browUpRightIndex); }
float getMouthSize() const { return getBlendshapeCoefficient(_jawOpenIndex); }
float getMouthSmileLeft() const { return getBlendshapeCoefficient(_mouthSmileLeftIndex); }
float getMouthSmileRight() const { return getBlendshapeCoefficient(_mouthSmileRightIndex); }
QString getHostname() { return _hostname.get(); }
void setHostname(const QString& hostname);
void updateFakeCoefficients(float leftBlink,
float rightBlink,
float browUp,
float jawOpen,
float mouth2,
float mouth3,
float mouth4,
QVector<float>& coefficients) const;
signals:
void connectionStateChanged();
public slots:
void setEnabled(bool enabled) override;
private slots:
void connectSocket();
void noteConnected();
void noteError(QAbstractSocket::SocketError error);
void readPendingDatagrams();
void readFromSocket();
void noteDisconnected();
private:
Faceshift();
virtual ~Faceshift() {}
void send(const std::string& message);
void receive(const QByteArray& buffer);
QTcpSocket _tcpSocket;
QUdpSocket _udpSocket;
#ifdef HAVE_FACESHIFT
fs::fsBinaryStream _stream;
#endif
bool _tcpEnabled = true;
int _tcpRetryCount = 0;
bool _tracking = false;
quint64 _lastReceiveTimestamp = 0;
quint64 _lastMessageReceived = 0;
float _averageFrameTime = STARTING_FACESHIFT_FRAME_TIME;
glm::vec3 _headAngularVelocity = glm::vec3(0.0f);
glm::vec3 _headLinearVelocity = glm::vec3(0.0f);
glm::vec3 _lastHeadTranslation = glm::vec3(0.0f);
glm::vec3 _filteredHeadTranslation = glm::vec3(0.0f);
// degrees
float _eyeGazeLeftPitch = 0.0f;
float _eyeGazeLeftYaw = 0.0f;
float _eyeGazeRightPitch = 0.0f;
float _eyeGazeRightYaw = 0.0f;
// degrees
float _longTermAverageEyePitch = 0.0f;
float _longTermAverageEyeYaw = 0.0f;
bool _longTermAverageInitialized = false;
Setting::Handle<QString> _hostname;
// see http://support.faceshift.com/support/articles/35129-export-of-blendshapes
int _leftBlinkIndex = 0;
int _rightBlinkIndex = 1;
int _leftEyeOpenIndex = 8;
int _rightEyeOpenIndex = 9;
// Brows
int _browDownLeftIndex = 14;
int _browDownRightIndex = 15;
int _browUpCenterIndex = 16;
int _browUpLeftIndex = 17;
int _browUpRightIndex = 18;
int _mouthSmileLeftIndex = 28;
int _mouthSmileRightIndex = 29;
int _jawOpenIndex = 21;
};
#endif // hifi_Faceshift_h

View file

@ -14,7 +14,7 @@
#include <QDateTime>
#include "MotionTracker.h"
#include <trackers/MotionTracker.h>
#ifdef HAVE_LEAPMOTION
#include <Leap.h>

View file

@ -17,7 +17,7 @@
#include <plugins/PluginManager.h>
#include "Application.h"
#include "devices/MotionTracker.h"
#include <trackers/MotionTracker.h>
void ControllerScriptingInterface::handleMetaEvent(HFMetaEvent* event) {
if (event->type() == HFActionEvent::startType()) {

View file

@ -11,9 +11,9 @@
#include <AudioClient.h>
#include <SettingHandle.h>
#include <trackers/FaceTracker.h>
#include "Application.h"
#include "devices/FaceTracker.h"
#include "Menu.h"
HIFI_QML_DEF(AvatarInputs)

View file

@ -11,7 +11,6 @@
#include <AudioClient.h>
#include <avatar/AvatarManager.h>
#include <devices/DdeFaceTracker.h>
#include <devices/Faceshift.h>
#include <NetworkingConstants.h>
#include <ScriptEngines.h>
#include <OffscreenUi.h>
@ -202,13 +201,6 @@ void setupPreferences() {
auto setter = [](float value) { FaceTracker::setEyeDeflection(value); };
preferences->addPreference(new SliderPreference(AVATAR_TUNING, "Face tracker eye deflection", getter, setter));
}
{
auto getter = []()->QString { return DependencyManager::get<Faceshift>()->getHostname(); };
auto setter = [](const QString& value) { DependencyManager::get<Faceshift>()->setHostname(value); };
auto preference = new EditPreference(AVATAR_TUNING, "Faceshift hostname", getter, setter);
preference->setPlaceholderText("localhost");
preferences->addPreference(preference);
}
{
auto getter = [=]()->QString { return myAvatar->getAnimGraphOverrideUrl().toString(); };
auto setter = [=](const QString& value) { myAvatar->setAnimGraphOverrideUrl(QUrl(value)); };

View file

@ -46,7 +46,6 @@ static bool isEqual(const glm::quat& p, const glm::quat& q) {
const glm::vec3 DEFAULT_RIGHT_EYE_POS(-0.3f, 0.9f, 0.0f);
const glm::vec3 DEFAULT_LEFT_EYE_POS(0.3f, 0.9f, 0.0f);
const glm::vec3 DEFAULT_HEAD_POS(0.0f, 0.75f, 0.0f);
const glm::vec3 DEFAULT_NECK_POS(0.0f, 0.70f, 0.0f);
void Rig::overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame) {
@ -1020,7 +1019,7 @@ glm::quat Rig::getJointDefaultRotationInParentFrame(int jointIndex) {
}
void Rig::updateFromHeadParameters(const HeadParameters& params, float dt) {
updateNeckJoint(params.neckJointIndex, params);
updateHeadAnimVars(params);
_animVars.set("isTalking", params.isTalking);
_animVars.set("notIsTalking", !params.isTalking);
@ -1028,101 +1027,73 @@ void Rig::updateFromHeadParameters(const HeadParameters& params, float dt) {
if (params.hipsEnabled) {
_animVars.set("hipsType", (int)IKTarget::Type::RotationAndPosition);
_animVars.set("hipsPosition", extractTranslation(params.hipsMatrix));
_animVars.set("hipsRotation", glmExtractRotation(params.hipsMatrix) * Quaternions::Y_180);
_animVars.set("hipsRotation", glmExtractRotation(params.hipsMatrix));
} else {
_animVars.set("hipsType", (int)IKTarget::Type::Unknown);
}
// by default this IK target is disabled.
_animVars.set("spine2Type", (int)IKTarget::Type::Unknown);
if (params.spine2Enabled) {
_animVars.set("spine2Type", (int)IKTarget::Type::RotationAndPosition);
_animVars.set("spine2Position", extractTranslation(params.spine2Matrix));
_animVars.set("spine2Rotation", glmExtractRotation(params.spine2Matrix));
} else {
_animVars.set("spine2Type", (int)IKTarget::Type::Unknown);
}
}
void Rig::updateFromEyeParameters(const EyeParameters& params) {
updateEyeJoint(params.leftEyeJointIndex, params.modelTranslation, params.modelRotation,
params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoint(params.rightEyeJointIndex, params.modelTranslation, params.modelRotation,
params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoint(params.leftEyeJointIndex, params.modelTranslation, params.modelRotation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoint(params.rightEyeJointIndex, params.modelTranslation, params.modelRotation, params.eyeLookAt, params.eyeSaccade);
}
void Rig::computeHeadNeckAnimVars(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut,
glm::vec3& neckPositionOut, glm::quat& neckOrientationOut) const {
void Rig::computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const {
// the input hmd values are in avatar/rig space
const glm::vec3& hmdPosition = hmdPose.trans();
const glm::quat& hmdOrientation = hmdPose.rot();
// the HMD looks down the negative z axis, but the head bone looks down the z axis, so apply a 180 degree rotation.
const glm::quat& hmdOrientation = hmdPose.rot() * Quaternions::Y_180;
// TODO: cache jointIndices
int rightEyeIndex = indexOfJoint("RightEye");
int leftEyeIndex = indexOfJoint("LeftEye");
int headIndex = indexOfJoint("Head");
int neckIndex = indexOfJoint("Neck");
glm::vec3 absRightEyePos = rightEyeIndex != -1 ? getAbsoluteDefaultPose(rightEyeIndex).trans() : DEFAULT_RIGHT_EYE_POS;
glm::vec3 absLeftEyePos = leftEyeIndex != -1 ? getAbsoluteDefaultPose(leftEyeIndex).trans() : DEFAULT_LEFT_EYE_POS;
glm::vec3 absHeadPos = headIndex != -1 ? getAbsoluteDefaultPose(headIndex).trans() : DEFAULT_HEAD_POS;
glm::vec3 absNeckPos = neckIndex != -1 ? getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_NECK_POS;
glm::vec3 absCenterEyePos = (absRightEyePos + absLeftEyePos) / 2.0f;
glm::vec3 eyeOffset = absCenterEyePos - absHeadPos;
glm::vec3 headOffset = absHeadPos - absNeckPos;
// apply simplistic head/neck model
// head
headPositionOut = hmdPosition - hmdOrientation * eyeOffset;
headOrientationOut = hmdOrientation;
// neck
neckPositionOut = hmdPosition - hmdOrientation * (headOffset + eyeOffset);
// slerp between default orientation and hmdOrientation
neckOrientationOut = safeMix(hmdOrientation, _animSkeleton->getRelativeDefaultPose(neckIndex).rot(), 0.5f);
}
void Rig::updateNeckJoint(int index, const HeadParameters& params) {
if (_animSkeleton && index >= 0 && index < _animSkeleton->getNumJoints()) {
glm::quat yFlip180 = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
if (params.isInHMD) {
glm::vec3 headPos, neckPos;
glm::quat headRot, neckRot;
AnimPose hmdPose(glm::vec3(1.0f), params.rigHeadOrientation * yFlip180, params.rigHeadPosition);
computeHeadNeckAnimVars(hmdPose, headPos, headRot, neckPos, neckRot);
// debug rendering
#ifdef DEBUG_RENDERING
const glm::vec4 red(1.0f, 0.0f, 0.0f, 1.0f);
const glm::vec4 green(0.0f, 1.0f, 0.0f, 1.0f);
// transform from bone into avatar space
AnimPose headPose(glm::vec3(1), headRot, headPos);
DebugDraw::getInstance().addMyAvatarMarker("headTarget", headPose.rot, headPose.trans, red);
// transform from bone into avatar space
AnimPose neckPose(glm::vec3(1), neckRot, neckPos);
DebugDraw::getInstance().addMyAvatarMarker("neckTarget", neckPose.rot, neckPose.trans, green);
#endif
_animVars.set("headPosition", headPos);
_animVars.set("headRotation", headRot);
_animVars.set("headType", (int)IKTarget::Type::HmdHead);
_animVars.set("neckPosition", neckPos);
_animVars.set("neckRotation", neckRot);
_animVars.set("neckType", (int)IKTarget::Type::Unknown); // 'Unknown' disables the target
void Rig::updateHeadAnimVars(const HeadParameters& params) {
if (_animSkeleton) {
if (params.headEnabled) {
_animVars.set("headPosition", params.rigHeadPosition);
_animVars.set("headRotation", params.rigHeadOrientation);
if (params.hipsEnabled) {
// Since there is an explicit hips IK target, switch the head to use the more generic RotationAndPosition IK chain type.
// This will allow the spine to bend more, ensuring that it can reach the head target position.
_animVars.set("headType", (int)IKTarget::Type::RotationAndPosition);
} else {
// When there is no hips IK target, use the HmdHead IK chain type. This will make the spine very stiff,
// but because the IK _hipsOffset is enabled, the hips will naturally follow underneath the head.
_animVars.set("headType", (int)IKTarget::Type::HmdHead);
}
} else {
_animVars.unset("headPosition");
_animVars.set("headRotation", params.rigHeadOrientation * yFlip180);
_animVars.set("headAndNeckType", (int)IKTarget::Type::RotationOnly);
_animVars.set("headRotation", params.rigHeadOrientation);
_animVars.set("headType", (int)IKTarget::Type::RotationOnly);
_animVars.unset("neckPosition");
_animVars.unset("neckRotation");
_animVars.set("neckType", (int)IKTarget::Type::RotationOnly);
}
}
}
void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
// TODO: does not properly handle avatar scale.

View file

@ -42,18 +42,17 @@ public:
};
struct HeadParameters {
glm::quat worldHeadOrientation = glm::quat(); // world space (-z forward)
glm::quat rigHeadOrientation = glm::quat(); // rig space (-z forward)
glm::vec3 rigHeadPosition = glm::vec3(); // rig space
glm::mat4 hipsMatrix = glm::mat4(); // rig space
glm::mat4 hipsMatrix = glm::mat4(); // rig space
glm::mat4 spine2Matrix = glm::mat4(); // rig space
glm::quat rigHeadOrientation = glm::quat(); // rig space (-z forward)
glm::vec3 rigHeadPosition = glm::vec3(); // rig space
bool hipsEnabled = false;
bool isInHMD = false;
int neckJointIndex = -1;
bool headEnabled = false;
bool spine2Enabled = false;
bool isTalking = false;
};
struct EyeParameters {
glm::quat worldHeadOrientation = glm::quat();
glm::vec3 eyeLookAt = glm::vec3(); // world space
glm::vec3 eyeSaccade = glm::vec3(); // world space
glm::vec3 modelTranslation = glm::vec3();
@ -230,6 +229,9 @@ public:
void setEnableDebugDrawIKTargets(bool enableDebugDrawIKTargets) { _enableDebugDrawIKTargets = enableDebugDrawIKTargets; }
// input assumed to be in rig space
void computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const;
signals:
void onLoadComplete();
@ -239,10 +241,9 @@ protected:
void applyOverridePoses();
void buildAbsoluteRigPoses(const AnimPoseVec& relativePoses, AnimPoseVec& absolutePosesOut);
void updateNeckJoint(int index, const HeadParameters& params);
void computeHeadNeckAnimVars(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut,
glm::vec3& neckPositionOut, glm::quat& neckOrientationOut) const;
void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
void updateHeadAnimVars(const HeadParameters& params);
void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::vec3& lookAt, const glm::vec3& saccade);
void calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds, float* alphaOut) const;
AnimPose _modelOffset; // model to rig space

View file

@ -393,9 +393,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
if (isFingerPointing) {
setAtBit(flags, HAND_STATE_FINGER_POINTING_BIT);
}
// faceshift state
// face tracker state
if (_headData->_isFaceTrackerConnected) {
setAtBit(flags, IS_FACESHIFT_CONNECTED);
setAtBit(flags, IS_FACE_TRACKER_CONNECTED);
}
// eye tracker state
if (_headData->_isEyeTrackerConnected) {
@ -883,7 +883,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
auto newHandState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
+ (oneAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
auto newFaceTrackerConnected = oneAtBit(bitItems, IS_FACESHIFT_CONNECTED);
auto newFaceTrackerConnected = oneAtBit(bitItems, IS_FACE_TRACKER_CONNECTED);
auto newEyeTrackerConnected = oneAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);
bool keyStateChanged = (_keyState != newKeyState);

View file

@ -99,7 +99,7 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
// Referential Data - R is found in the 7th bit
const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits
const int IS_FACESHIFT_CONNECTED = 4; // 5th bit
const int IS_FACE_TRACKER_CONNECTED = 4; // 5th bit
const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
const int HAS_REFERENTIAL = 6; // 7th bit
const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit
@ -218,7 +218,7 @@ namespace AvatarDataPacket {
} PACKED_END;
const size_t AVATAR_LOCAL_POSITION_SIZE = 12;
// only present if IS_FACESHIFT_CONNECTED flag is set in AvatarInfo.flags
// only present if IS_FACE_TRACKER_CONNECTED flag is set in AvatarInfo.flags
PACKED_BEGIN struct FaceTrackerInfo {
float leftEyeBlink;
float rightEyeBlink;

View file

@ -53,6 +53,9 @@ namespace controller {
makePosePair(Action::RIGHT_HAND, "RightHand"),
makePosePair(Action::LEFT_FOOT, "LeftFoot"),
makePosePair(Action::RIGHT_FOOT, "RightFoot"),
makePosePair(Action::HIPS, "Hips"),
makePosePair(Action::SPINE2, "Spine2"),
makePosePair(Action::HEAD, "Head"),
makeButtonPair(Action::LEFT_HAND_CLICK, "LeftHandClick"),
makeButtonPair(Action::RIGHT_HAND_CLICK, "RightHandClick"),

View file

@ -44,6 +44,9 @@ enum class Action {
RIGHT_HAND,
LEFT_FOOT,
RIGHT_FOOT,
HIPS,
SPINE2,
HEAD,
LEFT_HAND_CLICK,
RIGHT_HAND_CLICK,

View file

@ -16,9 +16,15 @@
namespace controller {
struct InputCalibrationData {
glm::mat4 sensorToWorldMat;
glm::mat4 avatarMat;
glm::mat4 hmdSensorMat;
glm::mat4 sensorToWorldMat; // sensor to world
glm::mat4 avatarMat; // avatar to world
glm::mat4 hmdSensorMat; // hmd pos and orientation in sensor space
glm::mat4 defaultCenterEyeMat; // default pose for the center of the eyes in avatar space.
glm::mat4 defaultHeadMat; // default pose for head joint in avatar space
glm::mat4 defaultSpine2; // default pose for spine2 joint in avatar space
glm::mat4 defaultHips; // default pose for hips joint in avatar space
glm::mat4 defaultLeftFoot; // default pose for leftFoot joint in avatar space
glm::mat4 defaultRightFoot; // default pose for rightFoot joint in avatar space
};
enum class ChannelType {

View file

@ -69,5 +69,23 @@ namespace controller {
pose.valid = valid;
return pose;
}
Pose Pose::postTransform(const glm::mat4& mat) const {
glm::mat4 original = ::createMatFromQuatAndPos(rotation, translation);
glm::mat4 result = original * mat;
auto translationOut = ::extractTranslation(result);
auto rotationOut = ::glmExtractRotation(result);
auto velocityOut = velocity + glm::cross(angularVelocity, translationOut - translation); // rigid-body estimate v' = v + w x r; warning: this may be completely wrong
auto angularVelocityOut = angularVelocity;
Pose pose(translationOut,
rotationOut,
velocityOut,
angularVelocityOut);
pose.valid = valid;
return pose;
}
}
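// A rough intuition for the two operations (an informal sketch, not API documentation):
// transform() pre-multiplies, re-expressing the pose in another frame, while
// postTransform() post-multiplies, applying an offset in the pose's own local frame.
// For example, with a hypothetical local offset matrix gripToTipOffset:
//   handPose.transform(sensorToWorldMat);    // the same hand pose, now in world space
//   handPose.postTransform(gripToTipOffset); // the pose of a point offset from the hand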

View file

@ -41,6 +41,7 @@ namespace controller {
vec3 getAngularVelocity() const { return angularVelocity; }
Pose transform(const glm::mat4& mat) const;
Pose postTransform(const glm::mat4& mat) const;
static QScriptValue toScriptValue(QScriptEngine* engine, const Pose& event);
static void fromScriptValue(const QScriptValue& object, Pose& event);

View file

@ -104,6 +104,9 @@ Input::NamedVector StandardController::getAvailableInputs() const {
makePair(RIGHT_HAND, "RightHand"),
makePair(LEFT_FOOT, "LeftFoot"),
makePair(RIGHT_FOOT, "RightFoot"),
makePair(HIPS, "Hips"),
makePair(SPINE2, "Spine2"),
makePair(HEAD, "Head"),
// Aliases, PlayStation style names
makePair(LB, "L1"),

View file

@ -20,6 +20,8 @@
#include <PathUtils.h>
#include <NumericalConstants.h>
#include <StreamUtils.h>
#include "StandardController.h"
#include "StateController.h"
#include "InputRecorder.h"
@ -563,7 +565,18 @@ bool UserInputMapper::applyRoute(const Route::Pointer& route, bool force) {
if (source->isPose()) {
Pose value = getPose(source, route->peek);
static const Pose IDENTITY_POSE { vec3(), quat() };
if (debugRoutes && route->debug) {
qCDebug(controllers) << "Value was t:" << value.translation << "r:" << value.rotation;
}
// Apply each of the filters.
for (const auto& filter : route->filters) {
value = filter->apply(value);
}
if (debugRoutes && route->debug) {
qCDebug(controllers) << "Filtered value was t:" << value.translation << "r:" << value.rotation;
if (!value.valid) {
qCDebug(controllers) << "Applying invalid pose";
} else if (value == IDENTITY_POSE) {

View file

@ -24,6 +24,10 @@
#include "filters/InvertFilter.h"
#include "filters/PulseFilter.h"
#include "filters/ScaleFilter.h"
#include "filters/TranslateFilter.h"
#include "filters/TransformFilter.h"
#include "filters/PostTransformFilter.h"
#include "filters/RotateFilter.h"
using namespace controller;
@ -37,6 +41,10 @@ REGISTER_FILTER_CLASS_INSTANCE(HysteresisFilter, "hysteresis")
REGISTER_FILTER_CLASS_INSTANCE(InvertFilter, "invert")
REGISTER_FILTER_CLASS_INSTANCE(ScaleFilter, "scale")
REGISTER_FILTER_CLASS_INSTANCE(PulseFilter, "pulse")
REGISTER_FILTER_CLASS_INSTANCE(TranslateFilter, "translate")
REGISTER_FILTER_CLASS_INSTANCE(TransformFilter, "transform")
REGISTER_FILTER_CLASS_INSTANCE(PostTransformFilter, "postTransform")
REGISTER_FILTER_CLASS_INSTANCE(RotateFilter, "rotate")
const QString JSON_FILTER_TYPE = QStringLiteral("type");
const QString JSON_FILTER_PARAMS = QStringLiteral("params");
@ -76,7 +84,6 @@ bool Filter::parseSingleFloatParameter(const QJsonValue& parameters, const QStri
return true;
}
} else if (parameters.isObject()) {
static const QString JSON_MIN = QStringLiteral("interval");
auto objectParameters = parameters.toObject();
if (objectParameters.contains(name)) {
output = objectParameters[name].toDouble();
@ -86,6 +93,92 @@ bool Filter::parseSingleFloatParameter(const QJsonValue& parameters, const QStri
return false;
}
bool Filter::parseVec3Parameter(const QJsonValue& parameters, glm::vec3& output) {
if (parameters.isDouble()) {
output = glm::vec3(parameters.toDouble());
return true;
} else if (parameters.isArray()) {
auto arrayParameters = parameters.toArray();
if (arrayParameters.size() == 3) {
output = glm::vec3(arrayParameters[0].toDouble(),
arrayParameters[1].toDouble(),
arrayParameters[2].toDouble());
return true;
}
} else if (parameters.isObject()) {
auto objectParameters = parameters.toObject();
if (objectParameters.contains("x") && objectParameters.contains("y") && objectParameters.contains("z")) {
output = glm::vec3(objectParameters["x"].toDouble(),
objectParameters["y"].toDouble(),
objectParameters["z"].toDouble());
return true;
}
}
return false;
}
bool Filter::parseMat4Parameter(const QJsonValue& parameters, glm::mat4& output) {
if (parameters.isObject()) {
auto objectParameters = parameters.toObject();
if (objectParameters.contains("r0c0") &&
objectParameters.contains("r1c0") &&
objectParameters.contains("r2c0") &&
objectParameters.contains("r3c0") &&
objectParameters.contains("r0c1") &&
objectParameters.contains("r1c1") &&
objectParameters.contains("r2c1") &&
objectParameters.contains("r3c1") &&
objectParameters.contains("r0c2") &&
objectParameters.contains("r1c2") &&
objectParameters.contains("r2c2") &&
objectParameters.contains("r3c2") &&
objectParameters.contains("r0c3") &&
objectParameters.contains("r1c3") &&
objectParameters.contains("r2c3") &&
objectParameters.contains("r3c3")) {
output[0][0] = objectParameters["r0c0"].toDouble();
output[0][1] = objectParameters["r1c0"].toDouble();
output[0][2] = objectParameters["r2c0"].toDouble();
output[0][3] = objectParameters["r3c0"].toDouble();
output[1][0] = objectParameters["r0c1"].toDouble();
output[1][1] = objectParameters["r1c1"].toDouble();
output[1][2] = objectParameters["r2c1"].toDouble();
output[1][3] = objectParameters["r3c1"].toDouble();
output[2][0] = objectParameters["r0c2"].toDouble();
output[2][1] = objectParameters["r1c2"].toDouble();
output[2][2] = objectParameters["r2c2"].toDouble();
output[2][3] = objectParameters["r3c2"].toDouble();
output[3][0] = objectParameters["r0c3"].toDouble();
output[3][1] = objectParameters["r1c3"].toDouble();
output[3][2] = objectParameters["r2c3"].toDouble();
output[3][3] = objectParameters["r3c3"].toDouble();
return true;
}
}
return false;
}
bool Filter::parseQuatParameter(const QJsonValue& parameters, glm::quat& output) {
if (parameters.isObject()) {
auto objectParameters = parameters.toObject();
if (objectParameters.contains("w") &&
objectParameters.contains("x") &&
objectParameters.contains("y") &&
objectParameters.contains("z")) {
output = glm::quat(objectParameters["w"].toDouble(),
objectParameters["x"].toDouble(),
objectParameters["y"].toDouble(),
objectParameters["z"].toDouble());
return true;
}
}
return false;
}
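// For reference, the JSON shapes these parsers accept (inferred from the code above,
// not a documented schema):
//   vec3: 0.5, or [x, y, z], or { "x": 0, "y": 1, "z": 0 }
//   quat: { "w": 1, "x": 0, "y": 0, "z": 0 }
//   mat4: { "r0c0": 1, "r1c0": 0, ..., "r3c3": 1 } -- "rXcY" lands in output[Y][X],
//         matching glm's column-major storage.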
#if 0

View file

@ -21,6 +21,8 @@
#include <QtCore/QEasingCurve>
#include "../Pose.h"
class QJsonValue;
namespace controller {
@ -34,6 +36,8 @@ namespace controller {
using Factory = hifi::SimpleFactory<Filter, QString>;
virtual float apply(float value) const = 0;
virtual Pose apply(Pose value) const = 0;
// Factory features
virtual bool parseParameters(const QJsonValue& parameters) { return true; }
@ -42,6 +46,9 @@ namespace controller {
static Factory& getFactory() { return _factory; }
static bool parseSingleFloatParameter(const QJsonValue& parameters, const QString& name, float& output);
static bool parseVec3Parameter(const QJsonValue& parameters, glm::vec3& output);
static bool parseQuatParameter(const QJsonValue& parameters, glm::quat& output);
static bool parseMat4Parameter(const QJsonValue& parameters, glm::mat4& output);
protected:
static Factory _factory;
};

View file

@ -26,6 +26,10 @@
#include "filters/InvertFilter.h"
#include "filters/PulseFilter.h"
#include "filters/ScaleFilter.h"
#include "filters/TranslateFilter.h"
#include "filters/TransformFilter.h"
#include "filters/PostTransformFilter.h"
#include "filters/RotateFilter.h"
#include "conditionals/AndConditional.h"
using namespace controller;
@ -103,6 +107,26 @@ QObject* RouteBuilderProxy::deadZone(float min) {
return this;
}
QObject* RouteBuilderProxy::translate(glm::vec3 translate) {
addFilter(std::make_shared<TranslateFilter>(translate));
return this;
}
QObject* RouteBuilderProxy::transform(glm::mat4 transform) {
addFilter(std::make_shared<TransformFilter>(transform));
return this;
}
QObject* RouteBuilderProxy::postTransform(glm::mat4 transform) {
addFilter(std::make_shared<PostTransformFilter>(transform));
return this;
}
QObject* RouteBuilderProxy::rotate(glm::quat rotation) {
addFilter(std::make_shared<RotateFilter>(rotation));
return this;
}
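// Script-side these become chainable route filters; a hypothetical mapping
// (in the spirit of examples/controllers/puppetFeet3.js) might read:
//   mapping.from(Controller.Standard.LeftHand)
//       .rotate(Quat.fromPitchYawRollDegrees(0, 0, -90))
//       .translate({ x: 0, y: -1, z: 0 })
//       .to(Controller.Standard.LeftFoot);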
QObject* RouteBuilderProxy::constrainToInteger() {
addFilter(std::make_shared<ConstrainToIntegerFilter>());
return this;

View file

@ -48,6 +48,10 @@ class RouteBuilderProxy : public QObject {
Q_INVOKABLE QObject* deadZone(float min);
Q_INVOKABLE QObject* constrainToInteger();
Q_INVOKABLE QObject* constrainToPositiveInteger();
Q_INVOKABLE QObject* translate(glm::vec3 translate);
Q_INVOKABLE QObject* transform(glm::mat4 transform);
Q_INVOKABLE QObject* postTransform(glm::mat4 transform);
Q_INVOKABLE QObject* rotate(glm::quat rotation);
private:
void to(const Endpoint::Pointer& destination);

View file

@ -21,6 +21,9 @@ public:
virtual float apply(float value) const override {
return glm::clamp(value, _min, _max);
}
virtual Pose apply(Pose value) const override { return value; }
virtual bool parseParameters(const QJsonValue& parameters) override;
protected:
float _min = 0.0f;

View file

@ -22,6 +22,9 @@ public:
virtual float apply(float value) const override {
return glm::sign(value);
}
virtual Pose apply(Pose value) const override { return value; }
protected:
};

View file

@ -22,6 +22,9 @@ public:
virtual float apply(float value) const override {
return (value <= 0.0f) ? 0.0f : 1.0f;
}
virtual Pose apply(Pose value) const override { return value; }
protected:
};

View file

@ -20,6 +20,9 @@ public:
DeadZoneFilter(float min = 0.0f) : _min(min) {}
virtual float apply(float value) const override;
virtual Pose apply(Pose value) const override { return value; }
virtual bool parseParameters(const QJsonValue& parameters) override;
protected:
float _min = 0.0f;

View file

@ -19,6 +19,9 @@ class HysteresisFilter : public Filter {
public:
HysteresisFilter(float min = 0.25, float max = 0.75);
virtual float apply(float value) const override;
virtual Pose apply(Pose value) const override { return value; }
virtual bool parseParameters(const QJsonValue& parameters) override;
protected:
float _min;

View file

@ -0,0 +1,33 @@
//
// Created by Brad Hefta-Gaub 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Controllers_Filters_PostTransform_h
#define hifi_Controllers_Filters_PostTransform_h
#include <glm/gtx/transform.hpp>
#include "../Filter.h"
namespace controller {
class PostTransformFilter : public Filter {
REGISTER_FILTER_CLASS(PostTransformFilter);
public:
PostTransformFilter() { }
PostTransformFilter(glm::mat4 transform) : _transform(transform) {}
virtual float apply(float value) const override { return value; }
virtual Pose apply(Pose value) const override { return value.postTransform(_transform); }
virtual bool parseParameters(const QJsonValue& parameters) override { return parseMat4Parameter(parameters, _transform); }
private:
glm::mat4 _transform;
};
}
#endif

View file

@ -23,6 +23,8 @@ public:
virtual float apply(float value) const override;
virtual Pose apply(Pose value) const override { return value; }
virtual bool parseParameters(const QJsonValue& parameters) override;
private:

View file

@ -0,0 +1,39 @@
//
// Created by Brad Hefta-Gaub 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Controllers_Filters_Rotate_h
#define hifi_Controllers_Filters_Rotate_h
#include <glm/gtx/transform.hpp>
#include "../Filter.h"
namespace controller {
class RotateFilter : public Filter {
REGISTER_FILTER_CLASS(RotateFilter);
public:
RotateFilter() { }
RotateFilter(glm::quat rotation) : _rotation(rotation) {}
virtual float apply(float value) const override { return value; }
virtual Pose apply(Pose value) const override {
return value.transform(glm::mat4(_rotation));
}
virtual bool parseParameters(const QJsonValue& parameters) override { return parseQuatParameter(parameters, _rotation); }
private:
glm::quat _rotation;
};
}
#endif

View file

@ -10,6 +10,8 @@
#ifndef hifi_Controllers_Filters_Scale_h
#define hifi_Controllers_Filters_Scale_h
#include <glm/gtc/matrix_transform.hpp>
#include "../Filter.h"
namespace controller {
@ -23,6 +25,11 @@ public:
virtual float apply(float value) const override {
return value * _scale;
}
virtual Pose apply(Pose value) const override {
return value.transform(glm::scale(glm::mat4(), glm::vec3(_scale)));
}
virtual bool parseParameters(const QJsonValue& parameters) override;
private:

View file

@ -0,0 +1,35 @@
//
// Created by Brad Hefta-Gaub 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Controllers_Filters_Transform_h
#define hifi_Controllers_Filters_Transform_h
#include <glm/gtx/transform.hpp>
#include "../Filter.h"
namespace controller {
class TransformFilter : public Filter {
REGISTER_FILTER_CLASS(TransformFilter);
public:
TransformFilter() { }
TransformFilter(glm::mat4 transform) : _transform(transform) {}
virtual float apply(float value) const override { return value; }
virtual Pose apply(Pose value) const override { return value.transform(_transform); }
virtual bool parseParameters(const QJsonValue& parameters) override { return parseMat4Parameter(parameters, _transform); }
private:
glm::mat4 _transform;
};
}
#endif

View file

@ -0,0 +1,35 @@
//
// Created by Brad Hefta-Gaub 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Controllers_Filters_Translate_h
#define hifi_Controllers_Filters_Translate_h
#include <glm/gtx/transform.hpp>
#include "../Filter.h"
namespace controller {
class TranslateFilter : public Filter {
REGISTER_FILTER_CLASS(TranslateFilter);
public:
TranslateFilter() { }
TranslateFilter(glm::vec3 translate) : _translate(translate) {}
virtual float apply(float value) const override { return value; }
virtual Pose apply(Pose value) const override { return value.transform(glm::translate(_translate)); }
virtual bool parseParameters(const QJsonValue& parameters) override { return parseVec3Parameter(parameters, _translate); }
private:
glm::vec3 _translate { 0.0f };
};
}
#endif

View file

@ -50,7 +50,8 @@ Q_LOGGING_CATEGORY(trace_resource_parse_image_ktx, "trace.resource.parse.image.k
const std::string TextureCache::KTX_DIRNAME { "ktx_cache" };
const std::string TextureCache::KTX_EXT { "ktx" };
static const int SKYBOX_LOAD_PRIORITY { 10 }; // Make sure skybox loads first
static const float SKYBOX_LOAD_PRIORITY { 10.0f }; // Make sure skybox loads first
static const float HIGH_MIPS_LOAD_PRIORITY { 9.0f }; // Make sure high mips loads after skybox but before models
TextureCache::TextureCache() :
_ktxCache(KTX_DIRNAME, KTX_EXT) {
@ -261,9 +262,6 @@ QSharedPointer<Resource> TextureCache::createResource(const QUrl& url, const QSh
auto content = textureExtra ? textureExtra->content : QByteArray();
auto maxNumPixels = textureExtra ? textureExtra->maxNumPixels : ABSOLUTE_MAX_TEXTURE_NUM_PIXELS;
NetworkTexture* texture = new NetworkTexture(url, type, content, maxNumPixels);
if (type == image::TextureUsage::CUBE_TEXTURE) {
texture->setLoadPriority(this, SKYBOX_LOAD_PRIORITY);
}
return QSharedPointer<Resource>(texture, &Resource::deleter);
}
@ -276,6 +274,12 @@ NetworkTexture::NetworkTexture(const QUrl& url, image::TextureUsage::Type type,
_textureSource = std::make_shared<gpu::TextureSource>();
_lowestRequestedMipLevel = 0;
if (type == image::TextureUsage::CUBE_TEXTURE) {
setLoadPriority(this, SKYBOX_LOAD_PRIORITY);
} else if (_sourceIsKTX) {
setLoadPriority(this, HIGH_MIPS_LOAD_PRIORITY);
}
if (!url.isValid()) {
_loaded = true;
}
@ -397,7 +401,8 @@ void NetworkTexture::startRequestForNextMipLevel() {
_ktxResourceState = PENDING_MIP_REQUEST;
init();
setLoadPriority(this, -static_cast<int>(_originalKtxDescriptor->header.numberOfMipmapLevels) + _lowestKnownPopulatedMip);
float priority = -(float)_originalKtxDescriptor->header.numberOfMipmapLevels + (float)_lowestKnownPopulatedMip;
setLoadPriority(this, priority);
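// (Priority intuition, assuming larger values win as with SKYBOX_LOAD_PRIORITY:
// a texture that still has only its smallest mips populated sits near zero, while a
// nearly complete one sits near -numberOfMipmapLevels, so every texture receives
// coarse data before any one of them downloads fine detail.)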
_url.setFragment(QString::number(_lowestKnownPopulatedMip - 1));
TextureCache::attemptRequest(_self);
}

View file

@ -508,7 +508,7 @@ void TabletProxy::gotoWebScreen(const QString& url, const QString& injectedJavaS
if (root) {
removeButtonsFromHomeScreen();
QMetaObject::invokeMethod(root, "loadSource", Q_ARG(const QVariant&, QVariant(WEB_VIEW_SOURCE_URL)));
QMetaObject::invokeMethod(root, "loadWebBase");
QMetaObject::invokeMethod(root, "setShown", Q_ARG(const QVariant&, QVariant(true)));
QMetaObject::invokeMethod(root, "loadWebUrl", Q_ARG(const QVariant&, QVariant(url)), Q_ARG(const QVariant&, QVariant(injectedJavaScriptUrl)));
}

View file

@ -38,6 +38,11 @@ const quat Quaternions::X_180{ 0.0f, 1.0f, 0.0f, 0.0f };
const quat Quaternions::Y_180{ 0.0f, 0.0f, 1.0f, 0.0f };
const quat Quaternions::Z_180{ 0.0f, 0.0f, 0.0f, 1.0f };
const mat4 Matrices::IDENTITY { glm::mat4() };
const mat4 Matrices::X_180 { createMatFromQuatAndPos(Quaternions::X_180, Vectors::ZERO) };
const mat4 Matrices::Y_180 { createMatFromQuatAndPos(Quaternions::Y_180, Vectors::ZERO) };
const mat4 Matrices::Z_180 { createMatFromQuatAndPos(Quaternions::Z_180, Vectors::ZERO) };
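// (These are matrix counterparts of the Quaternions constants above, convenient for
// composing half-turns with other mat4 transforms without an explicit cast.)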
// Safe version of glm::mix; based on the code in Nick Bobick's article,
// http://www.gamasutra.com/features/19980703/quaternions_01.htm (via Clyde,
// https://github.com/threerings/clyde/blob/master/src/main/java/com/threerings/math/Quaternion.java)

View file

@ -54,6 +54,13 @@ const glm::vec3 IDENTITY_FORWARD = glm::vec3( 0.0f, 0.0f,-1.0f);
glm::quat safeMix(const glm::quat& q1, const glm::quat& q2, float alpha);
class Matrices {
public:
static const mat4 IDENTITY;
static const mat4 X_180;
static const mat4 Y_180;
static const mat4 Z_180;
};
class Quaternions {
public:

View file

@ -0,0 +1,6 @@
set(TARGET_NAME trackers)
setup_hifi_library()
GroupSources("src")
link_hifi_libraries(shared)
target_bullet()

View file

@ -1,7 +1,4 @@
//
// DeviceTracker.cpp
// interface/src/devices
//
// Created by Sam Cake on 6/20/14.
// Copyright 2014 High Fidelity, Inc.
//

View file

@ -1,7 +1,4 @@
//
// DeviceTracker.h
// interface/src/devices
//
// Created by Sam Cake on 6/20/14.
// Copyright 2014 High Fidelity, Inc.
//

View file

@ -1,7 +1,4 @@
//
// EyeTracker.cpp
// interface/src/devices
//
// Created by David Rowe on 27 Jul 2015.
// Copyright 2015 High Fidelity, Inc.
//
@ -17,8 +14,8 @@
#include <SharedUtil.h>
#include "InterfaceLogging.h"
#include "OctreeConstants.h"
#include "Logging.h"
#include <OctreeConstants.h>
#ifdef HAVE_IVIEWHMD
char* HIGH_FIDELITY_EYE_TRACKER_CALIBRATION = "HighFidelityEyeTrackerCalibration";
@ -115,7 +112,7 @@ void EyeTracker::processData(smi_CallbackDataStruct* data) {
void EyeTracker::init() {
if (_isInitialized) {
qCWarning(interfaceapp) << "Eye Tracker: Already initialized";
qCWarning(trackers) << "Eye Tracker: Already initialized";
return;
}
}

View file

@ -1,7 +1,4 @@
//
// EyeTracker.h
// interface/src/devices
//
// Created by David Rowe on 27 Jul 2015.
// Copyright 2015 High Fidelity, Inc.
//

View file

@ -1,7 +1,4 @@
//
// FaceTracker.cpp
// interface/src/devices
//
// Created by Andrzej Kapolka on 4/9/14.
// Copyright 2014 High Fidelity, Inc.
//
@ -9,22 +6,21 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QTimer>
#include <GLMHelpers.h>
#include "FaceTracker.h"
#include "InterfaceLogging.h"
#include "Menu.h"
#include <QTimer>
#include <GLMHelpers.h>
#include "Logging.h"
//#include "Menu.h"
const int FPS_TIMER_DELAY = 2000; // ms
const int FPS_TIMER_DURATION = 2000; // ms
const float DEFAULT_EYE_DEFLECTION = 0.25f;
Setting::Handle<float> FaceTracker::_eyeDeflection("faceshiftEyeDeflection", DEFAULT_EYE_DEFLECTION);
bool FaceTracker::_isMuted { true };
void FaceTracker::init() {
_isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
_isInitialized = true; // FaceTracker can be used now
}
@ -101,7 +97,7 @@ void FaceTracker::countFrame() {
}
void FaceTracker::finishFPSTimer() {
qCDebug(interfaceapp) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f);
qCDebug(trackers) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f);
_isCalculatingFPS = false;
}
@ -113,3 +109,25 @@ void FaceTracker::toggleMute() {
void FaceTracker::setEyeDeflection(float eyeDeflection) {
_eyeDeflection.set(eyeDeflection);
}
void FaceTracker::updateFakeCoefficients(float leftBlink, float rightBlink, float browUp,
float jawOpen, float mouth2, float mouth3, float mouth4, QVector<float>& coefficients) {
const int MMMM_BLENDSHAPE = 34;
const int FUNNEL_BLENDSHAPE = 40;
const int SMILE_LEFT_BLENDSHAPE = 28;
const int SMILE_RIGHT_BLENDSHAPE = 29;
const int MAX_FAKE_BLENDSHAPE = 40; // Largest modified blendshape from above and below
coefficients.resize(std::max((int)coefficients.size(), MAX_FAKE_BLENDSHAPE + 1));
qFill(coefficients.begin(), coefficients.end(), 0.0f);
coefficients[_leftBlinkIndex] = leftBlink;
coefficients[_rightBlinkIndex] = rightBlink;
coefficients[_browUpCenterIndex] = browUp;
coefficients[_browUpLeftIndex] = browUp;
coefficients[_browUpRightIndex] = browUp;
coefficients[_jawOpenIndex] = jawOpen;
coefficients[SMILE_LEFT_BLENDSHAPE] = coefficients[SMILE_RIGHT_BLENDSHAPE] = mouth4;
coefficients[MMMM_BLENDSHAPE] = mouth2;
coefficients[FUNNEL_BLENDSHAPE] = mouth3;
}
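// A hypothetical caller (e.g. audio-driven fake mouth animation) might use it as:
//   QVector<float> coefficients;
//   FaceTracker::updateFakeCoefficients(0.0f, 0.0f, browUp, jawOpen,
//                                       mouth2, mouth3, mouth4, coefficients);
// The vector is resized and zeroed on every call, so no state persists between frames.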

View file

@ -1,7 +1,4 @@
//
// FaceTracker.h
// interface/src/devices
//
// Created by Andrzej Kapolka on 4/9/14.
// Copyright 2014 High Fidelity, Inc.
//
@ -20,7 +17,7 @@
#include <SettingHandle.h>
/// Base class for face trackers (Faceshift, DDE).
/// Base class for face trackers (DDE, BinaryVR).
class FaceTracker : public QObject {
Q_OBJECT
@ -45,12 +42,21 @@ public:
const QVector<float>& getBlendshapeCoefficients() const;
float getBlendshapeCoefficient(int index) const;
bool isMuted() const { return _isMuted; }
void setIsMuted(bool isMuted) { _isMuted = isMuted; }
static bool isMuted() { return _isMuted; }
static void setIsMuted(bool isMuted) { _isMuted = isMuted; }
static float getEyeDeflection() { return _eyeDeflection.get(); }
static void setEyeDeflection(float eyeDeflection);
static void updateFakeCoefficients(float leftBlink,
float rightBlink,
float browUp,
float jawOpen,
float mouth2,
float mouth3,
float mouth4,
QVector<float>& coefficients);
signals:
void muteToggled();
@ -63,7 +69,7 @@ protected:
virtual ~FaceTracker() {};
bool _isInitialized = false;
bool _isMuted = true;
static bool _isMuted;
glm::vec3 _headTranslation = glm::vec3(0.0f);
glm::quat _headRotation = glm::quat();
@ -84,6 +90,24 @@ private:
bool _isCalculatingFPS = false;
int _frameCount = 0;
// see http://support.faceshift.com/support/articles/35129-export-of-blendshapes
static const int _leftBlinkIndex = 0;
static const int _rightBlinkIndex = 1;
static const int _leftEyeOpenIndex = 8;
static const int _rightEyeOpenIndex = 9;
// Brows
static const int _browDownLeftIndex = 14;
static const int _browDownRightIndex = 15;
static const int _browUpCenterIndex = 16;
static const int _browUpLeftIndex = 17;
static const int _browUpRightIndex = 18;
static const int _mouthSmileLeftIndex = 28;
static const int _mouthSmileRightIndex = 29;
static const int _jawOpenIndex = 21;
static Setting::Handle<float> _eyeDeflection;
};

View file

@ -0,0 +1,11 @@
//
// Created by Bradley Austin Davis on 2017/04/25
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Logging.h"
Q_LOGGING_CATEGORY(trackers, "hifi.trackers")

View file

@ -0,0 +1,16 @@
//
// Created by Bradley Austin Davis on 2017/04/25
// Copyright 2013-2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_TrackersLogging_h
#define hifi_TrackersLogging_h
#include <QtCore/QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(trackers)
#endif // hifi_TrackersLogging_h

View file

@ -1,7 +1,4 @@
//
// MotionTracker.cpp
// interface/src/devices
//
// Created by Sam Cake on 6/20/14.
// Copyright 2014 High Fidelity, Inc.
//
@ -10,8 +7,6 @@
//
#include "MotionTracker.h"
#include "GLMHelpers.h"
// glm::mult(mat43, mat43) is just the composition of the two matrices, assuming they are in fact mat44 with the last row = { 0, 0, 0, 1 }
namespace glm {

View file

@ -1,7 +1,4 @@
//
// MotionTracker.h
// interface/src/devices
//
// Created by Sam Cake on 6/20/14.
// Copyright 2014 High Fidelity, Inc.
//
@ -14,20 +11,7 @@
#include "DeviceTracker.h"
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-compare"
#endif
#include <glm/glm.hpp>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <GLMHelpers.h>
/// Base class for device trackers.
class MotionTracker : public DeviceTracker {

View file

@ -223,6 +223,18 @@ QAction* Menu::addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMe
return action;
}
QAction* Menu::addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
const QString& actionName,
const std::function<void(bool)>& handler,
const QKeySequence& shortcut,
const bool checked,
int menuItemLocation,
const QString& grouping) {
auto action = addCheckableActionToQMenuAndActionHash(destinationMenu, actionName, shortcut, checked, nullptr, nullptr, menuItemLocation, grouping);
connect(action, &QAction::triggered, handler);
return action;
}
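// A hypothetical call site for the new handler-based overload (names illustrative):
//   menu->addCheckableActionToQMenuAndActionHash(avatarMenu, "Mute Face Tracking",
//       [](bool checked) { FaceTracker::setIsMuted(checked); });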
void Menu::removeAction(MenuWrapper* menu, const QString& actionName) {
auto action = _actionHash.value(actionName);
menu->removeAction(action);

View file

@ -9,6 +9,8 @@
#ifndef hifi_ui_Menu_h
#define hifi_ui_Menu_h
#include <functional>
#include <QtCore/QDir>
#include <QtCore/QPointer>
#include <QtCore/QStandardPaths>
@ -90,6 +92,14 @@ public:
int menuItemLocation = UNSPECIFIED_POSITION,
const QString& grouping = QString());
QAction* addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
const QString& actionName,
const std::function<void(bool)>& handler,
const QKeySequence& shortcut = 0,
const bool checked = false,
int menuItemLocation = UNSPECIFIED_POSITION,
const QString& grouping = QString());
void removeAction(MenuWrapper* menu, const QString& actionName);
public slots:

View file

@ -0,0 +1,34 @@
//
// puppetFeet3.js
// examples/controllers
//
// Created by Brad Hefta-Gaub on 2017/04/11
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var MAPPING_NAME = "com.highfidelity.examples.puppetFeet3";
var mapping = Controller.newMapping(MAPPING_NAME);
var puppetOffset = { x: 0, y: -1, z: 0 };
var rotation = Quat.fromPitchYawRollDegrees(0, 0, -90);
var noTranslation = { x: 0, y: 0, z: 0 };
var transformMatrix = Mat4.createFromRotAndTrans(rotation, noTranslation);
var rotateAndTranslate = Mat4.createFromRotAndTrans(rotation, puppetOffset);
mapping.from(Controller.Standard.LeftHand).peek().rotate(rotation).translate(puppetOffset).to(Controller.Standard.LeftFoot);
//mapping.from(Controller.Standard.LeftHand).peek().translate(puppetOffset).to(Controller.Standard.LeftFoot);
//mapping.from(Controller.Standard.LeftHand).peek().transform(transformMatrix).translate(puppetOffset).to(Controller.Standard.LeftFoot);
//mapping.from(Controller.Standard.LeftHand).peek().transform(rotateAndTranslate).to(Controller.Standard.LeftFoot);
Controller.enableMapping(MAPPING_NAME);
Script.scriptEnding.connect(function(){
mapping.disable();
});

View file

@ -0,0 +1,105 @@
//
// hipsControllerTest.js
//
// Created by Anthony Thibault on 4/24/17
// Copyright 2017 High Fidelity, Inc.
//
// Test procedural manipulation of the Avatar hips via the controller system.
// Pull the left and right triggers on your hand controllers, and your hips should begin to gyrate in an amusing manner.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
/* global Xform */
Script.include("/~/system/libraries/Xform.js");
var triggerPressHandled = false;
var rightTriggerPressed = false;
var leftTriggerPressed = false;
var MAPPING_NAME = "com.highfidelity.hipsIkTest";
var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from([Controller.Standard.RTClick]).peek().to(function (value) {
rightTriggerPressed = (value !== 0) ? true : false;
});
mapping.from([Controller.Standard.LTClick]).peek().to(function (value) {
leftTriggerPressed = (value !== 0) ? true : false;
});
Controller.enableMapping(MAPPING_NAME);
var CONTROLLER_MAPPING_NAME = "com.highfidelity.hipsIkTest.controller";
var controllerMapping;
var ZERO = {x: 0, y: 0, z: 0};
var X_AXIS = {x: 1, y: 0, z: 0};
var Y_AXIS = {x: 0, y: 1, z: 0};
var Y_180 = {x: 0, y: 1, z: 0, w: 0};
var Y_180_XFORM = new Xform(Y_180, {x: 0, y: 0, z: 0});
var hips = undefined;
function computeCurrentXform(jointIndex) {
var currentXform = new Xform(MyAvatar.getAbsoluteJointRotationInObjectFrame(jointIndex),
MyAvatar.getAbsoluteJointTranslationInObjectFrame(jointIndex));
return currentXform;
}
function calibrate() {
hips = computeCurrentXform(MyAvatar.getJointIndex("Hips"));
}
function circleOffset(radius, theta, normal) {
var pos = {x: radius * Math.cos(theta), y: radius * Math.sin(theta), z: 0};
var lookAtRot = Quat.lookAt(normal, ZERO, X_AXIS);
return Vec3.multiplyQbyV(lookAtRot, pos);
}
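// (circleOffset appears to parameterize a point on a circle of the given radius:
// the point is computed in the XY plane at angle theta, then rotated so the circle
// lies in the plane facing along `normal`.)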
var calibrationCount = 0;
function update(dt) {
if (rightTriggerPressed && leftTriggerPressed) {
if (!triggerPressHandled) {
triggerPressHandled = true;
if (controllerMapping) {
hips = undefined;
Controller.disableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
controllerMapping = undefined;
} else {
calibrate();
calibrationCount++;
controllerMapping = Controller.newMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
var n = Y_AXIS;
var t = 0;
if (hips) {
controllerMapping.from(function () {
t += (1 / 60) * 4;
return {
valid: true,
translation: Vec3.sum(hips.pos, circleOffset(0.1, t, n)),
rotation: hips.rot,
velocity: ZERO,
angularVelocity: ZERO
};
}).to(Controller.Standard.Hips);
}
Controller.enableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
}
}
} else {
triggerPressHandled = false;
}
}
Script.update.connect(update);
Script.scriptEnding.connect(function () {
Controller.disableMapping(MAPPING_NAME);
if (controllerMapping) {
Controller.disableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
}
Script.update.disconnect(update);
});

View file

@ -11,19 +11,22 @@ var TRACKED_OBJECT_POSES = [
var triggerPressHandled = false;
var rightTriggerPressed = false;
var leftTriggerPressed = false;
var calibrationCount = 0;
var MAPPING_NAME = "com.highfidelity.viveMotionCapture";
var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from([Controller.Standard.RTClick]).peek().to(function (value) {
var TRIGGER_MAPPING_NAME = "com.highfidelity.viveMotionCapture.triggers";
var triggerMapping = Controller.newMapping(TRIGGER_MAPPING_NAME);
triggerMapping.from([Controller.Standard.RTClick]).peek().to(function (value) {
rightTriggerPressed = (value !== 0) ? true : false;
});
mapping.from([Controller.Standard.LTClick]).peek().to(function (value) {
triggerMapping.from([Controller.Standard.LTClick]).peek().to(function (value) {
leftTriggerPressed = (value !== 0) ? true : false;
});
Controller.enableMapping(TRIGGER_MAPPING_NAME);
Controller.enableMapping(MAPPING_NAME);
var CONTROLLER_MAPPING_NAME = "com.highfidelity.viveMotionCapture.controller";
var controllerMapping;
var head;
var leftFoot;
var rightFoot;
var hips;
@ -75,8 +78,29 @@ function computeDefaultToReferenceXform() {
}
}
function computeHeadOffsetXform() {
var leftEyeIndex = MyAvatar.getJointIndex("LeftEye");
var rightEyeIndex = MyAvatar.getJointIndex("RightEye");
var headIndex = MyAvatar.getJointIndex("Head");
if (leftEyeIndex > 0 && rightEyeIndex > 0 && headIndex > 0) {
var defaultHeadXform = new Xform(MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(headIndex),
MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(headIndex));
var defaultLeftEyeXform = new Xform(MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(leftEyeIndex),
MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(leftEyeIndex));
var defaultRightEyeXform = new Xform(MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(rightEyeIndex),
MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(rightEyeIndex));
var defaultCenterEyePos = Vec3.multiply(0.5, Vec3.sum(defaultLeftEyeXform.pos, defaultRightEyeXform.pos));
var defaultCenterEyeXform = new Xform(defaultLeftEyeXform.rot, defaultCenterEyePos);
return Xform.mul(defaultCenterEyeXform.inv(), defaultHeadXform);
} else {
return undefined;
}
}
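// (The offset computed above is the default head joint expressed relative to a frame
// centered between the eyes; since the HMD effectively tracks the eyes, applying this
// offset to the HMD pose recovers where the head joint itself should be.)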
function calibrate() {
head = undefined;
leftFoot = undefined;
rightFoot = undefined;
hips = undefined;
@ -84,6 +108,13 @@ function calibrate() {
var defaultToReferenceXform = computeDefaultToReferenceXform();
var headOffsetXform = computeHeadOffsetXform();
print("AJT: computed headOffsetXform " + (headOffsetXform ? JSON.stringify(headOffsetXform) : "undefined"));
if (headOffsetXform) {
head = { offsetXform: headOffsetXform };
}
var poses = [];
if (Controller.Hardware.Vive) {
TRACKED_OBJECT_POSES.forEach(function (key) {
@ -92,7 +123,8 @@ function calibrate() {
if (pose.valid) {
poses.push({
channel: channel,
pose: pose
pose: pose,
latestPose: pose
});
}
});
@ -177,85 +209,91 @@ var ikTypes = {
var handlerId;
function computeIKTargetXform(jointInfo) {
var pose = Controller.getPoseValue(jointInfo.channel);
function convertJointInfoToPose(jointInfo) {
var latestPose = jointInfo.latestPose;
var offsetXform = jointInfo.offsetXform;
return Xform.mul(Y_180_XFORM, Xform.mul(new Xform(pose.rotation, pose.translation), offsetXform));
var xform = Xform.mul(new Xform(latestPose.rotation, latestPose.translation), offsetXform);
return {
valid: true,
translation: xform.pos,
rotation: xform.rot,
velocity: Vec3.sum(latestPose.velocity, Vec3.cross(latestPose.angularVelocity, Vec3.subtract(xform.pos, latestPose.translation))),
angularVelocity: latestPose.angularVelocity
};
}
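// (The velocity term above is the rigid-body point-velocity estimate v' = v + w x r,
// with r the offset from the tracked pose to the transformed position -- the same
// approximation used by controller::Pose::postTransform on the C++ side.)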
function update(dt) {
if (rightTriggerPressed && leftTriggerPressed) {
if (!triggerPressHandled) {
triggerPressHandled = true;
if (handlerId) {
print("AJT: UN-CALIBRATE!");
if (controllerMapping) {
// go back to normal, vive pucks will be ignored.
print("AJT: UN-CALIBRATE!");
head = undefined;
leftFoot = undefined;
rightFoot = undefined;
hips = undefined;
spine2 = undefined;
if (handlerId) {
print("AJT: un-hooking animation state handler");
MyAvatar.removeAnimationStateHandler(handlerId);
handlerId = undefined;
}
Controller.disableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
controllerMapping = undefined;
} else {
print("AJT: CALIBRATE!");
calibrate();
calibrationCount++;
var animVars = [];
controllerMapping = Controller.newMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
if (head) {
controllerMapping.from(function () {
var worldToAvatarXform = (new Xform(MyAvatar.orientation, MyAvatar.position)).inv();
head.latestPose = {
valid: true,
translation: worldToAvatarXform.xformPoint(HMD.position),
rotation: Quat.multiply(worldToAvatarXform.rot, Quat.multiply(HMD.orientation, Y_180)), // postMult 180 rot flips head direction
velocity: {x: 0, y: 0, z: 0}, // TODO: currently this is unused anyway...
angularVelocity: {x: 0, y: 0, z: 0}
};
return convertJointInfoToPose(head);
}).to(Controller.Standard.Head);
}
if (leftFoot) {
animVars.push("leftFootType");
animVars.push("leftFootPosition");
animVars.push("leftFootRotation");
controllerMapping.from(leftFoot.channel).to(function (pose) {
leftFoot.latestPose = pose;
});
controllerMapping.from(function () {
return convertJointInfoToPose(leftFoot);
}).to(Controller.Standard.LeftFoot);
}
if (rightFoot) {
animVars.push("rightFootType");
animVars.push("rightFootPosition");
animVars.push("rightFootRotation");
controllerMapping.from(rightFoot.channel).to(function (pose) {
rightFoot.latestPose = pose;
});
controllerMapping.from(function () {
return convertJointInfoToPose(rightFoot);
}).to(Controller.Standard.RightFoot);
}
if (hips) {
animVars.push("hipsType");
animVars.push("hipsPosition");
animVars.push("hipsRotation");
controllerMapping.from(hips.channel).to(function (pose) {
hips.latestPose = pose;
});
controllerMapping.from(function () {
return convertJointInfoToPose(hips);
}).to(Controller.Standard.Hips);
}
if (spine2) {
animVars.push("spine2Type");
animVars.push("spine2Position");
animVars.push("spine2Rotation");
controllerMapping.from(spine2.channel).to(function (pose) {
spine2.latestPose = pose;
});
controllerMapping.from(function () {
return convertJointInfoToPose(spine2);
}).to(Controller.Standard.Spine2);
}
// hook up new anim state handler that maps vive pucks to ik system.
handlerId = MyAvatar.addAnimationStateHandler(function (props) {
var result = {}, xform;
if (rightFoot) {
xform = computeIKTargetXform(rightFoot);
result.rightFootType = ikTypes.RotationAndPosition;
result.rightFootPosition = xform.pos;
result.rightFootRotation = xform.rot;
}
if (leftFoot) {
xform = computeIKTargetXform(leftFoot);
result.leftFootType = ikTypes.RotationAndPosition;
result.leftFootPosition = xform.pos;
result.leftFootRotation = xform.rot;
}
if (hips) {
xform = computeIKTargetXform(hips);
result.hipsType = ikTypes.RotationAndPosition;
result.hipsPosition = xform.pos;
result.hipsRotation = xform.rot;
}
if (spine2) {
xform = computeIKTargetXform(spine2);
result.spine2Type = ikTypes.RotationAndPosition;
result.spine2Position = xform.pos;
result.spine2Rotation = xform.rot;
}
return result;
}, animVars);
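// The handler above follows MyAvatar.addAnimationStateHandler's contract: the
// callback runs every frame, receives the current values of the animVars it
// registered for, and returns an object of overrides. A minimal sketch (names
// hypothetical, shown commented out) that pins a single anim var:
// var exampleHandlerId = MyAvatar.addAnimationStateHandler(function (props) {
//     return hips ? { hipsType: ikTypes.RotationAndPosition } : {};
// }, ["hipsType"]);
// ... and later: MyAvatar.removeAnimationStateHandler(exampleHandlerId);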
Controller.enableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
}
}
} else {
@ -301,7 +339,10 @@ function update(dt) {
Script.update.connect(update);
Script.scriptEnding.connect(function () {
Controller.disableMapping(MAPPING_NAME);
Controller.disableMapping(TRIGGER_MAPPING_NAME);
if (controllerMapping) {
Controller.disableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
}
Script.update.disconnect(update);
});

View file

@ -1801,15 +1801,15 @@ function MyController(hand) {
}
this.processStylus();
if (isInEditMode() && !this.isNearStylusTarget) {
if (isInEditMode() && !this.isNearStylusTarget && HMD.isHandControllerAvailable()) {
// Always show lasers while in edit mode when the hand/stylus is not active.
var rayPickInfo = this.calcRayPickInfo(this.hand);
this.intersectionDistance = (rayPickInfo.entityID || rayPickInfo.overlayID) ? rayPickInfo.distance : 0;
this.searchIndicatorOn(rayPickInfo.searchRay);
} else {
this.searchIndicatorOff();
}
}
};
this.handleLaserOnHomeButton = function(rayPickInfo) {

View file

@ -1,4 +1,6 @@
"use strict";
/*jslint vars:true, plusplus:true, forin:true*/
/*global Window, Script, Controller, MyAvatar, AvatarList, Entities, Messages, Audio, SoundCache, Account, UserActivityLogger, Vec3, Quat, XMLHttpRequest, location, print*/
//
// makeUserConnection.js
// scripts/system
@ -9,7 +11,7 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function() { // BEGIN LOCAL_SCOPE
(function () { // BEGIN LOCAL_SCOPE
var LABEL = "makeUserConnection";
var MAX_AVATAR_DISTANCE = 0.2; // m
@ -22,12 +24,13 @@
MAKING_CONNECTION: 3
};
var STATE_STRINGS = ["inactive", "waiting", "connecting", "makingConnection"];
var HAND_STRING_PROPERTY = 'hand'; // Used in our message protocol. IWBNI we changed it to handString, but that would break compatibility.
var WAITING_INTERVAL = 100; // ms
var CONNECTING_INTERVAL = 100; // ms
var MAKING_CONNECTION_TIMEOUT = 800; // ms
var CONNECTING_TIME = 1600; // ms
var PARTICLE_RADIUS = 0.15; // m
var PARTICLE_ANGLE_INCREMENT = 360/45; // 1hz
var PARTICLE_ANGLE_INCREMENT = 360 / 45; // 1hz
var HANDSHAKE_SOUND_URL = "https://s3-us-west-1.amazonaws.com/hifi-content/davidkelly/production/audio/4beat_sweep.wav";
var SUCCESSFUL_HANDSHAKE_SOUND_URL = "https://s3-us-west-1.amazonaws.com/hifi-content/davidkelly/production/audio/3rdbeat_success_bell.wav";
var PREFERRER_HAND_JOINT_POSTFIX_ORDER = ['Middle1', 'Index1', ''];
@ -39,7 +42,7 @@
var PARTICLE_EFFECT_PROPS = {
"alpha": 0.8,
"azimuthFinish": Math.PI,
"azimuthStart": -1*Math.PI,
"azimuthStart": -1 * Math.PI,
"emitRate": 500,
"emitSpeed": 0.0,
"emitterShouldTrail": 1,
@ -56,10 +59,10 @@
"color": {"red": 255, "green": 255, "blue": 255},
"colorFinish": {"red": 0, "green": 164, "blue": 255},
"colorStart": {"red": 255, "green": 255, "blue": 255},
"emitOrientation": {"w": -0.71, "x":0.0, "y":0.0, "z": 0.71},
"emitOrientation": {"w": -0.71, "x": 0.0, "y": 0.0, "z": 0.71},
"emitAcceleration": {"x": 0.0, "y": 0.0, "z": 0.0},
"accelerationSpread": {"x": 0.0, "y": 0.0, "z": 0.0},
"dimensions": {"x":0.05, "y": 0.05, "z": 0.05},
"dimensions": {"x": 0.05, "y": 0.05, "z": 0.05},
"type": "ParticleEffect"
};
var MAKING_CONNECTION_PARTICLE_PROPS = {
@ -68,7 +71,7 @@
"alphaSpread": 0,
"alphaFinish": 0,
"azimuthFinish": Math.PI,
"azimuthStart": -1*Math.PI,
"azimuthStart": -1 * Math.PI,
"emitRate": 2000,
"emitSpeed": 0.0,
"emitterShouldTrail": 1,
@ -86,14 +89,14 @@
"color": {"red": 200, "green": 170, "blue": 255},
"colorFinish": {"red": 0, "green": 134, "blue": 255},
"colorStart": {"red": 185, "green": 222, "blue": 255},
"emitOrientation": {"w": -0.71, "x":0.0, "y":0.0, "z": 0.71},
"emitOrientation": {"w": -0.71, "x": 0.0, "y": 0.0, "z": 0.71},
"emitAcceleration": {"x": 0.0, "y": 0.0, "z": 0.0},
"accelerationSpread": {"x": 0.0, "y": 0.0, "z": 0.0},
"dimensions": {"x":0.05, "y": 0.05, "z": 0.05},
"dimensions": {"x": 0.05, "y": 0.05, "z": 0.05},
"type": "ParticleEffect"
};
var currentHand = undefined;
var currentHand;
var currentHandJointIndex = -1;
var state = STATES.INACTIVE;
var connectingInterval;
@ -101,7 +104,6 @@
var makingConnectionTimeout;
var animHandlerId;
var connectingId;
var connectingHandString;
var connectingHandJointIndex = -1;
var waitingList = {};
var particleEffect;
@ -116,7 +118,7 @@
function debug() {
var stateString = "<" + STATE_STRINGS[state] + ">";
var connecting = "[" + connectingId + "/" + connectingHandString + "]";
var connecting = "[" + connectingId + "/" + connectingHandJointIndex + "]";
print.apply(null, [].concat.apply([LABEL, stateString, JSON.stringify(waitingList), connecting],
[].map.call(arguments, JSON.stringify)));
}
@ -183,27 +185,19 @@
function handToString(hand) {
if (hand === Controller.Standard.RightHand) {
return "RightHand";
} else if (hand === Controller.Standard.LeftHand) {
}
if (hand === Controller.Standard.LeftHand) {
return "LeftHand";
}
debug("handToString called without valid hand! value: ", hand);
return "";
}
function stringToHand(hand) {
if (hand === "RightHand") {
return Controller.Standard.RightHand;
} else if (hand === "LeftHand") {
return Controller.Standard.LeftHand;
}
debug("stringToHand called with bad hand string:", hand);
return 0;
}
function handToHaptic(hand) {
if (hand === Controller.Standard.RightHand) {
return 1;
} else if (hand === Controller.Standard.LeftHand) {
}
if (hand === Controller.Standard.LeftHand) {
return 0;
}
debug("handToHaptic called without a valid hand!");
@ -229,13 +223,13 @@
}
// This returns the ideal hand joint index for the avatar.
// [hand]middle1 -> [hand]index1 -> [hand]
function getIdealHandJointIndex(avatar, hand) {
debug("got hand " + hand + " for avatar " + avatar.sessionUUID);
var handString = handToString(hand);
for (var i = 0; i < PREFERRER_HAND_JOINT_POSTFIX_ORDER.length; i++) {
var jointName = handString + PREFERRER_HAND_JOINT_POSTFIX_ORDER[i];
var jointIndex = avatar.getJointIndex(jointName);
// [handString]middle1 -> [handString]index1 -> [handString]
function getIdealHandJointIndex(avatar, handString) {
debug("get hand " + handString + " for avatar " + (avatar && avatar.sessionUUID));
var suffixIndex, jointName, jointIndex;
for (suffixIndex = 0; suffixIndex < (avatar ? PREFERRER_HAND_JOINT_POSTFIX_ORDER.length : 0); suffixIndex++) {
jointName = handString + PREFERRER_HAND_JOINT_POSTFIX_ORDER[suffixIndex];
jointIndex = avatar.getJointIndex(jointName);
if (jointIndex !== -1) {
debug('found joint ' + jointName + ' (' + jointIndex + ')');
return jointIndex;
@ -249,26 +243,39 @@
function getHandPosition(avatar, handJointIndex) {
if (handJointIndex === -1) {
debug("calling getHandPosition with no hand joint index! (returning avatar position but this is a BUG)");
debug(new Error().stack);
return avatar.position;
}
return avatar.getJointPosition(handJointIndex);
}
function shakeHandsAnimation(animationProperties) {
var animationData = {};
function updateAnimationData() {
// All we are doing here is moving the right hand to a spot that is
// in front of and a bit above the hips. How far in front scales with
// the avatar's height (say, hips-to-head distance).
var headIndex = MyAvatar.getJointIndex("Head");
var offset = 0.5; // default distance of hand in front of you
var result = {};
if (headIndex) {
offset = 0.8 * MyAvatar.getAbsoluteJointTranslationInObjectFrame(headIndex).y;
}
result.rightHandPosition = Vec3.multiply(offset, {x: -0.25, y: 0.8, z: 1.3});
result.rightHandRotation = Quat.fromPitchYawRollDegrees(90, 0, 90);
return result;
animationData.rightHandPosition = Vec3.multiply(offset, {x: -0.25, y: 0.8, z: 1.3});
animationData.rightHandRotation = Quat.fromPitchYawRollDegrees(90, 0, 90);
}
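// Worked example with assumed numbers: if the head joint sits 1.6 m above the
// avatar origin, offset = 0.8 * 1.6 = 1.28, so the right hand is pinned at
// Vec3.multiply(1.28, {x: -0.25, y: 0.8, z: 1.3}) = {x: -0.32, y: 1.024, z: 1.664},
// i.e. in front of and a bit above the hips, scaled to the avatar.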
function shakeHandsAnimation() {
return animationData;
}
function endHandshakeAnimation() {
if (animHandlerId) {
debug("removing animation");
animHandlerId = MyAvatar.removeAnimationStateHandler(animHandlerId);
}
}
function startHandshakeAnimation() {
endHandshakeAnimation(); // just in case order of press/unpress is broken
debug("adding animation");
updateAnimationData();
animHandlerId = MyAvatar.addAnimationStateHandler(shakeHandsAnimation, []);
}
function positionFractionallyTowards(posA, posB, frac) {
@ -294,11 +301,11 @@
}
}
function calcParticlePos(myHand, otherHand, otherOrientation, reset) {
function calcParticlePos(myHandPosition, otherHandPosition, otherOrientation, reset) {
if (reset) {
particleRotationAngle = 0.0;
}
var position = positionFractionallyTowards(myHand, otherHand, 0.5);
var position = positionFractionallyTowards(myHandPosition, otherHandPosition, 0.5);
particleRotationAngle += PARTICLE_ANGLE_INCREMENT; // about 0.5 hz
var radius = Math.min(PARTICLE_RADIUS, PARTICLE_RADIUS * particleRotationAngle / 360);
var axis = Vec3.mix(Quat.getFront(MyAvatar.orientation), Quat.inverse(Quat.getFront(otherOrientation)), 0.5);
@ -314,80 +321,78 @@
}
var myHandPosition = getHandPosition(MyAvatar, currentHandJointIndex);
var otherHand;
var otherHandPosition;
var otherOrientation;
if (connectingId) {
var other = AvatarList.getAvatar(connectingId);
if (other) {
otherOrientation = other.orientation;
otherHand = getHandPosition(other, connectingHandJointIndex);
otherHandPosition = getHandPosition(other, connectingHandJointIndex);
}
}
switch (state) {
case STATES.WAITING:
// no visualization while waiting
deleteParticleEffect();
deleteMakeConnectionParticleEffect();
stopHandshakeSound();
break;
case STATES.CONNECTING:
var particleProps = {};
// put the position between the 2 hands, if we have a connectingId. This
// helps define the plane in which the particles move.
positionFractionallyTowards(myHandPosition, otherHand, 0.5);
// now manage the rest of the entity
if (!particleEffect) {
particleRotationAngle = 0.0;
particleEmitRate = 500;
particleProps = PARTICLE_EFFECT_PROPS;
particleProps.isEmitting = 0;
particleProps.position = calcParticlePos(myHandPosition, otherHand, otherOrientation);
particleProps.parentID = MyAvatar.sessionUUID;
particleEffect = Entities.addEntity(particleProps, true);
} else {
particleProps.position = calcParticlePos(myHandPosition, otherHand, otherOrientation);
particleProps.isEmitting = 1;
Entities.editEntity(particleEffect, particleProps);
}
if (!makingConnectionParticleEffect) {
var props = MAKING_CONNECTION_PARTICLE_PROPS;
props.parentID = MyAvatar.sessionUUID;
makingConnectionEmitRate = 2000;
props.emitRate = makingConnectionEmitRate;
props.position = myHandPosition;
makingConnectionParticleEffect = Entities.addEntity(props, true);
} else {
makingConnectionEmitRate *= 0.5;
Entities.editEntity(makingConnectionParticleEffect, {
emitRate: makingConnectionEmitRate,
position: myHandPosition,
isEmitting: true
});
}
break;
case STATES.MAKING_CONNECTION:
particleEmitRate = Math.max(50, particleEmitRate * 0.5);
Entities.editEntity(makingConnectionParticleEffect, {emitRate: 0, isEmitting: 0, position: myHandPosition});
Entities.editEntity(particleEffect, {
position: calcParticlePos(myHandPosition, otherHand, otherOrientation),
emitRate: particleEmitRate
case STATES.WAITING:
// no visualization while waiting
deleteParticleEffect();
deleteMakeConnectionParticleEffect();
stopHandshakeSound();
break;
case STATES.CONNECTING:
var particleProps = {};
// put the position between the 2 hands, if we have a connectingId. This
// helps define the plane in which the particles move.
positionFractionallyTowards(myHandPosition, otherHandPosition, 0.5);
// now manage the rest of the entity
if (!particleEffect) {
particleRotationAngle = 0.0;
particleEmitRate = 500;
particleProps = PARTICLE_EFFECT_PROPS;
particleProps.isEmitting = 0;
particleProps.position = calcParticlePos(myHandPosition, otherHandPosition, otherOrientation);
particleProps.parentID = MyAvatar.sessionUUID;
particleEffect = Entities.addEntity(particleProps, true);
} else {
particleProps.position = calcParticlePos(myHandPosition, otherHandPosition, otherOrientation);
particleProps.isEmitting = 1;
Entities.editEntity(particleEffect, particleProps);
}
if (!makingConnectionParticleEffect) {
var props = MAKING_CONNECTION_PARTICLE_PROPS;
props.parentID = MyAvatar.sessionUUID;
makingConnectionEmitRate = 2000;
props.emitRate = makingConnectionEmitRate;
props.position = myHandPosition;
makingConnectionParticleEffect = Entities.addEntity(props, true);
} else {
makingConnectionEmitRate *= 0.5;
Entities.editEntity(makingConnectionParticleEffect, {
emitRate: makingConnectionEmitRate,
position: myHandPosition,
isEmitting: true
});
break;
default:
debug("unexpected state", state);
break;
}
break;
case STATES.MAKING_CONNECTION:
particleEmitRate = Math.max(50, particleEmitRate * 0.5);
Entities.editEntity(makingConnectionParticleEffect, {emitRate: 0, isEmitting: 0, position: myHandPosition});
Entities.editEntity(particleEffect, {
position: calcParticlePos(myHandPosition, otherHandPosition, otherOrientation),
emitRate: particleEmitRate
});
break;
default:
debug("unexpected state", state);
break;
}
}
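// The emit rates above decay geometrically: makingConnectionEmitRate starts at
// 2000 and halves on every CONNECTING update (2000, 1000, 500, ...), while in
// MAKING_CONNECTION particleEmitRate halves per update with a floor of 50, so
// both effects fade out within a few ticks.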
function isNearby(id, hand) {
function isNearby() {
if (currentHand) {
var handPos = getHandPosition(MyAvatar, currentHandJointIndex);
var avatar = AvatarList.getAvatar(id);
var handPosition = getHandPosition(MyAvatar, currentHandJointIndex);
var avatar = AvatarList.getAvatar(connectingId);
if (avatar) {
var otherHand = stringToHand(hand);
var otherHandJointIndex = getIdealHandJointIndex(avatar, otherHand);
var distance = Vec3.distance(getHandPosition(avatar, otherHandJointIndex), handPos);
var distance = Vec3.distance(getHandPosition(avatar, connectingHandJointIndex), handPosition);
return (distance < MAX_AVATAR_DISTANCE);
}
}
@ -395,68 +400,90 @@
}
function findNearestWaitingAvatar() {
var handPos = getHandPosition(MyAvatar, currentHandJointIndex);
var handPosition = getHandPosition(MyAvatar, currentHandJointIndex);
var minDistance = MAX_AVATAR_DISTANCE;
var nearestAvatar = {};
Object.keys(waitingList).forEach(function (identifier) {
var avatar = AvatarList.getAvatar(identifier);
if (avatar) {
var hand = stringToHand(waitingList[identifier]);
var handJointIndex = getIdealHandJointIndex(avatar, hand);
var distance = Vec3.distance(getHandPosition(avatar, handJointIndex), handPos);
var handJointIndex = waitingList[identifier];
var distance = Vec3.distance(getHandPosition(avatar, handJointIndex), handPosition);
if (distance < minDistance) {
minDistance = distance;
nearestAvatar = {avatar: identifier, hand: hand, avatarObject: avatar};
nearestAvatar = {avatarId: identifier, jointIndex: handJointIndex};
}
}
});
return nearestAvatar;
}
function messageSend(message) {
Messages.sendMessage(MESSAGE_CHANNEL, JSON.stringify(message));
}
function handStringMessageSend(message) {
message[HAND_STRING_PROPERTY] = handToString(currentHand);
messageSend(message);
}
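// Example of the resulting wire format (ids hypothetical): with currentHand set
// to the right hand, handStringMessageSend({key: "connectionRequest", id: otherId})
// publishes {"key":"connectionRequest","id":"<otherId>","hand":"RightHand"}
// as JSON on MESSAGE_CHANNEL.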
function setupCandidate() { // find the closest in-range avatar, send connection request, and return true. (Otherwise falsey)
var nearestAvatar = findNearestWaitingAvatar();
if (nearestAvatar.avatarId) {
connectingId = nearestAvatar.avatarId;
connectingHandJointIndex = nearestAvatar.jointIndex;
debug("sending connectionRequest to", connectingId);
handStringMessageSend({
key: "connectionRequest",
id: connectingId
});
return true;
}
}
function clearConnecting() {
connectingId = undefined;
connectingHandJointIndex = -1;
}
function lookForWaitingAvatar() {
// we started with nobody close enough, but maybe I've moved
// or they did. Note that two people doing this will race, so stop
// as soon as you have a connectingId (which means you got their
// message before noticing they were in range in this loop)
// just in case we re-enter before stopping
stopWaiting();
debug("started looking for waiting avatars");
waitingInterval = Script.setInterval(function () {
if (state === STATES.WAITING && !connectingId) {
setupCandidate();
} else {
// something happened, stop looking for avatars to connect
stopWaiting();
debug("stopped looking for waiting avatars");
}
}, WAITING_INTERVAL);
}
var pollCount = 0, requestUrl = location.metaverseServerUrl + '/api/v1/user/connection_request';
// As currently implemented, we select the closest waiting avatar (if close enough) and send
// them a connectionRequest. If nobody is close enough we send a waiting message, and wait for a
// connectionRequest. If the 2 people who want to connect are both somewhat out of range when they
// initiate the shake, they will race to see who sends the connectionRequest after noticing the
// waiting message. Either way, they will start connecting eachother at that point.
// waiting message. Either way, they will start connecting each other at that point.
function startHandshake(fromKeyboard) {
if (fromKeyboard) {
debug("adding animation");
// just in case order of press/unpress is broken
if (animHandlerId) {
animHandlerId = MyAvatar.removeAnimationStateHandler(animHandlerId);
}
animHandlerId = MyAvatar.addAnimationStateHandler(shakeHandsAnimation, []);
startHandshakeAnimation();
}
debug("starting handshake for", currentHand);
pollCount = 0;
state = STATES.WAITING;
connectingId = undefined;
connectingHandString = undefined;
connectingHandJointIndex = -1;
clearConnecting();
// just in case
stopWaiting();
stopConnecting();
stopMakingConnection();
var nearestAvatar = findNearestWaitingAvatar();
if (nearestAvatar.avatar) {
connectingId = nearestAvatar.avatar;
connectingHandString = handToString(nearestAvatar.hand);
connectingHandJointIndex = getIdealHandJointIndex(nearestAvatar.avatarObject, nearestAvatar.hand);
currentHandJointIndex = getIdealHandJointIndex(MyAvatar, currentHand);
debug("sending connectionRequest to", connectingId);
messageSend({
key: "connectionRequest",
id: connectingId,
hand: handToString(currentHand)
});
} else {
if (!setupCandidate()) {
// send waiting message
debug("sending waiting message");
messageSend({
handStringMessageSend({
key: "waiting",
hand: handToString(currentHand)
});
lookForWaitingAvatar();
}
@ -474,9 +501,7 @@
// as we ignore the key release event when inactive. See updateTriggers
// below.
state = STATES.INACTIVE;
connectingId = undefined;
connectingHandString = undefined;
connectingHandJointIndex = -1;
clearConnecting();
stopWaiting();
stopConnecting();
stopMakingConnection();
@ -486,10 +511,7 @@
key: "done"
});
if (animHandlerId) {
debug("removing animation");
MyAvatar.removeAnimationStateHandler(animHandlerId);
}
endHandshakeAnimation();
// No-op if we were successful, but this way we ensure that failures and abandoned handshakes don't leave us
// in a weird state.
request({uri: requestUrl, method: 'DELETE'}, debug);
@ -500,19 +522,16 @@
debug("currentHand", currentHand, "ignoring messages from", hand);
return;
}
if (!currentHand) {
currentHand = hand;
currentHandJointIndex = getIdealHandJointIndex(MyAvatar, currentHand);
}
currentHand = hand;
currentHandJointIndex = getIdealHandJointIndex(MyAvatar, handToString(currentHand)); // Always, in case of changed skeleton.
// ok now, we are either initiating or quitting...
var isGripping = value > GRIP_MIN;
if (isGripping) {
debug("updateTriggers called - gripping", handToString(hand));
if (state !== STATES.INACTIVE) {
return;
} else {
startHandshake(fromKeyboard);
}
startHandshake(fromKeyboard);
} else {
// TODO: should we end handshake even when inactive? Ponder
debug("updateTriggers called -- no longer gripping", handToString(hand));
@ -524,47 +543,12 @@
}
}
function messageSend(message) {
Messages.sendMessage(MESSAGE_CHANNEL, JSON.stringify(message));
}
function lookForWaitingAvatar() {
// we started with nobody close enough, but maybe I've moved
// or they did. Note that 2 people doing this race, so stop
// as soon as you have a connectingId (which means you got their
// message before noticing they were in range in this loop)
// just in case we re-enter before stopping
stopWaiting();
debug("started looking for waiting avatars");
waitingInterval = Script.setInterval(function () {
if (state === STATES.WAITING && !connectingId) {
// find the closest in-range avatar, and send connection request
var nearestAvatar = findNearestWaitingAvatar();
if (nearestAvatar.avatar) {
connectingId = nearestAvatar.avatar;
connectingHandString = handToString(nearestAvatar.hand);
debug("sending connectionRequest to", connectingId);
messageSend({
key: "connectionRequest",
id: connectingId,
hand: handToString(currentHand)
});
}
} else {
// something happened, stop looking for avatars to connect
stopWaiting();
debug("stopped looking for waiting avatars");
}
}, WAITING_INTERVAL);
}
/* There is a mini-state machine after entering STATES.makingConnection.
We make a request (which might immediately succeed, fail, or neither).
If we immediately fail, we tell the user.
Otherwise, we wait MAKING_CONNECTION_TIMEOUT. At that time, we poll until success or failure.
*/
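// A minimal sketch of that poll-until-resolved pattern (names hypothetical;
// assumes only Script.setTimeout, which this script already uses):
function pollUntilResolved(check, intervalMs, limit, onExpired) {
    var count = 0;
    function tick() {
        if (check()) {
            return; // request resolved (success or failure), stop polling
        }
        if (++count >= limit) {
            onExpired(); // client-side timeout, like POLL_LIMIT below
            return;
        }
        Script.setTimeout(tick, intervalMs);
    }
    Script.setTimeout(tick, intervalMs);
}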
var result, requestBody, pollCount = 0, requestUrl = location.metaverseServerUrl + '/api/v1/user/connection_request';
var result, requestBody;
function connectionRequestCompleted() { // Final result is in. Do effects.
if (result.status === 'success') { // set earlier
if (!successfulHandshakeInjector) {
@ -580,16 +564,37 @@
handToHaptic(currentHand));
// don't change state (so animation continues while gripped)
// but do send a notification, by calling the slot that emits the signal for it
Window.makeConnection(true, result.connection.new_connection ?
"You and " + result.connection.username + " are now connected!" : result.connection.username);
UserActivityLogger.makeUserConnection(connectingId, true, result.connection.new_connection ?
"new connection" : "already connected");
Window.makeConnection(true,
result.connection.new_connection ?
"You and " + result.connection.username + " are now connected!" :
result.connection.username);
UserActivityLogger.makeUserConnection(connectingId,
true,
result.connection.new_connection ?
"new connection" :
"already connected");
return;
} // failed
endHandshake();
debug("failing with result data", result);
// IWBNI we also did some fail sound/visual effect.
Window.makeConnection(false, result.connection);
if (Account.isLoggedIn()) { // Give extra failure info
request(location.metaverseServerUrl + '/api/v1/users/' + Account.username + '/location', function (error, response) {
var message = '';
if (error || response.status !== 'success') {
message = 'Unable to get location.';
} else if (!response.data || !response.data.location) {
message = "Unexpected location value: " + JSON.stringify(response);
} else if (response.data.location.node_id !== cleanId(MyAvatar.sessionUUID)) {
message = 'Session identification does not match database. Maybe you are logged in on another machine? That would prevent handshakes.' + JSON.stringify(response) + MyAvatar.sessionUUID;
}
if (message) {
Window.makeConnection(false, message);
}
debug("account location:", message || 'ok');
});
}
UserActivityLogger.makeUserConnection(connectingId, false, result.connection);
}
// This is a bit fragile - but to account for skew in when people actually create the
@ -606,7 +611,7 @@
debug(response, 'pollCount', pollCount);
if (pollCount++ >= POLL_LIMIT) { // server will expire, but let's not wait that long.
debug('POLL LIMIT REACHED; TIMEOUT: expired message generated by CLIENT');
result = {status: 'error', connection: 'expired'};
result = {status: 'error', connection: 'No logged-in partner found.'};
connectionRequestCompleted();
} else { // poll
Script.setTimeout(function () {
@ -636,8 +641,6 @@
}
}
// this should be where we make the appropriate connection call. For now just make the
// visualization change.
function makeConnection(id) {
// send done to let the connection know you have made connection.
messageSend({
@ -647,8 +650,7 @@
state = STATES.MAKING_CONNECTION;
// continue the haptic background until the timeout fires. When we make calls, we will have an interval
// probably, in which we do this.
// continue the haptic background until the timeout fires.
Controller.triggerHapticPulse(HAPTIC_DATA.background.strength, MAKING_CONNECTION_TIMEOUT, handToHaptic(currentHand));
requestBody = {'node_id': cleanId(MyAvatar.sessionUUID), 'proposed_node_id': cleanId(id)}; // for use when repeating
@ -662,26 +664,27 @@
// This will immediately set the response on success (e.g., the other guy got his request in first)
// or on immediate failure, and will otherwise poll (using the requestBody we just set).
request({ //
request({
uri: requestUrl,
method: 'POST',
json: true,
body: {'user_connection_request': requestBody}
}, handleConnectionResponseAndMaybeRepeat);
}
function setupConnecting(id, jointIndex) {
connectingId = id;
connectingHandJointIndex = jointIndex;
}
// we change states, start the connectionInterval where we check
// to be sure the hand is still close enough. If not, we terminate
// the interval, go back to the waiting state. If we make it
// the entire CONNECTING_TIME, we make the connection.
function startConnecting(id, hand) {
function startConnecting(id, jointIndex) {
var count = 0;
debug("connecting", id, "hand", hand);
debug("connecting", id, "hand", jointIndex);
// do we need to do this?
connectingId = id;
connectingHandString = hand;
connectingHandJointIndex = AvatarList.getAvatarIdentifiers().indexOf(connectingId) !== -1 ?
getIdealHandJointIndex(AvatarList.getAvatar(connectingId), stringToHand(connectingHandString)) : -1;
setupConnecting(id, jointIndex);
state = STATES.CONNECTING;
// play sound
@ -696,10 +699,9 @@
}
// send message that we are connecting with them
messageSend({
handStringMessageSend({
key: "connecting",
id: id,
hand: handToString(currentHand)
id: id
});
Controller.triggerHapticPulse(HAPTIC_DATA.initial.strength, HAPTIC_DATA.initial.duration, handToHaptic(currentHand));
@ -710,7 +712,7 @@
if (state !== STATES.CONNECTING) {
debug("stopping connecting interval, state changed");
stopConnecting();
} else if (!isNearby(id, hand)) {
} else if (!isNearby()) {
// gotta go back to waiting
debug(id, "moved, back to waiting");
stopConnecting();
@ -718,7 +720,7 @@
key: "done"
});
startHandshake();
} else if (count > CONNECTING_TIME/CONNECTING_INTERVAL) {
} else if (count > CONNECTING_TIME / CONNECTING_INTERVAL) {
debug("made connection with " + id);
makeConnection(id);
stopConnecting();
@ -744,140 +746,120 @@
| ---------- (done) ---------> |
*/
function messageHandler(channel, messageString, senderID) {
var message = {};
function existingOrSearchedJointIndex() { // If this is a new connectingId, we'll need to find the jointIndex
return connectingId ? connectingHandJointIndex : getIdealHandJointIndex(AvatarList.getAvatar(senderID), message[HAND_STRING_PROPERTY]);
}
if (channel !== MESSAGE_CHANNEL) {
return;
}
if (MyAvatar.sessionUUID === senderID) { // ignore my own
return;
}
var message = {};
try {
message = JSON.parse(messageString);
} catch (e) {
debug(e);
}
switch (message.key) {
case "waiting":
// add this guy to waiting object. Any other message from this person will
// remove it from the list
waitingList[senderID] = message.hand;
break;
case "connectionRequest":
delete waitingList[senderID];
if (state === STATES.WAITING && message.id === MyAvatar.sessionUUID &&
(!connectingId || connectingId === senderID)) {
// you were waiting for a connection request, so send the ack. Or, you and the other
// guy raced and both send connectionRequests. Handle that too
connectingId = senderID;
connectingHandString = message.hand;
connectingHandJointIndex = AvatarList.getAvatarIdentifiers().indexOf(connectingId) !== -1 ?
getIdealHandJointIndex(AvatarList.getAvatar(connectingId), stringToHand(connectingHandString)) : -1;
messageSend({
key: "connectionAck",
id: senderID,
hand: handToString(currentHand)
});
} else if (state === STATES.WAITING && connectingId === senderID) {
// the person you are trying to connect sent a request to someone else. See the
// if statement above. So, don't cry, just start the handshake over again
case "waiting":
// add this guy to waiting object. Any other message from this person will remove it from the list
waitingList[senderID] = getIdealHandJointIndex(AvatarList.getAvatar(senderID), message[HAND_STRING_PROPERTY]);
break;
case "connectionRequest":
delete waitingList[senderID];
if (state === STATES.WAITING && message.id === MyAvatar.sessionUUID && (!connectingId || connectingId === senderID)) {
// you were waiting for a connection request, so send the ack. Or, you and the other
// guy raced and both send connectionRequests. Handle that too
setupConnecting(senderID, existingOrSearchedJointIndex());
handStringMessageSend({
key: "connectionAck",
id: senderID
});
} else if (state === STATES.WAITING && connectingId === senderID) {
// the person you are trying to connect sent a request to someone else. See the
// if statement above. So, don't cry, just start the handshake over again
startHandshake();
}
break;
case "connectionAck":
delete waitingList[senderID];
if (state === STATES.WAITING && (!connectingId || connectingId === senderID)) {
if (message.id === MyAvatar.sessionUUID) {
stopWaiting();
startConnecting(senderID, existingOrSearchedJointIndex());
} else if (connectingId) {
// this is for someone else (we lost race in connectionRequest),
// so let's start over
startHandshake();
}
break;
case "connectionAck":
delete waitingList[senderID];
if (state === STATES.WAITING && (!connectingId || connectingId === senderID)) {
if (message.id === MyAvatar.sessionUUID) {
// start connecting...
connectingId = senderID;
connectingHandString = message.hand;
connectingHandJointIndex = AvatarList.getAvatarIdentifiers().indexOf(connectingId) !== -1 ?
getIdealHandJointIndex(AvatarList.getAvatar(connectingId), stringToHand(connectingHandString)) : -1;
stopWaiting();
startConnecting(senderID, connectingHandString);
} else if (connectingId) {
// this is for someone else (we lost race in connectionRequest),
// so lets start over
}
// TODO: check to see if we are waiting for this but the person we are connecting to sent it to
// someone else, and try again
break;
case "connecting":
delete waitingList[senderID];
if (state === STATES.WAITING && senderID === connectingId) {
if (message.id !== MyAvatar.sessionUUID) {
// the person we were trying to connect is connecting to someone else
// so try again
startHandshake();
break;
}
startConnecting(senderID, connectingHandJointIndex);
}
break;
case "done":
delete waitingList[senderID];
if (state === STATES.CONNECTING && connectingId === senderID) {
// if they are done, and didn't connect us, terminate our
// connecting
if (message.connectionId !== MyAvatar.sessionUUID) {
stopConnecting();
// now just call startHandshake. Should be ok to do so without a
// value for isKeyboard, as we should not change the animation
// state anyways (if any)
startHandshake();
}
} else {
// if waiting or inactive, let's clear the connecting id. If in makingConnection,
// do nothing
if (state !== STATES.MAKING_CONNECTION && connectingId === senderID) {
clearConnecting();
if (state !== STATES.INACTIVE) {
startHandshake();
}
}
// TODO: check to see if we are waiting for this but the person we are connecting sent it to
// someone else, and try again
break;
case "connecting":
delete waitingList[senderID];
if (state === STATES.WAITING && senderID === connectingId) {
// temporary logging
if (connectingHandString !== message.hand) {
debug("connecting hand", connectingHandString, "not same as connecting hand in message", message.hand);
}
connectingHandString = message.hand;
if (message.id !== MyAvatar.sessionUUID) {
// the person we were trying to connect is connecting to someone else
// so try again
startHandshake();
break;
}
startConnecting(senderID, message.hand);
}
break;
case "done":
delete waitingList[senderID];
if (state === STATES.CONNECTING && connectingId === senderID) {
// if they are done, and didn't connect us, terminate our
// connecting
if (message.connectionId !== MyAvatar.sessionUUID) {
stopConnecting();
// now just call startHandshake. Should be ok to do so without a
// value for isKeyboard, as we should not change the animation
// state anyways (if any)
startHandshake();
}
} else {
// if waiting or inactive, lets clear the connecting id. If in makingConnection,
// do nothing
if (state !== STATES.MAKING_CONNECTION && connectingId === senderID) {
connectingId = undefined;
connectingHandString = undefined;
connectingHandJointIndex = -1;
if (state !== STATES.INACTIVE) {
startHandshake();
}
}
}
break;
default:
debug("unknown message", message);
break;
}
break;
default:
debug("unknown message", message);
break;
}
}
Messages.subscribe(MESSAGE_CHANNEL);
Messages.messageReceived.connect(messageHandler);
function makeGripHandler(hand, animate) {
// determine if we are gripping or un-gripping
if (animate) {
return function(value) {
return function (value) {
updateTriggers(value, true, hand);
};
} else {
return function (value) {
updateTriggers(value, false, hand);
};
}
return function (value) {
updateTriggers(value, false, hand);
};
}
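// Typical wiring for these closures (mapping and channel names assumed, shown
// commented out): each grip channel gets its own handler so updateTriggers
// knows which hand fired and whether it may change the animation.
// var gripMapping = Controller.newMapping("com.example.connectionGrip");
// gripMapping.from(Controller.Standard.RightGrip).to(makeGripHandler(Controller.Standard.RightHand));
// gripMapping.from(Controller.Standard.LeftGrip).to(makeGripHandler(Controller.Standard.LeftHand));
// Controller.enableMapping("com.example.connectionGrip");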
function keyPressEvent(event) {
if ((event.text === "x") && !event.isAutoRepeat && !event.isShifted && !event.isMeta && !event.isControl &&
!event.isAlt) {
if ((event.text === "x") && !event.isAutoRepeat && !event.isShifted && !event.isMeta && !event.isControl && !event.isAlt) {
updateTriggers(1.0, true, Controller.Standard.RightHand);
}
}
function keyReleaseEvent(event) {
if ((event.text === "x") && !event.isAutoRepeat && !event.isShifted && !event.isMeta && !event.isControl &&
!event.isAlt) {
if ((event.text === "x") && !event.isAutoRepeat && !event.isShifted && !event.isMeta && !event.isControl && !event.isAlt) {
updateTriggers(0.0, true, Controller.Standard.RightHand);
}
}
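// Desktop fallback: tapping "x" simulates a full grip on the right hand (value
// 1.0 on press, 0.0 on release), so the whole handshake flow can be exercised
// without hand controllers.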

View file

@ -723,7 +723,6 @@ function startup() {
activeIcon: "icons/tablet-icons/people-a.svg",
sortOrder: 7
});
tablet.fromQml.connect(fromQml);
button.clicked.connect(onTabletButtonClicked);
tablet.screenChanged.connect(onTabletScreenChanged);
Users.usernameFromIDReply.connect(usernameFromIDReply);
@ -789,8 +788,23 @@ function onTabletButtonClicked() {
audioTimer = createAudioInterval(conserveResources ? AUDIO_LEVEL_CONSERVED_UPDATE_INTERVAL_MS : AUDIO_LEVEL_UPDATE_INTERVAL_MS);
}
}
var hasEventBridge = false;
function wireEventBridge(on) {
if (on) {
if (!hasEventBridge) {
tablet.fromQml.connect(fromQml);
hasEventBridge = true;
}
} else {
if (hasEventBridge) {
tablet.fromQml.disconnect(fromQml);
hasEventBridge = false;
}
}
}
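// Why the guard: signal connections are not idempotent -- wiring fromQml twice
// would deliver every QML message twice, and disconnecting a handler that was
// never connected can log a script error. Tracking hasEventBridge lets the
// screen-change handler call wireEventBridge on every transition safely.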
function onTabletScreenChanged(type, url) {
wireEventBridge(shouldActivateButton);
// for toolbar mode: change button to active when the window is first opened, false otherwise.
button.editProperties({isActive: shouldActivateButton});
shouldActivateButton = false;

View file

@ -30,40 +30,6 @@
text: buttonName,
sortOrder: 8
});
function messagesWaiting(isWaiting) {
button.editProperties({
icon: isWaiting ? WAITING_ICON : NORMAL_ICON
// No need for a different activeIcon, because we issue messagesWaiting(false) when the button goes active anyway.
});
}
function onClicked() {
if (onGotoScreen) {
// for toolbar-mode: go back to home screen, this will close the window.
tablet.gotoHomeScreen();
} else {
shouldActivateButton = true;
tablet.loadQMLSource(gotoQmlSource);
onGotoScreen = true;
}
}
function onScreenChanged(type, url) {
ignore(type);
if (url === gotoQmlSource) {
onGotoScreen = true;
shouldActivateButton = true;
button.editProperties({isActive: shouldActivateButton});
messagesWaiting(false);
} else {
shouldActivateButton = false;
onGotoScreen = false;
button.editProperties({isActive: shouldActivateButton});
}
}
button.clicked.connect(onClicked);
tablet.screenChanged.connect(onScreenChanged);
function request(options, callback) { // cb(error, responseOfCorrectContentType) of url. A subset of npm request.
var httpRequest = new XMLHttpRequest(), key;
// Qt bug: apparently doesn't handle onload. Workaround: use readyState.
@ -112,6 +78,70 @@
httpRequest.open(options.method, options.uri, true);
httpRequest.send(options.body);
}
function fromQml(message) {
var response = {id: message.id, jsonrpc: "2.0"};
switch (message.method) {
case 'request':
request(message.params, function (error, data) {
response.error = error;
response.result = data;
tablet.sendToQml(response);
});
return;
default:
response.error = {message: 'Unrecognized message', data: message};
}
tablet.sendToQml(response);
}
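// Example round trip over the tablet event bridge (values hypothetical): QML
// sends {id: 1, method: "request", params: {uri: someUrl, method: "GET"}} and
// this handler replies {id: 1, jsonrpc: "2.0", result: <response>, error: <err>};
// unrecognized methods come back with only an error field set.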
function messagesWaiting(isWaiting) {
button.editProperties({
icon: isWaiting ? WAITING_ICON : NORMAL_ICON
// No need for a different activeIcon, because we issue messagesWaiting(false) when the button goes active anyway.
});
}
var hasEventBridge = false;
function wireEventBridge(on) {
if (on) {
if (!hasEventBridge) {
tablet.fromQml.connect(fromQml);
hasEventBridge = true;
}
} else {
if (hasEventBridge) {
tablet.fromQml.disconnect(fromQml);
hasEventBridge = false;
}
}
}
function onClicked() {
if (onGotoScreen) {
// for toolbar-mode: go back to home screen, this will close the window.
tablet.gotoHomeScreen();
} else {
shouldActivateButton = true;
tablet.loadQMLSource(gotoQmlSource);
onGotoScreen = true;
}
}
function onScreenChanged(type, url) {
ignore(type);
if (url === gotoQmlSource) {
onGotoScreen = true;
shouldActivateButton = true;
button.editProperties({isActive: shouldActivateButton});
wireEventBridge(true);
messagesWaiting(false);
} else {
shouldActivateButton = false;
onGotoScreen = false;
button.editProperties({isActive: shouldActivateButton});
wireEventBridge(false);
}
}
button.clicked.connect(onClicked);
tablet.screenChanged.connect(onScreenChanged);
var stories = {};
var DEBUG = false;

View file

@ -114,6 +114,12 @@ int main(int argc, char** argv) {
last = now;
InputCalibrationData calibrationData = {
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4()
@ -130,6 +136,12 @@ int main(int argc, char** argv) {
{
InputCalibrationData calibrationData = {
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4(),
glm::mat4()