Merge branch 'master' of git://github.com/highfidelity/hifi into roughness_metal

Olivier Prat 2018-01-10 10:07:32 +01:00
commit cc8a717a81
42 changed files with 2149 additions and 165 deletions


@ -12,10 +12,8 @@ function(JOIN VALUES GLUE OUTPUT)
endfunction()
if (NOT DEV_BUILD)
set(INTERFACE_QML_QRC ${CMAKE_CURRENT_BINARY_DIR}/qml.qrc)
generate_qrc(OUTPUT ${INTERFACE_QML_QRC} PATH ${CMAKE_CURRENT_SOURCE_DIR}/resources GLOBS *.qml *.qss *.js *.html *.ttf *.gif *.svg *.png *.jpg)
endif()
set(INTERFACE_QML_QRC ${CMAKE_CURRENT_BINARY_DIR}/qml.qrc)
generate_qrc(OUTPUT ${INTERFACE_QML_QRC} PATH ${CMAKE_CURRENT_SOURCE_DIR}/resources GLOBS *.qml *.qss *.js *.html *.ttf *.gif *.svg *.png *.jpg)
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "LeapMotion")
@ -74,9 +72,7 @@ qt5_wrap_ui(QT_UI_HEADERS "${QT_UI_FILES}")
# add them to the interface source files
set(INTERFACE_SRCS ${INTERFACE_SRCS} "${QT_UI_HEADERS}" "${QT_RESOURCES}")
if (NOT DEV_BUILD)
list(APPEND INTERFACE_SRCS ${INTERFACE_QML_QRC})
endif()
if (UNIX)
install(


@ -36,6 +36,7 @@ Rectangle {
property bool pendingInventoryReply: true;
property bool isShowingMyItems: false;
property bool isDebuggingFirstUseTutorial: false;
property int pendingItemCount: 0;
// Style
color: hifi.colors.white;
Connections {
@ -79,18 +80,22 @@ Rectangle {
onInventoryResult: {
purchasesReceived = true;
if (root.pendingInventoryReply) {
inventoryTimer.start();
}
if (result.status !== 'success') {
console.log("Failed to get purchases", result.message);
} else {
} else if (!purchasesContentsList.dragging) { // Don't modify the view if the user's scrolling
var inventoryResult = processInventoryResult(result.data.assets);
var currentIndex = purchasesContentsList.currentIndex === -1 ? 0 : purchasesContentsList.currentIndex;
purchasesModel.clear();
purchasesModel.append(inventoryResult);
root.pendingItemCount = 0;
for (var i = 0; i < purchasesModel.count; i++) {
if (purchasesModel.get(i).status === "pending") {
root.pendingItemCount++;
}
}
if (previousPurchasesModel.count !== 0) {
checkIfAnyItemStatusChanged();
} else {
@ -103,6 +108,12 @@ Rectangle {
previousPurchasesModel.append(inventoryResult);
buildFilteredPurchasesModel();
purchasesContentsList.positionViewAtIndex(currentIndex, ListView.Beginning);
}
if (root.pendingInventoryReply && root.pendingItemCount > 0) {
inventoryTimer.start();
}
root.pendingInventoryReply = false;
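The refresh above re-arms inventoryTimer only when the reply just processed still left items in the "pending" state, so polling stops once every purchase has settled. A minimal C++ sketch of that gating, using hypothetical names that are not part of the commit:

#include <algorithm>
#include <string>
#include <vector>

// Re-poll only while an inventory reply is outstanding and at least one item is still
// "pending", mirroring the pendingItemCount check in the onInventoryResult handler above.
bool shouldRestartInventoryTimer(const std::vector<std::string>& itemStatuses, bool pendingInventoryReply) {
    auto pendingItemCount = std::count(itemStatuses.begin(), itemStatuses.end(), std::string("pending"));
    return pendingInventoryReply && pendingItemCount > 0;
}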
@ -419,6 +430,8 @@ Rectangle {
visible: (root.isShowingMyItems && filteredPurchasesModel.count !== 0) || (!root.isShowingMyItems && filteredPurchasesModel.count !== 0);
clip: true;
model: filteredPurchasesModel;
snapMode: ListView.SnapToItem;
highlightRangeMode: ListView.StrictlyEnforceRange;
// Anchors
anchors.top: root.canRezCertifiedItems ? separator.bottom : cantRezCertified.bottom;
anchors.topMargin: 12;


@ -53,7 +53,7 @@ Item {
// Title Bar text
RalewaySemiBold {
text: "HIFI COMMERCE - LOGIN";
text: "Log in to continue";
// Text size
size: hifi.fontSizes.overlayTitle;
// Anchors


@ -25,8 +25,12 @@ Item {
HifiConstants { id: hifi; }
id: root;
property bool historyReceived: false;
property bool initialHistoryReceived: false;
property bool historyRequestPending: true;
property bool noMoreHistoryData: false;
property int pendingCount: 0;
property int currentHistoryPage: 1;
property var pagesAlreadyAdded: new Array();
Connections {
target: Commerce;
@ -36,32 +40,86 @@ Item {
}
onHistoryResult : {
historyReceived = true;
if (result.status === 'success') {
var sameItemCount = 0;
tempTransactionHistoryModel.clear();
tempTransactionHistoryModel.append(result.data.history);
for (var i = 0; i < tempTransactionHistoryModel.count; i++) {
if (!transactionHistoryModel.get(i)) {
sameItemCount = -1;
break;
} else if (tempTransactionHistoryModel.get(i).transaction_type === transactionHistoryModel.get(i).transaction_type &&
tempTransactionHistoryModel.get(i).text === transactionHistoryModel.get(i).text) {
sameItemCount++;
}
}
root.initialHistoryReceived = true;
root.historyRequestPending = false;
if (sameItemCount !== tempTransactionHistoryModel.count) {
transactionHistoryModel.clear();
if (result.status === 'success') {
var currentPage = parseInt(result.current_page);
if (result.data.history.length === 0) {
root.noMoreHistoryData = true;
console.log("No more data to retrieve from Commerce.history() endpoint.")
} else if (root.currentHistoryPage === 1) {
var sameItemCount = 0;
tempTransactionHistoryModel.clear();
tempTransactionHistoryModel.append(result.data.history);
for (var i = 0; i < tempTransactionHistoryModel.count; i++) {
transactionHistoryModel.append(tempTransactionHistoryModel.get(i));
if (!transactionHistoryModel.get(i)) {
sameItemCount = -1;
break;
} else if (tempTransactionHistoryModel.get(i).transaction_type === transactionHistoryModel.get(i).transaction_type &&
tempTransactionHistoryModel.get(i).text === transactionHistoryModel.get(i).text) {
sameItemCount++;
}
}
if (sameItemCount !== tempTransactionHistoryModel.count) {
transactionHistoryModel.clear();
for (var i = 0; i < tempTransactionHistoryModel.count; i++) {
transactionHistoryModel.append(tempTransactionHistoryModel.get(i));
}
calculatePendingAndInvalidated();
}
} else {
if (root.pagesAlreadyAdded.indexOf(currentPage) !== -1) {
console.log("Page " + currentPage + " of history has already been added to the list.");
} else {
// First, add the history result to a temporary model
tempTransactionHistoryModel.clear();
tempTransactionHistoryModel.append(result.data.history);
// Make a note that we've already added this page to the model...
root.pagesAlreadyAdded.push(currentPage);
var insertionIndex = 0;
// If there's nothing in the model right now, we don't need to modify insertionIndex.
if (transactionHistoryModel.count !== 0) {
var currentIteratorPage;
// Search through the whole transactionHistoryModel and look for the insertion point.
// The insertion point is found when the result page from the server is less than
// the page that the current item came from, OR when we've reached the end of the whole model.
for (var i = 0; i < transactionHistoryModel.count; i++) {
currentIteratorPage = transactionHistoryModel.get(i).resultIsFromPage;
if (currentPage < currentIteratorPage) {
insertionIndex = i;
break;
} else if (i === transactionHistoryModel.count - 1) {
insertionIndex = i + 1;
break;
}
}
}
// Go through the results we just got back from the server, setting the "resultIsFromPage"
// property of those results and adding them to the main model.
for (var i = 0; i < tempTransactionHistoryModel.count; i++) {
tempTransactionHistoryModel.setProperty(i, "resultIsFromPage", currentPage);
transactionHistoryModel.insert(i + insertionIndex, tempTransactionHistoryModel.get(i))
}
calculatePendingAndInvalidated();
}
calculatePendingAndInvalidated();
}
}
refreshTimer.start();
// Only auto-refresh if the user hasn't scrolled
// and there is more data to grab
if (transactionHistory.atYBeginning && !root.noMoreHistoryData) {
refreshTimer.start();
}
}
}
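The branch above keeps transactionHistoryModel grouped in ascending order of the page each entry came from, even when pages arrive out of order: it records the source page on every entry, finds the first entry from a later page, and inserts the new results there. A small standalone C++ sketch of the same idea, with hypothetical names (illustrative only, not the commit's code):

#include <algorithm>
#include <vector>

struct HistoryEntry {
    int resultIsFromPage = 0;   // page number this entry was fetched from
    // ... transaction fields elided ...
};

// Insert one page of results so entries stay grouped in ascending page order,
// mirroring the insertionIndex search in WalletHome.qml's onHistoryResult handler.
void insertPage(std::vector<HistoryEntry>& model, std::vector<HistoryEntry> pageResults, int currentPage) {
    for (auto& entry : pageResults) {
        entry.resultIsFromPage = currentPage;
    }
    // The insertion point is the first entry that came from a later page (or the end).
    auto insertPos = std::find_if(model.begin(), model.end(),
        [&](const HistoryEntry& e) { return currentPage < e.resultIsFromPage; });
    model.insert(insertPos, pageResults.begin(), pageResults.end());
}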
@ -134,9 +192,13 @@ Item {
onVisibleChanged: {
if (visible) {
historyReceived = false;
transactionHistoryModel.clear();
Commerce.balance();
Commerce.history();
initialHistoryReceived = false;
root.currentHistoryPage = 1;
root.noMoreHistoryData = false;
root.historyRequestPending = true;
Commerce.history(root.currentHistoryPage);
} else {
refreshTimer.stop();
}
@ -164,9 +226,12 @@ Item {
id: refreshTimer;
interval: 4000;
onTriggered: {
console.log("Refreshing Wallet Home...");
Commerce.balance();
Commerce.history();
if (transactionHistory.atYBeginning) {
console.log("Refreshing 1st Page of Recent Activity...");
root.historyRequestPending = true;
Commerce.balance();
Commerce.history(1);
}
}
}
@ -241,7 +306,7 @@ Item {
anchors.right: parent.right;
Item {
visible: transactionHistoryModel.count === 0 && root.historyReceived;
visible: transactionHistoryModel.count === 0 && root.initialHistoryReceived;
anchors.centerIn: parent;
width: parent.width - 12;
height: parent.height;
@ -364,7 +429,12 @@ Item {
onAtYEndChanged: {
if (transactionHistory.atYEnd) {
console.log("User scrolled to the bottom of 'Recent Activity'.");
// Grab next page of results and append to model
if (!root.historyRequestPending && !root.noMoreHistoryData) {
// Grab next page of results and append to model
root.historyRequestPending = true;
Commerce.history(++root.currentHistoryPage);
console.log("Fetching Page " + root.currentHistoryPage + " of Recent Activity...");
}
}
}
}


@ -127,9 +127,15 @@ Item {
GridView {
id: gridView
keyNavigationEnabled: false
highlightFollowsCurrentItem: false
property int previousGridIndex: -1
// true if any of the buttons contains the mouse
property bool containsMouse: false
anchors {
fill: parent
topMargin: 20
@ -162,15 +168,29 @@ Item {
flow: GridView.LeftToRight
model: page.proxyModel
delegate: Item {
delegate: Control {
id: wrapper
width: gridView.cellWidth
height: gridView.cellHeight
hoverEnabled: true
property bool containsMouse: gridView.containsMouse
onHoveredChanged: {
if (hovered && !gridView.containsMouse) {
gridView.containsMouse = true
} else {
gridView.containsMouse = false
}
}
property var proxy: modelData
TabletButton {
id: tabletButton
scale: wrapper.hovered ? 1.25 : wrapper.containsMouse ? 0.75 : 1.0
Behavior on scale { NumberAnimation { duration: 200; easing.type: Easing.Linear } }
anchors.centerIn: parent
gridView: wrapper.GridView.view
buttonIndex: page.proxyModel.buttonIndex(uuid);
@ -224,6 +244,7 @@ Item {
PageIndicator {
id: pageIndicator
currentIndex: swipeView.currentIndex
visible: swipeView.count > 1
delegate: Item {
width: 15


@ -3166,6 +3166,7 @@ glm::mat4 MyAvatar::getLeftHandCalibrationMat() const {
}
bool MyAvatar::pinJoint(int index, const glm::vec3& position, const glm::quat& orientation) {
std::lock_guard<std::mutex> guard(_pinnedJointsMutex);
auto hipsIndex = getJointIndex("Hips");
if (index != hipsIndex) {
qWarning() << "Pinning is only supported for the hips joint at the moment.";
@ -3185,7 +3186,14 @@ bool MyAvatar::pinJoint(int index, const glm::vec3& position, const glm::quat& o
return true;
}
bool MyAvatar::isJointPinned(int index) {
std::lock_guard<std::mutex> guard(_pinnedJointsMutex);
auto it = std::find(_pinnedJoints.begin(), _pinnedJoints.end(), index);
return it != _pinnedJoints.end();
}
bool MyAvatar::clearPinOnJoint(int index) {
std::lock_guard<std::mutex> guard(_pinnedJointsMutex);
auto it = std::find(_pinnedJoints.begin(), _pinnedJoints.end(), index);
if (it != _pinnedJoints.end()) {
_pinnedJoints.erase(it);


@ -448,9 +448,8 @@ public:
virtual void clearJointData(const QString& name) override;
virtual void clearJointsData() override;
Q_INVOKABLE bool pinJoint(int index, const glm::vec3& position, const glm::quat& orientation);
bool isJointPinned(int index);
Q_INVOKABLE bool clearPinOnJoint(int index);
Q_INVOKABLE float getIKErrorOnLastSolve() const;
@ -837,6 +836,7 @@ private:
bool getIsAway() const { return _isAway; }
void setAway(bool value);
std::mutex _pinnedJointsMutex;
std::vector<int> _pinnedJoints;
// height of user in sensor space, when standing erect.


@ -34,12 +34,25 @@ Rig::CharacterControllerState convertCharacterControllerState(CharacterControlle
}
static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
glm::mat4 worldToSensorMat = glm::inverse(myAvatar->getSensorToWorldMatrix());
// check for pinned hips.
auto hipsIndex = myAvatar->getJointIndex("Hips");
if (myAvatar->isJointPinned(hipsIndex)) {
Transform avatarTransform = myAvatar->getTransform();
AnimPose result = AnimPose(worldToSensorMat * avatarTransform.getMatrix() * Matrices::Y_180);
result.scale() = glm::vec3(1.0f, 1.0f, 1.0f);
return result;
} else {
DebugDraw::getInstance().removeMarker("pinnedHips");
}
glm::mat4 hipsMat = myAvatar->deriveBodyFromHMDSensor();
glm::vec3 hipsPos = extractTranslation(hipsMat);
glm::quat hipsRot = glmExtractRotation(hipsMat);
glm::mat4 avatarToWorldMat = myAvatar->getTransform().getMatrix();
glm::mat4 worldToSensorMat = glm::inverse(myAvatar->getSensorToWorldMatrix());
glm::mat4 avatarToSensorMat = worldToSensorMat * avatarToWorldMat;
// dampen hips rotation, by mixing it with the avatar orientation in sensor space
@ -323,17 +336,25 @@ void MySkeletonModel::updateFingers() {
for (auto& link : chain) {
int index = _rig.indexOfJoint(link.second);
if (index >= 0) {
auto rotationFrameOffset = _jointRotationFrameOffsetMap.find(index);
if (rotationFrameOffset == _jointRotationFrameOffsetMap.end()) {
_jointRotationFrameOffsetMap.insert(std::pair<int, int>(index, 0));
rotationFrameOffset = _jointRotationFrameOffsetMap.find(index);
}
auto pose = myAvatar->getControllerPoseInSensorFrame(link.first);
if (pose.valid) {
glm::quat relRot = glm::inverse(prevAbsRot) * pose.getRotation();
// only set the rotation for the finger joints, not the hands.
if (link.first != controller::Action::LEFT_HAND && link.first != controller::Action::RIGHT_HAND) {
_rig.setJointRotation(index, true, relRot, CONTROLLER_PRIORITY);
rotationFrameOffset->second = 0;
}
prevAbsRot = pose.getRotation();
} else {
} else if (rotationFrameOffset->second == 1) { // the pose is invalid but a rotation was set on the previous frame (offset == 1), so clear it once
_rig.clearJointAnimationPriority(index);
}
rotationFrameOffset->second++;
}
}
}


@ -28,6 +28,8 @@ private:
AnimPose _prevHips; // sensor frame
bool _prevHipsValid { false };
std::map<int, int> _jointRotationFrameOffsetMap;
};
#endif // hifi_MySkeletonModel_h


@ -72,11 +72,16 @@ void Ledger::signedSend(const QString& propertyName, const QByteArray& text, con
send(endpoint, success, fail, QNetworkAccessManager::PutOperation, AccountManagerAuth::Required, request);
}
void Ledger::keysQuery(const QString& endpoint, const QString& success, const QString& fail) {
void Ledger::keysQuery(const QString& endpoint, const QString& success, const QString& fail, QJsonObject& requestParams) {
auto wallet = DependencyManager::get<Wallet>();
QJsonObject request;
request["public_keys"] = QJsonArray::fromStringList(wallet->listPublicKeys());
send(endpoint, success, fail, QNetworkAccessManager::PostOperation, AccountManagerAuth::Required, request);
requestParams["public_keys"] = QJsonArray::fromStringList(wallet->listPublicKeys());
send(endpoint, success, fail, QNetworkAccessManager::PostOperation, AccountManagerAuth::Required, requestParams);
}
void Ledger::keysQuery(const QString& endpoint, const QString& success, const QString& fail) {
QJsonObject requestParams;
keysQuery(endpoint, success, fail, requestParams);
}
void Ledger::buy(const QString& hfc_key, int cost, const QString& asset_id, const QString& inventory_key, const bool controlled_failure) {
@ -169,6 +174,7 @@ void Ledger::historySuccess(QNetworkReply& reply) {
QJsonObject newDataData;
newDataData["history"] = newHistoryArray;
newData["data"] = newDataData;
newData["current_page"] = data["current_page"].toInt();
emit historyResult(newData);
}
@ -176,8 +182,11 @@ void Ledger::historyFailure(QNetworkReply& reply) {
failResponse("history", reply);
}
void Ledger::history(const QStringList& keys) {
keysQuery("history", "historySuccess", "historyFailure");
void Ledger::history(const QStringList& keys, const int& pageNumber) {
QJsonObject params;
params["per_page"] = 100;
params["page"] = pageNumber;
keysQuery("history", "historySuccess", "historyFailure", params);
}
// The api/failResponse is called just for the side effect of logging.
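With the keysQuery() overload above, the wallet's public keys are folded into whatever parameters the caller supplies, so a paged history request carries public_keys, per_page, and page in a single body. A short sketch of what that request body looks like, assuming Qt's JSON classes and a hypothetical key string:

#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QStringList>
#include <cstdio>

int main() {
    // What Ledger::history(keys, pageNumber) hands to keysQuery(), which then adds the
    // wallet's public keys before sending the "history" request.
    QJsonObject params;
    params["per_page"] = 100;
    params["page"] = 3;  // hypothetical page number
    params["public_keys"] = QJsonArray::fromStringList(QStringList() << "examplePublicKeyBase64");  // hypothetical key

    printf("%s\n", QJsonDocument(params).toJson(QJsonDocument::Compact).constData());
    // Prints: {"page":3,"per_page":100,"public_keys":["examplePublicKeyBase64"]}
    return 0;
}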


@ -29,7 +29,7 @@ public:
bool receiveAt(const QString& hfc_key, const QString& old_key);
void balance(const QStringList& keys);
void inventory(const QStringList& keys);
void history(const QStringList& keys);
void history(const QStringList& keys, const int& pageNumber);
void account();
void reset();
void updateLocation(const QString& asset_id, const QString location, const bool controlledFailure = false);
@ -79,6 +79,7 @@ private:
QJsonObject apiResponse(const QString& label, QNetworkReply& reply);
QJsonObject failResponse(const QString& label, QNetworkReply& reply);
void send(const QString& endpoint, const QString& success, const QString& fail, QNetworkAccessManager::Operation method, AccountManagerAuth::Type authType, QJsonObject request);
void keysQuery(const QString& endpoint, const QString& success, const QString& fail, QJsonObject& extraRequestParams);
void keysQuery(const QString& endpoint, const QString& success, const QString& fail);
void signedSend(const QString& propertyName, const QByteArray& text, const QString& key, const QString& endpoint, const QString& success, const QString& fail, const bool controlled_failure = false);
};


@ -96,12 +96,12 @@ void QmlCommerce::inventory() {
}
}
void QmlCommerce::history() {
void QmlCommerce::history(const int& pageNumber) {
auto ledger = DependencyManager::get<Ledger>();
auto wallet = DependencyManager::get<Wallet>();
QStringList cachedPublicKeys = wallet->listPublicKeys();
if (!cachedPublicKeys.isEmpty()) {
ledger->history(cachedPublicKeys);
ledger->history(cachedPublicKeys, pageNumber);
}
}


@ -60,7 +60,7 @@ protected:
Q_INVOKABLE void buy(const QString& assetId, int cost, const bool controlledFailure = false);
Q_INVOKABLE void balance();
Q_INVOKABLE void inventory();
Q_INVOKABLE void history();
Q_INVOKABLE void history(const int& pageNumber);
Q_INVOKABLE void generateKeyPair();
Q_INVOKABLE void reset();
Q_INVOKABLE void resetLocalWalletOnly();


@ -192,8 +192,7 @@ void WindowScriptingInterface::ensureReticleVisible() const {
/// Display a "browse to directory" dialog. If `directory` is an invalid file or directory the browser will start at the current
/// working directory.
/// \param const QString& title title of the window
/// \param const QString& directory directory to start the file browser at
/// \param const QString& nameFilter filter to filter filenames by - see `QFileDialog`
/// \param const QString& directory directory to start the directory browser at
/// \return QScriptValue file path as a string if one was selected, otherwise `QScriptValue::NullValue`
QScriptValue WindowScriptingInterface::browseDir(const QString& title, const QString& directory) {
ensureReticleVisible();
@ -214,8 +213,7 @@ QScriptValue WindowScriptingInterface::browseDir(const QString& title, const QSt
/// Display a "browse to directory" dialog. If `directory` is an invalid file or directory the browser will start at the current
/// working directory.
/// \param const QString& title title of the window
/// \param const QString& directory directory to start the file browser at
/// \param const QString& nameFilter filter to filter filenames by - see `QFileDialog`
/// \param const QString& directory directory to start the directory browser at
void WindowScriptingInterface::browseDirAsync(const QString& title, const QString& directory) {
ensureReticleVisible();
QString path = directory;
@ -459,6 +457,41 @@ int WindowScriptingInterface::openMessageBox(QString title, QString text, int bu
return createMessageBox(title, text, buttons, defaultButton);
}
/**jsdoc
* <p>The buttons that may be included in a message box created by {@link Window.openMessageBox|openMessageBox} are defined by
* numeric values:
* <table>
* <thead>
* <tr>
* <th>Button</th>
* <th>Value</th>
* <th>Description</th>
* </tr>
* </thead>
* <tbody>
* <tr> <td><strong>NoButton</strong></td> <td><code>0x0</code></td> <td>An invalid button.</td> </tr>
* <tr> <td><strong>Ok</strong></td> <td><code>0x400</code></td> <td>"OK"</td> </tr>
* <tr> <td><strong>Save</strong></td> <td><code>0x800</code></td> <td>"Save"</td> </tr>
* <tr> <td><strong>SaveAll</strong></td> <td><code>0x1000</code></td> <td>"Save All"</td> </tr>
* <tr> <td><strong>Open</strong></td> <td><code>0x2000</code></td> <td>"Open"</td> </tr>
* <tr> <td><strong>Yes</strong></td> <td><code>0x4000</code></td> <td>"Yes"</td> </tr>
* <tr> <td><strong>YesToAll</strong></td> <td><code>0x8000</code></td> <td>"Yes to All"</td> </tr>
* <tr> <td><strong>No</strong></td> <td><code>0x10000</code></td> <td>"No"</td> </tr>
* <tr> <td><strong>NoToAll</strong></td> <td><code>0x20000</code></td> <td>"No to All"</td> </tr>
* <tr> <td><strong>Abort</strong></td> <td><code>0x40000</code></td> <td>"Abort"</td> </tr>
* <tr> <td><strong>Retry</strong></td> <td><code>0x80000</code></td> <td>"Retry"</td> </tr>
* <tr> <td><strong>Ignore</strong></td> <td><code>0x100000</code></td> <td>"Ignore"</td> </tr>
* <tr> <td><strong>Close</strong></td> <td><code>0x200000</code></td> <td>"Close"</td> </tr>
* <tr> <td><strong>Cancel</strong></td> <td><code>0x400000</code></td> <td>"Cancel"</td> </tr>
* <tr> <td><strong>Discard</strong></td> <td><code>0x800000</code></td> <td>"Discard" or "Don't Save"</td> </tr>
* <tr> <td><strong>Help</strong></td> <td><code>0x1000000</code></td> <td>"Help"</td> </tr>
* <tr> <td><strong>Apply</strong></td> <td><code>0x2000000</code></td> <td>"Apply"</td> </tr>
* <tr> <td><strong>Reset</strong></td> <td><code>0x4000000</code></td> <td>"Reset"</td> </tr>
* <tr> <td><strong>RestoreDefaults</strong></td> <td><code>0x8000000</code></td> <td>"Restore Defaults"</td> </tr>
* </tbody>
* </table>
* @typedef Window.MessageBoxButton
*/
int WindowScriptingInterface::createMessageBox(QString title, QString text, int buttons, int defaultButton) {
auto messageBox = DependencyManager::get<OffscreenUi>()->createMessageBox(OffscreenUi::ICON_INFORMATION, title, text,
static_cast<QFlags<QMessageBox::StandardButton>>(buttons), static_cast<QMessageBox::StandardButton>(defaultButton));


@ -33,6 +33,21 @@ QScriptValue CustomPromptResultToScriptValue(QScriptEngine* engine, const Custom
void CustomPromptResultFromScriptValue(const QScriptValue& object, CustomPromptResult& result);
/**jsdoc
* The Window API provides various facilities not covered elsewhere: window dimensions, window focus, normal or entity camera
* view, clipboard, announcements, user connections, common dialog boxes, snapshots, file import, domain changes, domain
* physics.
*
* @namespace Window
* @property {number} innerWidth - The width of the drawable area of the Interface window (i.e., without borders or other
* chrome), in pixels. <em>Read-only.</em>
* @property {number} innerHeight - The height of the drawable area of the Interface window (i.e., without borders or other
* chrome) plus the height of the menu bar, in pixels. <em>Read-only.</em>
* @property {object} location - Provides facilities for working with your current metaverse location. See {@link location}.
* @property {number} x - The x coordinate of the top left corner of the Interface window on the display. <em>Read-only.</em>
* @property {number} y - The y coordinate of the top left corner of the Interface window on the display. <em>Read-only.</em>
*/
class WindowScriptingInterface : public QObject, public Dependency {
Q_OBJECT
Q_PROPERTY(int innerWidth READ getInnerWidth)
@ -48,63 +63,622 @@ public:
int getY();
public slots:
/**jsdoc
* Check if the Interface window has focus.
* @function Window.hasFocus
* @returns {boolean} <code>true</code> if the Interface window has focus, otherwise <code>false</code>.
*/
QScriptValue hasFocus();
/**jsdoc
* Make the Interface window have focus.
* @function Window.setFocus
*/
void setFocus();
/**jsdoc
* Raise the Interface window if it is minimized, and give it focus.
* @function Window.raiseMainWindow
*/
void raiseMainWindow();
/**jsdoc
* Display a dialog with the specified message and an "OK" button. The dialog is non-modal; the script continues without
* waiting for a user response.
* @function Window.alert
* @param {string} message="" - The message to display.
* @example <caption>Display a friendly greeting.</caption>
* Window.alert("Welcome!");
* print("Script continues without waiting");
*/
void alert(const QString& message = "");
/**jsdoc
* Prompt the user to confirm something. Displays a modal dialog with a message plus "Yes" and "No" buttons; the script
* waits until the user responds.
* @function Window.confirm
* @param {string} message="" - The question to display.
* @returns {boolean} <code>true</code> if the user selects "Yes", otherwise <code>false</code>.
* @example <caption>Ask the user a question requiring a yes/no answer.</caption>
* var answer = Window.confirm("Are you sure?");
* print(answer); // true or false
*/
QScriptValue confirm(const QString& message = "");
/**jsdoc
* Prompt the user to enter some text. Displays a modal dialog with a message and a text box, plus "OK" and "Cancel"
* buttons.
* @function Window.prompt
* @param {string} message - The question to display.
* @param {string} defaultText - The default answer text.
* @returns {string} The text that the user entered if they select "OK", otherwise "".
* @example <caption>Ask the user a question requiring a text answer.</caption>
* var answer = Window.prompt("Question", "answer");
* if (answer === "") {
* print("User canceled");
* } else {
* print("User answer: " + answer);
* }
*/
QScriptValue prompt(const QString& message, const QString& defaultText);
/**jsdoc
* Prompt the user to enter some text. Displays a non-modal dialog with a message and a text box, plus "OK" and "Cancel"
* buttons. A {@link Window.promptTextChanged|promptTextChanged} signal is emitted when the user OKs the dialog; no signal
* is emitted if the user cancels the dialog.
* @function Window.promptAsync
* @param {string} message - The question to display.
* @param {string} defaultText - The default answer text.
* @example <caption>Ask the user a question requiring a text answer without waiting for the answer.</caption>
* function onPromptTextChanged(text) {
* print("User answer: " + text);
* }
* Window.promptTextChanged.connect(onPromptTextChanged);
*
* Window.promptAsync("Question", "answer");
* print("Script continues without waiting");
*/
void promptAsync(const QString& message = "", const QString& defaultText = "");
/**jsdoc
* Prompt the user for input in a custom, modal dialog.
* @deprecated This function is deprecated and will be removed.
* @function Window.customPrompt
* @param {object} config - Configures the modal dialog.
* @returns {object} The user's response.
*/
CustomPromptResult customPrompt(const QVariant& config);
/**jsdoc
* Prompt the user to choose a directory. Displays a modal dialog that navigates the directory tree.
* @function Window.browseDir
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @returns {string} The path of the directory if one is chosen, otherwise <code>null</code>.
* @example <caption>Ask the user to choose a directory.</caption>
* var directory = Window.browseDir("Select Directory", Paths.resources);
* print("Directory: " + directory);
*/
QScriptValue browseDir(const QString& title = "", const QString& directory = "");
/**jsdoc
* Prompt the user to choose a directory. Displays a non-modal dialog that navigates the directory tree. A
* {@link Window.browseDirChanged|browseDirChanged} signal is emitted when a directory is chosen; no signal is emitted if
* the user cancels the dialog.
* @function Window.browseDirAsync
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @example <caption>Ask the user to choose a directory without waiting for the answer.</caption>
* function onBrowseDirChanged(directory) {
* print("Directory: " + directory);
* }
* Window.browseDirChanged.connect(onBrowseDirChanged);
*
* Window.browseDirAsync("Select Directory", Paths.resources);
* print("Script continues without waiting");
*/
void browseDirAsync(const QString& title = "", const QString& directory = "");
/**jsdoc
* Prompt the user to choose a file. Displays a modal dialog that navigates the directory tree.
* @function Window.browse
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @returns {string} The path and name of the file if one is chosen, otherwise <code>null</code>.
* @example <caption>Ask the user to choose an image file.</caption>
* var filename = Window.browse("Select Image File", Paths.resources, "Images (*.png *.jpg *.svg)");
* print("File: " + filename);
*/
QScriptValue browse(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to choose a file. Displays a non-modal dialog that navigates the directory tree. A
* {@link Window.openFileChanged|openFileChanged} signal is emitted when a file is chosen; no signal is emitted if the user
* cancels the dialog.
* @function Window.browseAsync
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @example <caption>Ask the user to choose an image file without waiting for the answer.</caption>
* function onOpenFileChanged(filename) {
* print("File: " + filename);
* }
* Window.openFileChanged.connect(onOpenFileChanged);
*
* Window.browseAsync("Select Image File", Paths.resources, "Images (*.png *.jpg *.svg)");
* print("Script continues without waiting");
*/
void browseAsync(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to specify the path and name of a file to save to. Displays a modal dialog that navigates the directory
* tree and allows the user to type in a file name.
* @function Window.save
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @returns {string} The path and name of the file if one is specified, otherwise <code>null</code>. If a single file type
* is specified in the nameFilter, that file type extension is automatically appended to the result when appropriate.
* @example <caption>Ask the user to specify a file to save to.</caption>
* var filename = Window.save("Save to JSON file", Paths.resources, "*.json");
* print("File: " + filename);
*/
QScriptValue save(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to specify the path and name of a file to save to. Displays a non-modal dialog that navigates the
* directory tree and allows the user to type in a file name. A {@link Window.saveFileChanged|saveFileChanged} signal is
* emitted when a file is specified; no signal is emitted if the user cancels the dialog.
* @function Window.saveAsync
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @example <caption>Ask the user to specify a file to save to without waiting for an answer.</caption>
* function onSaveFileChanged(filename) {
* print("File: " + filename);
* }
* Window.saveFileChanged.connect(onSaveFileChanged);
*
* Window.saveAsync("Save to JSON file", Paths.resources, "*.json");
* print("Script continues without waiting");
*/
void saveAsync(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to choose an Asset Server item. Displays a modal dialog that navigates the tree of assets on the Asset
* Server.
* @function Window.browseAssets
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @returns {string} The path and name of the asset if one is chosen, otherwise <code>null</code>.
* @example <caption>Ask the user to select an FBX asset.</caption>
* var asset = Window.browseAssets("Select FBX File", "/", "*.fbx");
* print("FBX file: " + asset);
*/
QScriptValue browseAssets(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to choose an Asset Server item. Displays a non-modal dialog that navigates the tree of assets on the
* Asset Server. A {@link Window.assetsDirChanged|assetsDirChanged} signal is emitted when an asset is chosen; no signal is
* emitted if the user cancels the dialog.
* @function Window.browseAssetsAsync
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @example
* function onAssetsDirChanged(asset) {
* print("FBX file: " + asset);
* }
* Window.assetsDirChanged.connect(onAssetsDirChanged);
*
* Window.browseAssetsAsync("Select FBX File", "/", "*.fbx");
* print("Script continues without waiting");
*/
void browseAssetsAsync(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Open the Asset Browser dialog. If a file to upload is specified, the user is prompted to enter the folder and name to
* map the file to on the asset server.
* @function Window.showAssetServer
* @param {string} uploadFile="" - The path and name of a file to upload to the asset server.
* @example <caption>Upload a file to the asset server.</caption>
* var filename = Window.browse("Select File to Add to Asset Server", Paths.resources);
* print("File: " + filename);
* Window.showAssetServer(filename);
*/
void showAssetServer(const QString& upload = "");
/**jsdoc
* Get Interface's build number.
* @function Window.checkVersion
* @returns {string} - Interface's build number.
*/
QString checkVersion();
/**jsdoc
* Copies text to the operating system's clipboard.
* @function Window.copyToClipboard
* @param {string} text - The text to copy to the operating system's clipboard.
*/
void copyToClipboard(const QString& text);
/**jsdoc
* Takes a snapshot of the current Interface view from the primary camera. When a still image only is captured,
* {@link Window.stillSnapshotTaken|stillSnapshotTaken} is emitted; when a still image plus moving images are captured,
* {@link Window.processingGifStarted|processingGifStarted} and {@link Window.processingGifCompleted|processingGifCompleted}
* are emitted. The path to store the snapshots and the length of the animated GIF to capture are specified in Settings >
* General > Snapshots.
* @function Window.takeSnapshot
* @param {boolean} notify=true - This value is passed on through the {@link Window.stillSnapshotTaken|stillSnapshotTaken}
* signal.
* @param {boolean} includeAnimated=false - If <code>true</code>, a moving image is captured as an animated GIF in addition
* to a still image.
* @param {number} aspectRatio=0 - The width/height ratio of the snapshot required. If the value is <code>0</code> the
* full resolution is used (window dimensions in desktop mode; HMD display dimensions in HMD mode), otherwise one of the
* dimensions is adjusted in order to match the aspect ratio.
* @example <caption>Using the snapshot function and signals.</caption>
* function onStillSnapshottaken(path, notify) {
* print("Still snapshot taken: " + path);
* print("Notify: " + notify);
* }
*
* function onProcessingGifStarted(stillPath) {
* print("Still snapshot taken: " + stillPath);
* }
*
* function onProcessingGifCompleted(animatedPath) {
* print("Animated snapshot taken: " + animatedPath);
* }
*
* Window.stillSnapshotTaken.connect(onStillSnapshottaken);
* Window.processingGifStarted.connect(onProcessingGifStarted);
* Window.processingGifCompleted.connect(onProcessingGifCompleted);
*
* var notify = true;
* var animated = true;
* var aspect = 1920 / 1080;
* Window.takeSnapshot(notify, animated, aspect);
*/
void takeSnapshot(bool notify = true, bool includeAnimated = false, float aspectRatio = 0.0f);
/**jsdoc
* Takes a still snapshot of the current view from the secondary camera that can be set up through the {@link Render} API.
* @function Window.takeSecondaryCameraSnapshot
*/
void takeSecondaryCameraSnapshot();
/**jsdoc
* Emit a {@link Window.connectionAdded|connectionAdded} or a {@link Window.connectionError|connectionError} signal that
* indicates whether or not a user connection was successfully made using the Web API.
* @function Window.makeConnection
* @param {boolean} success - If <code>true</code> then {@link Window.connectionAdded|connectionAdded} is emitted, otherwise
* {@link Window.connectionError|connectionError} is emitted.
* @param {string} description - Descriptive text about the connection success or error. This is sent in the signal emitted.
*/
void makeConnection(bool success, const QString& userNameOrError);
/**jsdoc
* Display a notification message. Notifications are displayed in panels by the default script, notifications.js. An
* {@link Window.announcement|announcement} signal is emitted when this function is called.
* @function Window.displayAnnouncement
* @param {string} message - The announcement message.
* @example <caption>Send and capture an announcement message.</caption>
* function onAnnouncement(message) {
* // The message is also displayed as a notification by notifications.js.
* print("Announcement: " + message);
* }
* Window.announcement.connect(onAnnouncement);
*
* Window.displayAnnouncement("Hello");
*/
void displayAnnouncement(const QString& message);
/**jsdoc
* Prepare a snapshot ready for sharing. A {@link Window.snapshotShared|snapshotShared} signal is emitted when the snapshot
* has been prepared.
* @function Window.shareSnapshot
* @param {string} path - The path and name of the image file to share.
* @param {string} href="" - The metaverse location where the snapshot was taken.
*/
void shareSnapshot(const QString& path, const QUrl& href = QUrl(""));
/**jsdoc
* Check to see if physics is active for you in the domain you're visiting - there is a delay between your arrival at a
* domain and physics becoming active for you in that domain.
* @function Window.isPhysicsEnabled
* @returns {boolean} <code>true</code> if physics is currently active for you, otherwise <code>false</code>.
* @example <caption>Wait for physics to be enabled when you change domains.</caption>
* function checkForPhysics() {
* var isPhysicsEnabled = Window.isPhysicsEnabled();
* print("Physics enabled: " + isPhysicsEnabled);
* if (!isPhysicsEnabled) {
* Script.setTimeout(checkForPhysics, 1000);
* }
* }
*
* function onDomainChanged(domain) {
* print("Domain changed: " + domain);
* Script.setTimeout(checkForPhysics, 1000);
* }
*
* Window.domainChanged.connect(onDomainChanged);
*/
bool isPhysicsEnabled();
/**jsdoc
* Set what to show on the PC display: normal view or entity camera view. The entity camera is configured using
* {@link Camera.setCameraEntity} and {@link Camera|Camera.mode}.
* @function Window.setDisplayTexture
* @param {Window.DisplayTexture} texture - The view to display.
* @returns {boolean} <code>true</code> if the display texture was successfully set, otherwise <code>false</code>.
*/
// See spectatorCamera.js for Valid parameter values.
/**jsdoc
* <p>The views that may be displayed on the PC display.</p>
* <table>
* <thead>
* <tr>
* <th>Value</th>
* <th>View Displayed</th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <td><code>""</code></td>
* <td>Normal view.</td>
* </tr>
* <tr>
* <td><code>"resource://spectatorCameraFrame"</code></td>
* <td>Entity camera view.</td>
* </tr>
* </tbody>
* </table>
* @typedef Window.DisplayTexture
*/
bool setDisplayTexture(const QString& name);
/**jsdoc
* Check if a 2D point is within the desktop window if in desktop mode, or the drawable area of the HUD overlay if in HMD
* mode.
* @function Window.isPointOnDesktopWindow
* @param {Vec2} point - The point to check.
* @returns {boolean} <code>true</code> if the point is within the window or HUD, otherwise <code>false</code>.
*/
bool isPointOnDesktopWindow(QVariant point);
/**jsdoc
* Get the size of the drawable area of the Interface window if in desktop mode or the HMD rendering surface if in HMD mode.
* @function Window.getDeviceSize
* @returns {Vec2} The width and height of the Interface window or HMD rendering surface, in pixels.
*/
glm::vec2 getDeviceSize() const;
/**jsdoc
* Open a non-modal message box that can have a variety of button combinations. See also,
* {@link Window.updateMessageBox|updateMessageBox} and {@link Window.closeMessageBox|closeMessageBox}.
* @function Window.openMessageBox
* @param {string} title - The title to display for the message box.
* @param {string} text - Text to display in the message box.
* @param {Window.MessageBoxButton} buttons - The buttons to display on the message box; one or more button values added
* together.
* @param {Window.MessageBoxButton} defaultButton - The button that has focus when the message box is opened.
* @returns {number} The ID of the message box created.
* @example <caption>Ask the user whether they want to reset something.</caption>
* var messageBox;
* var resetButton = 0x4000000;
* var cancelButton = 0x400000;
*
* function onMessageBoxClosed(id, button) {
* if (id === messageBox) {
* if (button === resetButton) {
* print("Reset");
* } else {
* print("Don't reset");
* }
* }
* }
* Window.messageBoxClosed.connect(onMessageBoxClosed);
*
* messageBox = Window.openMessageBox("Reset Something",
* "Do you want to reset something?",
* resetButton + cancelButton, cancelButton);
*/
int openMessageBox(QString title, QString text, int buttons, int defaultButton);
/**jsdoc
* Update the content of a message box that was opened with {@link Window.openMessageBox|openMessageBox}.
* @function Window.updateMessageBox
* @param {number} id - The ID of the message box.
* @param {string} title - The title to display for the message box.
* @param {string} text - Text to display in the message box.
* @param {Window.MessageBoxButton} buttons - The buttons to display on the message box; one or more button values added
* together.
* @param {Window.MessageBoxButton} defaultButton - The button that has focus when the message box is opened.
*/
void updateMessageBox(int id, QString title, QString text, int buttons, int defaultButton);
/**jsdoc
* Close a message box that was opened with {@link Window.openMessageBox|openMessageBox}.
* @function Window.closeMessageBox
* @param {number} id - The ID of the message box.
*/
void closeMessageBox(int id);
private slots:
void onMessageBoxSelected(int button);
signals:
void domainChanged(const QString& domainHostname);
/**jsdoc
* Triggered when you change the domain you're visiting. <strong>Warning:</strong> Not emitted if you go to a domain that
* isn't running.
* @function Window.domainChanged
* @param {string} domain - The domain's IP address.
* @returns {Signal}
* @example <caption>Report when you change domains.</caption>
* function onDomainChanged(domain) {
* print("Domain changed: " + domain);
* }
*
* Window.domainChanged.connect(onDomainChanged);
*/
void domainChanged(const QString& domain);
/**jsdoc
* Triggered when you try to navigate to a *.json, *.svo, or *.svo.json URL in a Web browser within Interface.
* @function Window.svoImportRequested
* @param {string} url - The URL of the file to import.
* @returns {Signal}
*/
void svoImportRequested(const QString& url);
/**jsdoc
* Triggered when you try to visit a domain but are refused connection.
* @function Window.domainConnectionRefused
* @param {string} reasonMessage - A description of the refusal.
* @param {Window.ConnectionRefusedReason} reasonCode - Integer number that enumerates the reason for the refusal.
* @param {string} extraInfo - Extra information about the refusal.
* @returns {Signal}
*/
void domainConnectionRefused(const QString& reasonMessage, int reasonCode, const QString& extraInfo);
/**jsdoc
* Triggered when a still snapshot has been taken by calling {@link Window.takeSnapshot|takeSnapshot} with
* <code>includeAnimated = false</code>.
* @function Window.stillSnapshotTaken
* @param {string} pathStillSnapshot - The path and name of the snapshot image file.
* @param {boolean} notify - The value of the <code>notify</code> parameter that {@link Window.takeSnapshot|takeSnapshot}
* was called with.
* @returns {Signal}
*/
void stillSnapshotTaken(const QString& pathStillSnapshot, bool notify);
/**jsdoc
* Triggered when a snapshot submitted via {@link Window.shareSnapshot|shareSnapshot} is ready for sharing. The snapshot
* may then be shared via the {@link Account.metaverseServerURL} Web API.
* @function Window.snapshotShared
* @param {boolean} isError - <code>true</code> if an error was encountered preparing the snapshot for sharing, otherwise
* <code>false</code>.
* @param {string} reply - JSON-formatted information about the snapshot.
* @returns {Signal}
*/
void snapshotShared(bool isError, const QString& reply);
/**jsdoc
* Triggered when the snapshot images have been captured by {@link Window.takeSnapshot|takeSnapshot} and the GIF is
* starting to be processed.
* @function Window.processingGifStarted
* @param {string} pathStillSnapshot - The path and name of the still snapshot image file.
* @returns {Signal}
*/
void processingGifStarted(const QString& pathStillSnapshot);
/**jsdoc
* Triggered when a GIF has been prepared of the snapshot images captured by {@link Window.takeSnapshot|takeSnapshot}.
* @function Window.processingGifCompleted
* @param {string} pathAnimatedSnapshot - The path and name of the moving snapshot GIF file.
* @returns {Signal}
*/
void processingGifCompleted(const QString& pathAnimatedSnapshot);
/**jsdoc
* Triggered when you've successfully made a user connection.
* @function Window.connectionAdded
* @param {string} message - A description of the success.
* @returns {Signal}
*/
void connectionAdded(const QString& connectionName);
/**jsdoc
* Triggered when you failed to make a user connection.
* @function Window.connectionError
* @param {string} message - A description of the error.
* @returns {Signal}
*/
void connectionError(const QString& errorString);
/**jsdoc
* Triggered when a message is announced by {@link Window.displayAnnouncement|displayAnnouncement}.
* @function Window.announcement
* @param {string} message - The message text.
* @returns {Signal}
*/
void announcement(const QString& message);
/**jsdoc
* Triggered when the user closes a message box that was opened with {@link Window.openMessageBox|openMessageBox}.
* @function Window.messageBoxClosed
* @param {number} id - The ID of the message box that was closed.
* @param {number} button - The button that the user clicked. If the user presses Esc, the Cancel button value is returned,
* whether or not the Cancel button is displayed in the message box.
* @returns {Signal}
*/
void messageBoxClosed(int id, int button);
/**jsdoc
* Triggered when the user chooses a directory in a {@link Window.browseDirAsync|browseDirAsync} dialog.
* @function Window.browseDirChanged
* @param {string} directory - The directory the user chose in the dialog.
* @returns {Signal}
*/
void browseDirChanged(QString browseDir);
/**jsdoc
* Triggered when the user chooses an asset in a {@link Window.browseAssetsAsync|browseAssetsAsync} dialog.
* @function Window.assetsDirChanged
* @param {string} asset - The path and name of the asset the user chose in the dialog.
* @returns {Signal}
*/
void assetsDirChanged(QString assetsDir);
/**jsdoc
* Triggered when the user specifies a file in a {@link Window.saveAsync|saveAsync} dialog.
* @function Window.saveFileChanged
* @param {string} filename - The path and name of the file that the user specified in the dialog.
* @returns {Signal}
*/
void saveFileChanged(QString filename);
/**jsdoc
* Triggered when the user chooses a file in a {@link Window.browseAsync|browseAsync} dialog.
* @function Window.openFileChanged
* @param {string} filename - The path and name of the file the user chose in the dialog.
* @returns {Signal}
*/
void openFileChanged(QString filename);
/**jsdoc
* Triggered when the user OKs a {@link Window.promptAsync|promptAsync} dialog.
* @function Window.promptTextChanged
* @param {string} text - The text the user entered in the dialog.
* @returns {Signal}
*/
void promptTextChanged(QString text);
// triggered when window size or position changes
/**jsdoc
* Triggered when the position or size of the Interface window changes.
* @function Window.geometryChanged
* @param {Rect} geometry - The position and size of the drawable area of the Interface window.
* @returns {Signal}
* @example <caption>Report the position and size of the Interface window when it changes.</caption>
* function onWindowGeometryChanged(rect) {
* print("Window geometry: " + JSON.stringify(rect));
* }
*
* Window.geometryChanged.connect(onWindowGeometryChanged);
*/
void geometryChanged(QRect geometry);
private:


@ -37,6 +37,16 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
SkeletonModel::~SkeletonModel() {
}
void SkeletonModel::setURL(const QUrl& url) {
_texturesLoaded = false;
Model::setURL(url);
}
void SkeletonModel::setTextures(const QVariantMap& textures) {
_texturesLoaded = false;
Model::setTextures(textures);
}
void SkeletonModel::initJointStates() {
const FBXGeometry& geometry = getFBXGeometry();
glm::mat4 modelOffset = glm::scale(_scale) * glm::translate(_offset);
@ -142,6 +152,13 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
Parent::simulate(deltaTime, fullUpdate);
}
// FIXME: This texture loading logic should probably live in Avatar, to mirror RenderableModelEntityItem and ModelOverlay,
// but Avatars don't get updates in the same way
if (!_texturesLoaded && getGeometry() && getGeometry()->areTexturesLoaded()) {
_texturesLoaded = true;
updateRenderItems();
}
if (!isActive() || !_owningAvatar->isMyAvatar()) {
return; // only simulate for own avatar
}


@ -31,6 +31,9 @@ public:
SkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr);
~SkeletonModel();
Q_INVOKABLE void setURL(const QUrl& url) override;
Q_INVOKABLE void setTextures(const QVariantMap& textures) override;
void initJointStates() override;
void simulate(float deltaTime, bool fullUpdate = true) override;
@ -115,8 +118,6 @@ protected:
void computeBoundingShape();
protected:
bool getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
Avatar* _owningAvatar;
@ -128,6 +129,9 @@ protected:
glm::vec3 _defaultEyeModelPosition;
float _headClipDistance; // Near clip distance to use if no separate head model
private:
bool _texturesLoaded { false };
};
#endif // hifi_SkeletonModel_h


@ -965,7 +965,10 @@ void EntityItem::setMass(float mass) {
void EntityItem::setHref(QString value) {
auto href = value.toLower();
if (! (value.toLower().startsWith("hifi://")) ) {
// If the string has something and doesn't start with "hifi://" it shouldn't be set
// We allow the string to be empty, because that's the initial state of this property
if ( !(value.toLower().startsWith("hifi://")) && !value.isEmpty()) {
return;
}
withWriteLock([&] {
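The new guard accepts an empty href (the property's initial state) and otherwise requires the "hifi://" scheme. A tiny standalone C++ sketch of that acceptance rule, with a hypothetical helper name and sample inputs:

#include <QString>
#include <cassert>

// Mirrors the check in EntityItem::setHref(): empty is allowed, anything else
// must start with "hifi://" (case-insensitive) to be accepted.
static bool isAcceptableHref(const QString& value) {
    return value.isEmpty() || value.toLower().startsWith("hifi://");
}

int main() {
    assert(isAcceptableHref(""));                    // initial / cleared state
    assert(isAcceptableHref("hifi://welcome"));      // in-world link
    assert(!isAcceptableHref("http://example.com")); // rejected: wrong scheme
    return 0;
}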


@ -3,6 +3,7 @@
// libraries/midi/src
//
// Created by Burt Sloane
// Modified by Bruce Brown
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@ -14,30 +15,45 @@
#include <QtCore/QLoggingCategory>
#if defined Q_OS_WIN32
#include "Windows.h"
#endif
#if defined Q_OS_WIN32
const int MIDI_BYTE_MASK = 0x0FF;
const int MIDI_NIBBLE_MASK = 0x00F;
const int MIDI_PITCH_BEND_MASK = 0x3F80;
const int MIDI_SHIFT_STATUS = 4;
const int MIDI_SHIFT_NOTE = 8;
const int MIDI_SHIFT_VELOCITY = 16;
const int MIDI_SHIFT_PITCH_BEND = 9;
// Status Decode
const int MIDI_NOTE_OFF = 0x8;
const int MIDI_NOTE_ON = 0x9;
const int MIDI_POLYPHONIC_KEY_PRESSURE = 0xa;
const int MIDI_PROGRAM_CHANGE = 0xc;
const int MIDI_CHANNEL_PRESSURE = 0xd;
const int MIDI_PITCH_BEND_CHANGE = 0xe;
const int MIDI_SYSTEM_MESSAGE = 0xf;
#endif
const int MIDI_STATUS_MASK = 0x0F0;
const int MIDI_NOTE_OFF = 0x080;
const int MIDI_NOTE_ON = 0x090;
const int MIDI_CONTROL_CHANGE = 0x0b0;
const int MIDI_CONTROL_CHANGE = 0xb;
const int MIDI_CHANNEL_MODE_ALL_NOTES_OFF = 0x07b;
static Midi* instance = NULL; // communicate this to non-class callbacks
static Midi* instance = NULL; // communicate this to non-class callbacks
static bool thruModeEnabled = false;
static bool broadcastEnabled = false;
static bool typeNoteOffEnabled = true;
static bool typeNoteOnEnabled = true;
static bool typePolyKeyPressureEnabled = false;
static bool typeControlChangeEnabled = true;
static bool typeProgramChangeEnabled = true;
static bool typeChanPressureEnabled = false;
static bool typePitchBendEnabled = true;
static bool typeSystemMessageEnabled = false;
std::vector<QString> Midi::midiinexclude;
std::vector<QString> Midi::midioutexclude;
std::vector<QString> Midi::midiInExclude;
std::vector<QString> Midi::midiOutExclude;
#if defined Q_OS_WIN32
@ -47,7 +63,6 @@ std::vector<QString> Midi::midioutexclude;
std::vector<HMIDIIN> midihin;
std::vector<HMIDIOUT> midihout;
void CALLBACK MidiInProc(HMIDIIN hMidiIn, UINT wMsg, DWORD_PTR dwInstance, DWORD_PTR dwParam1, DWORD_PTR dwParam2) {
switch (wMsg) {
case MIM_OPEN:
@ -58,23 +73,64 @@ void CALLBACK MidiInProc(HMIDIIN hMidiIn, UINT wMsg, DWORD_PTR dwInstance, DWORD
if (midihin[i] == hMidiIn) {
midihin[i] = NULL;
instance->allNotesOff();
instance->midiHardwareChange();
}
}
break;
case MIM_DATA: {
int status = MIDI_BYTE_MASK & dwParam1;
int note = MIDI_BYTE_MASK & (dwParam1 >> MIDI_SHIFT_NOTE);
int vel = MIDI_BYTE_MASK & (dwParam1 >> MIDI_SHIFT_VELOCITY);
if (thruModeEnabled) {
instance->sendNote(status, note, vel); // relay the note on to all other midi devices
int device = -1;
for (int i = 0; i < midihin.size(); i++) {
if (midihin[i] == hMidiIn) {
device = i;
}
}
instance->noteReceived(status, note, vel); // notify the javascript
int raw = dwParam1;
int channel = (MIDI_NIBBLE_MASK & dwParam1) + 1;
int status = MIDI_BYTE_MASK & dwParam1;
int type = MIDI_NIBBLE_MASK & (dwParam1 >> MIDI_SHIFT_STATUS);
int note = MIDI_BYTE_MASK & (dwParam1 >> MIDI_SHIFT_NOTE);
int velocity = MIDI_BYTE_MASK & (dwParam1 >> MIDI_SHIFT_VELOCITY);
int bend = 0;
int program = 0;
if (!typeNoteOffEnabled && type == MIDI_NOTE_OFF) {
return;
}
if (!typeNoteOnEnabled && type == MIDI_NOTE_ON) {
return;
}
if (!typePolyKeyPressureEnabled && type == MIDI_POLYPHONIC_KEY_PRESSURE) {
return;
}
if (!typeControlChangeEnabled && type == MIDI_CONTROL_CHANGE) {
return;
}
if (typeProgramChangeEnabled && type == MIDI_PROGRAM_CHANGE) {
program = note;
note = 0;
}
if (typeChanPressureEnabled && type == MIDI_CHANNEL_PRESSURE) {
velocity = note;
note = 0;
}
if (typePitchBendEnabled && type == MIDI_PITCH_BEND_CHANGE) {
bend = ((MIDI_BYTE_MASK & (dwParam1 >> MIDI_SHIFT_NOTE)) |
(MIDI_PITCH_BEND_MASK & (dwParam1 >> MIDI_SHIFT_PITCH_BEND))) - 8192;
channel = 0; // Weird values on different instruments
note = 0;
velocity = 0;
}
if (!typeSystemMessageEnabled && type == MIDI_SYSTEM_MESSAGE) {
return;
}
if (thruModeEnabled) {
instance->sendNote(status, note, velocity); // relay the message on to all other midi devices.
}
instance->midiReceived(device, raw, channel, status, type, note, velocity, bend, program); // notify the javascript
break;
}
}
}
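The masks and shifts above unpack the packed Win32 MIM_DATA word: the low byte is the status byte (type nibble plus channel nibble), the next byte up is data1 (the note), and the byte above that is data2 (the velocity); for pitch bend, data1 and data2 are recombined into a signed 14-bit value centered on 8192. A standalone C++ sketch decoding one hypothetical word the same way (illustrative only):

#include <cstdio>

// Constants mirror the ones added in the diff above.
const int MIDI_BYTE_MASK = 0x0FF;
const int MIDI_NIBBLE_MASK = 0x00F;
const int MIDI_PITCH_BEND_MASK = 0x3F80;
const int MIDI_SHIFT_STATUS = 4;
const int MIDI_SHIFT_NOTE = 8;
const int MIDI_SHIFT_VELOCITY = 16;
const int MIDI_SHIFT_PITCH_BEND = 9;

int main() {
    int dwParam1 = 0x00457C91;  // hypothetical note-on: status 0x91, data1 0x7C, data2 0x45

    int channel  = (MIDI_NIBBLE_MASK & dwParam1) + 1;                    // 2
    int status   = MIDI_BYTE_MASK & dwParam1;                            // 0x91
    int type     = MIDI_NIBBLE_MASK & (dwParam1 >> MIDI_SHIFT_STATUS);   // 0x9 (note on)
    int note     = MIDI_BYTE_MASK & (dwParam1 >> MIDI_SHIFT_NOTE);       // 124
    int velocity = MIDI_BYTE_MASK & (dwParam1 >> MIDI_SHIFT_VELOCITY);   // 69

    // For type 0xe (pitch bend change) the two data bytes would instead be recombined:
    // ((MIDI_BYTE_MASK & (dwParam1 >> MIDI_SHIFT_NOTE)) |
    //  (MIDI_PITCH_BEND_MASK & (dwParam1 >> MIDI_SHIFT_PITCH_BEND))) - 8192;

    printf("channel=%d status=0x%x type=0x%x note=%d velocity=%d\n",
           channel, status, type, note, velocity);
    return 0;
}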
void CALLBACK MidiOutProc(HMIDIOUT hmo, UINT wMsg, DWORD_PTR dwInstance, DWORD_PTR dwParam1, DWORD_PTR dwParam2) {
switch (wMsg) {
case MOM_OPEN:
@ -85,21 +141,45 @@ void CALLBACK MidiOutProc(HMIDIOUT hmo, UINT wMsg, DWORD_PTR dwInstance, DWORD_P
if (midihout[i] == hmo) {
midihout[i] = NULL;
instance->allNotesOff();
instance->midiHardwareChange();
}
}
break;
}
}
void Midi::sendNote(int status, int note, int vel) {
for (int i = 0; i < midihout.size(); i++) {
if (midihout[i] != NULL) {
midiOutShortMsg(midihout[i], status + (note << MIDI_SHIFT_NOTE) + (vel << MIDI_SHIFT_VELOCITY));
void Midi::sendRawMessage(int device, int raw) {
if (broadcastEnabled) {
for (int i = 0; i < midihout.size(); i++) {
if (midihout[i] != NULL) {
midiOutShortMsg(midihout[i], raw);
}
}
} else {
midiOutShortMsg(midihout[device], raw);
}
}
void Midi::sendMessage(int device, int channel, int type, int note, int velocity) {
int message = (channel - 1) | (type << MIDI_SHIFT_STATUS);
if (broadcastEnabled) {
for (int i = 0; i < midihout.size(); i++) {
if (midihout[i] != NULL) {
midiOutShortMsg(midihout[i], message | (note << MIDI_SHIFT_NOTE) | (velocity << MIDI_SHIFT_VELOCITY));
}
}
} else {
midiOutShortMsg(midihout[device], message | (note << MIDI_SHIFT_NOTE) | (velocity << MIDI_SHIFT_VELOCITY));
}
}
void Midi::sendNote(int status, int note, int velocity) {
for (int i = 0; i < midihout.size(); i++) {
if (midihout[i] != NULL) {
midiOutShortMsg(midihout[i], status + (note << MIDI_SHIFT_NOTE) + (velocity << MIDI_SHIFT_VELOCITY));
}
}
}
void Midi::MidiSetup() {
midihin.clear();
@ -110,8 +190,8 @@ void Midi::MidiSetup() {
midiInGetDevCaps(i, &incaps, sizeof(MIDIINCAPS));
bool found = false;
for (int j = 0; j < midiinexclude.size(); j++) {
if (midiinexclude[j].toStdString().compare(incaps.szPname) == 0) {
for (int j = 0; j < midiInExclude.size(); j++) {
if (midiInExclude[j].toStdString().compare(incaps.szPname) == 0) {
found = true;
break;
}
@ -122,7 +202,6 @@ void Midi::MidiSetup() {
midiInStart(tmphin);
midihin.push_back(tmphin);
}
}
MIDIOUTCAPS outcaps;
@ -130,8 +209,8 @@ void Midi::MidiSetup() {
midiOutGetDevCaps(i, &outcaps, sizeof(MIDIINCAPS));
bool found = false;
for (int j = 0; j < midioutexclude.size(); j++) {
if (midioutexclude[j].toStdString().compare(outcaps.szPname) == 0) {
for (int j = 0; j < midiOutExclude.size(); j++) {
if (midiOutExclude[j].toStdString().compare(outcaps.szPname) == 0) {
found = true;
break;
}
@ -164,7 +243,13 @@ void Midi::MidiCleanup() {
midihout.clear();
}
#else
void Midi::sendNote(int status, int note, int vel) {
void Midi::sendRawMessage(int device, int raw) {
}
void Midi::sendNote(int status, int note, int velocity) {
}
void Midi::sendMessage(int device, int channel, int type, int note, int velocity){
}
void Midi::MidiSetup() {
@ -176,26 +261,30 @@ void Midi::MidiCleanup() {
}
#endif
void Midi::noteReceived(int status, int note, int velocity) {
if (((status & MIDI_STATUS_MASK) != MIDI_NOTE_OFF) &&
((status & MIDI_STATUS_MASK) != MIDI_NOTE_ON) &&
((status & MIDI_STATUS_MASK) != MIDI_CONTROL_CHANGE)) {
return; // NOTE: only sending note-on, note-off, and control-change to Javascript
}
void Midi::midiReceived(int device, int raw, int channel, int status, int type, int note, int velocity, int bend, int program) {
QVariantMap eventData;
eventData["device"] = device;
eventData["raw"] = raw;
eventData["channel"] = channel;
eventData["status"] = status;
eventData["type"] = type;
eventData["note"] = note;
eventData["velocity"] = velocity;
emit midiNote(eventData);
eventData["bend"] = bend;
eventData["program"] = program;
emit midiNote(eventData); // Legacy
emit midiMessage(eventData);
}
void Midi::midiHardwareChange() {
emit midiReset();
}
//
Midi::Midi() {
instance = this;
#if defined Q_OS_WIN32
midioutexclude.push_back("Microsoft GS Wavetable Synth"); // we don't want to hear this thing
midiOutExclude.push_back("Microsoft GS Wavetable Synth"); // we don't want to hear this thing (Lags)
#endif
MidiSetup();
}
@ -203,10 +292,18 @@ Midi::Midi() {
Midi::~Midi() {
}
void Midi::sendRawDword(int device, int raw) {
sendRawMessage(device, raw);
}
void Midi::playMidiNote(int status, int note, int velocity) {
sendNote(status, note, velocity);
}
void Midi::sendMidiMessage(int device, int channel, int type, int note, int velocity) {
sendMessage(device, channel, type, note, velocity);
}
void Midi::allNotesOff() {
sendNote(MIDI_CONTROL_CHANGE, MIDI_CHANNEL_MODE_ALL_NOTES_OFF, 0); // all notes off
}
@ -219,6 +316,7 @@ void Midi::resetDevices() {
void Midi::USBchanged() {
instance->MidiCleanup();
instance->MidiSetup();
instance->midiHardwareChange();
}
//
@ -245,16 +343,16 @@ QStringList Midi::listMidiDevices(bool output) {
void Midi::unblockMidiDevice(QString name, bool output) {
if (output) {
for (unsigned long i = 0; i < midioutexclude.size(); i++) {
if (midioutexclude[i].toStdString().compare(name.toStdString()) == 0) {
midioutexclude.erase(midioutexclude.begin() + i);
for (unsigned long i = 0; i < midiOutExclude.size(); i++) {
if (midiOutExclude[i].toStdString().compare(name.toStdString()) == 0) {
midiOutExclude.erase(midiOutExclude.begin() + i);
break;
}
}
} else {
for (unsigned long i = 0; i < midiinexclude.size(); i++) {
if (midiinexclude[i].toStdString().compare(name.toStdString()) == 0) {
midiinexclude.erase(midiinexclude.begin() + i);
for (unsigned long i = 0; i < midiInExclude.size(); i++) {
if (midiInExclude[i].toStdString().compare(name.toStdString()) == 0) {
midiInExclude.erase(midiInExclude.begin() + i);
break;
}
}
@ -264,9 +362,9 @@ void Midi::unblockMidiDevice(QString name, bool output) {
void Midi::blockMidiDevice(QString name, bool output) {
unblockMidiDevice(name, output); // make sure it's only in there once
if (output) {
midioutexclude.push_back(name);
midiOutExclude.push_back(name);
} else {
midiinexclude.push_back(name);
midiInExclude.push_back(name);
}
}
@ -274,3 +372,38 @@ void Midi::thruModeEnable(bool enable) {
thruModeEnabled = enable;
}
void Midi::broadcastEnable(bool enable) {
broadcastEnabled = enable;
}
void Midi::typeNoteOffEnable(bool enable) {
typeNoteOffEnabled = enable;
}
void Midi::typeNoteOnEnable(bool enable) {
typeNoteOnEnabled = enable;
}
void Midi::typePolyKeyPressureEnable(bool enable) {
typePolyKeyPressureEnabled = enable;
}
void Midi::typeControlChangeEnable(bool enable) {
typeControlChangeEnabled = enable;
}
void Midi::typeProgramChangeEnable(bool enable) {
typeProgramChangeEnabled = enable;
}
void Midi::typeChanPressureEnable(bool enable) {
typeChanPressureEnabled = enable;
}
void Midi::typePitchBendEnable(bool enable) {
typePitchBendEnabled = enable;
}
void Midi::typeSystemMessageEnable(bool enable) {
typeSystemMessageEnabled = enable;
}
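For reference, a minimal script-side sketch of how the fields produced by the MIM_DATA parsing above map onto the raw DWORD. The literal masks and shifts (0xFF, 0x0F, 8, 16, 4) are assumptions standing in for MIDI_BYTE_MASK, MIDI_NIBBLE_MASK, MIDI_SHIFT_NOTE, MIDI_SHIFT_VELOCITY and MIDI_SHIFT_STATUS, which are defined elsewhere in the library.
// Hypothetical helper mirroring the MIM_DATA parsing in MidiInProc; the mask and
// shift constants are assumed to follow the standard MIDI short-message packing.
function decodeShortMessage(raw) {
    var status = raw & 0xFF;             // full status byte
    var channel = (raw & 0x0F) + 1;      // low nibble + 1, as in MidiInProc
    var type = (raw >> 4) & 0x0F;        // high nibble: 0x8 note off, 0x9 note on, ...
    var note = (raw >> 8) & 0xFF;        // first data byte
    var velocity = (raw >> 16) & 0xFF;   // second data byte
    return { status: status, channel: channel, type: type, note: note, velocity: velocity };
}
// Example: 0x007F3C90 decodes to a note-on (type 0x9) on channel 1, note 60, velocity 127.
print(JSON.stringify(decodeShortMessage(0x007F3C90)));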

View file

@ -3,6 +3,7 @@
// libraries/midi/src
//
// Created by Burt Sloane
// Modified by Bruce Brown
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@ -24,13 +25,16 @@ class Midi : public QObject, public Dependency {
SINGLETON_DEPENDENCY
public:
void noteReceived(int status, int note, int velocity); // relay a note to Javascript
void sendNote(int status, int note, int vel); // relay a note to MIDI outputs
void midiReceived(int device, int raw, int channel, int status, int type, int note, int velocity, int bend, int program); // relay a note to Javascript
void midiHardwareChange(); // relay hardware change to Javascript
void sendRawMessage(int device, int raw); // relay midi message to MIDI outputs
void sendNote(int status, int note, int velocity); // relay a note to MIDI outputs
void sendMessage(int device, int channel, int type, int note, int velocity); // relay a message to MIDI outputs
static void USBchanged();
private:
static std::vector<QString> midiinexclude;
static std::vector<QString> midioutexclude;
static std::vector<QString> midiInExclude;
static std::vector<QString> midiOutExclude;
private:
void MidiSetup();
@ -38,31 +42,63 @@ private:
signals:
void midiNote(QVariantMap eventData);
void midiMessage(QVariantMap eventData);
void midiReset();
public slots:
/// play a note on all connected devices
/// @param {int} status: 0x80 is noteoff, 0x90 is noteon (if velocity=0, noteoff), etc
/// @param {int} note: midi note number
/// @param {int} velocity: note velocity (0 means noteoff)
Q_INVOKABLE void playMidiNote(int status, int note, int velocity);
public slots:
// Send Raw Midi Packet to all connected devices
Q_INVOKABLE void sendRawDword(int device, int raw);
/// Send Raw Midi message to selected device
/// @param {int} device: device number
/// @param {int} raw: raw midi message (DWORD)
/// turn off all notes on all connected devices
Q_INVOKABLE void allNotesOff();
// Send Midi Message to all connected devices
Q_INVOKABLE void sendMidiMessage(int device, int channel, int type, int note, int velocity);
/// Send midi message to selected device/devices
/// @param {int} device: device number
/// @param {int} channel: channel number
/// @param {int} type: 0x8 is noteoff, 0x9 is noteon (if velocity=0, noteoff), etc
/// @param {int} note: midi note number
/// @param {int} velocity: note velocity (0 means noteoff)
/// clean up and re-discover attached devices
Q_INVOKABLE void resetDevices();
// Send Midi Message to all connected devices
Q_INVOKABLE void playMidiNote(int status, int note, int velocity);
/// play a note on all connected devices
/// @param {int} status: 0x80 is noteoff, 0x90 is noteon (if velocity=0, noteoff), etc
/// @param {int} note: midi note number
/// @param {int} velocity: note velocity (0 means noteoff)
/// ask for a list of inputs/outputs
Q_INVOKABLE QStringList listMidiDevices(bool output);
/// turn off all notes on all connected devices
Q_INVOKABLE void allNotesOff();
/// block an input/output by name
Q_INVOKABLE void blockMidiDevice(QString name, bool output);
/// clean up and re-discover attached devices
Q_INVOKABLE void resetDevices();
/// unblock an input/output by name
Q_INVOKABLE void unblockMidiDevice(QString name, bool output);
/// ask for a list of inputs/outputs
Q_INVOKABLE QStringList listMidiDevices(bool output);
/// block an input/output by name
Q_INVOKABLE void blockMidiDevice(QString name, bool output);
/// unblock an input/output by name
Q_INVOKABLE void unblockMidiDevice(QString name, bool output);
/// repeat all incoming notes to all outputs (default disabled)
Q_INVOKABLE void thruModeEnable(bool enable);
/// broadcast on all unblocked devices
Q_INVOKABLE void broadcastEnable(bool enable);
/// filter by event types
Q_INVOKABLE void typeNoteOffEnable(bool enable);
Q_INVOKABLE void typeNoteOnEnable(bool enable);
Q_INVOKABLE void typePolyKeyPressureEnable(bool enable);
Q_INVOKABLE void typeControlChangeEnable(bool enable);
Q_INVOKABLE void typeProgramChangeEnable(bool enable);
Q_INVOKABLE void typeChanPressureEnable(bool enable);
Q_INVOKABLE void typePitchBendEnable(bool enable);
Q_INVOKABLE void typeSystemMessageEnable(bool enable);
/// repeat all incoming notes to all outputs (default disabled)
Q_INVOKABLE void thruModeEnable(bool enable);
public:
Midi();
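A minimal usage sketch of the interface declared above, assuming it is exposed to Interface scripts as the global Midi and that its signals connect through the usual Qt-to-script bridge (both assumptions, not part of this diff):
var outputs = Midi.listMidiDevices(true);   // true = outputs, false = inputs
print("MIDI outputs: " + JSON.stringify(outputs));
Midi.typeNoteOffEnable(true);               // event-type filters declared above
Midi.thruModeEnable(false);
Midi.broadcastEnable(true);                 // send to every unblocked output
// New-style message: device 0, channel 1, note-on (type 0x9), middle C, velocity 100
Midi.sendMidiMessage(0, 1, 0x9, 60, 100);
// midiMessage carries the fields assembled in midiReceived()
Midi.midiMessage.connect(function (eventData) {
    print("ch " + eventData.channel + " type " + eventData.type +
          " note " + eventData.note + " vel " + eventData.velocity);
});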

View file

@ -84,6 +84,46 @@ public:
void softReset();
/**jsdoc
* <p>The reasons that you may be refused connection to a domain are defined by numeric values:</p>
* <table>
* <thead>
* <tr>
* <th>Reason</th>
* <th>Value</th>
* <th>Description</th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <td><strong>Unknown</strong></td>
* <td><code>0</code></td>
* <td>Some unknown reason.</td>
* </tr>
* <tr>
* <td><strong>ProtocolMismatch</strong></td>
* <td><code>1</code></td>
* <td>The communications protocols of the domain and your Interface are not the same.</td>
* </tr>
* <tr>
* <td><strong>LoginError</strong></td>
* <td><code>2</code></td>
* <td>You could not be logged into the domain.</td>
* </tr>
* <tr>
* <td><strong>NotAuthorized</strong></td>
* <td><code>3</code></td>
* <td>You are not authorized to connect to the domain.</td>
* </tr>
* <tr>
* <td><strong>TooManyUsers</strong></td>
* <td><code>4</code></td>
* <td>The domain already has its maximum number of users.</td>
* </tr>
* </tbody>
* </table>
* @typedef Window.ConnectionRefusedReason
*/
enum class ConnectionRefusedReason : uint8_t {
Unknown,
ProtocolMismatch,
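A sketch of consuming these numeric codes from a script. Only the value-to-name mapping comes from the table above; the Window.domainConnectionRefused signal name and argument order are assumptions used for illustration.
var REFUSED_REASON_NAMES = ["Unknown", "ProtocolMismatch", "LoginError",
                            "NotAuthorized", "TooManyUsers"]; // values 0-4 from the table above
Window.domainConnectionRefused.connect(function (reasonMessage, reasonCode) {
    var name = REFUSED_REASON_NAMES[reasonCode] || "Unknown";
    print("Connection refused (" + name + "): " + reasonMessage);
});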

View file

@ -46,10 +46,7 @@ void CauterizedMeshPartPayload::bindTransform(gpu::Batch& batch, const render::S
}
batch.setModelTransform(_cauterizedTransform);
} else {
if (_clusterBuffer) {
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, _clusterBuffer);
}
batch.setModelTransform(_transform);
ModelMeshPartPayload::bindTransform(batch, locations, renderMode);
}
}

View file

@ -480,23 +480,14 @@ ShapeKey ModelMeshPartPayload::getShapeKey() const {
}
void ModelMeshPartPayload::bindMesh(gpu::Batch& batch) {
if (!_isBlendShaped) {
batch.setIndexBuffer(gpu::UINT32, (_drawMesh->getIndexBuffer()._buffer), 0);
batch.setInputFormat((_drawMesh->getVertexFormat()));
batch.setInputStream(0, _drawMesh->getVertexStream());
batch.setIndexBuffer(gpu::UINT32, (_drawMesh->getIndexBuffer()._buffer), 0);
batch.setInputFormat((_drawMesh->getVertexFormat()));
if (_isBlendShaped && _blendedVertexBuffer) {
batch.setInputBuffer(0, _blendedVertexBuffer, 0, sizeof(glm::vec3));
batch.setInputBuffer(1, _blendedVertexBuffer, _drawMesh->getNumVertices() * sizeof(glm::vec3), sizeof(glm::vec3));
batch.setInputStream(2, _drawMesh->getVertexStream().makeRangedStream(2));
} else {
batch.setIndexBuffer(gpu::UINT32, (_drawMesh->getIndexBuffer()._buffer), 0);
batch.setInputFormat((_drawMesh->getVertexFormat()));
if (_blendedVertexBuffer) {
batch.setInputBuffer(0, _blendedVertexBuffer, 0, sizeof(glm::vec3));
batch.setInputBuffer(1, _blendedVertexBuffer, _drawMesh->getNumVertices() * sizeof(glm::vec3), sizeof(glm::vec3));
batch.setInputStream(2, _drawMesh->getVertexStream().makeRangedStream(2));
} else {
batch.setIndexBuffer(gpu::UINT32, (_drawMesh->getIndexBuffer()._buffer), 0);
batch.setInputFormat((_drawMesh->getVertexFormat()));
batch.setInputStream(0, _drawMesh->getVertexStream());
}
batch.setInputStream(0, _drawMesh->getVertexStream());
}
}

View file

@ -78,7 +78,7 @@ public:
/// Sets the URL of the model to render.
// Should only be called from the model's rendering thread to avoid access violations of changed geometry.
Q_INVOKABLE void setURL(const QUrl& url);
Q_INVOKABLE virtual void setURL(const QUrl& url);
const QUrl& getURL() const { return _url; }
// new Scene/Engine rendering support
@ -136,7 +136,7 @@ public:
const Geometry::Pointer& getCollisionGeometry() const { return _collisionGeometry; }
const QVariantMap getTextures() const { assert(isLoaded()); return _renderGeometry->getTextures(); }
Q_INVOKABLE void setTextures(const QVariantMap& textures);
Q_INVOKABLE virtual void setTextures(const QVariantMap& textures);
/// Provided as a convenience, will crash if !isLoaded()
// And so that getGeometry() isn't chained everywhere

View file

@ -262,7 +262,7 @@ controller::Pose ovrControllerPoseToHandPose(
pose.translation = toGlm(handPose.ThePose.Position);
pose.translation += rotation * translationOffset;
pose.rotation = rotation * rotationOffset;
pose.angularVelocity = toGlm(handPose.AngularVelocity);
pose.angularVelocity = rotation * toGlm(handPose.AngularVelocity);
pose.velocity = toGlm(handPose.LinearVelocity);
pose.valid = true;
return pose;

View file

@ -29,7 +29,8 @@ var DEFAULT_SCRIPTS_COMBINED = [
"system/notifications.js",
"system/dialTone.js",
"system/firstPersonHMD.js",
"system/tablet-ui/tabletUI.js"
"system/tablet-ui/tabletUI.js",
"system/emote.js"
];
var DEFAULT_SCRIPTS_SEPARATE = [
"system/controllers/controllerScripts.js"

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -148,8 +148,8 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
this.setIgnorePointerItems = function() {
if (HMD.tabletID !== this.tabletID) {
this.tabletID = HMD.tabletID;
Pointers.setIgnoreItems(_this.leftPointer, _this.blacklist);
Pointers.setIgnoreItems(_this.rightPointer, _this.blacklist);
Pointers.setIgnoreItems(_this.leftPointer, _this.blacklist.concat([HMD.tabletID]));
Pointers.setIgnoreItems(_this.rightPointer, _this.blacklist.concat([HMD.tabletID]));
}
};
@ -378,8 +378,8 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
};
this.setBlacklist = function() {
RayPick.setIgnoreItems(_this.leftControllerRayPick, this.blacklist);
RayPick.setIgnoreItems(_this.rightControllerRayPick, this.blacklist);
RayPick.setIgnoreItems(_this.leftControllerRayPick, this.blacklist.concat([HMD.tabletID]));
RayPick.setIgnoreItems(_this.rightControllerRayPick, this.blacklist.concat([HMD.tabletID]));
};
var MAPPING_NAME = "com.highfidelity.controllerDispatcher";

View file

@ -447,7 +447,9 @@ Script.include("/~/system/libraries/Xform.js");
this.targetObject = new TargetObject(entityID, targetProps);
this.targetObject.parentProps = getEntityParents(targetProps);
Script.clearTimeout(this.contextOverlayTimer);
if (this.contextOverlayTimer) {
Script.clearTimeout(this.contextOverlayTimer);
}
this.contextOverlayTimer = false;
if (entityID !== this.entityWithContextOverlay) {
this.destroyContextOverlay();

View file

@ -0,0 +1,739 @@
//
// scripts/system/libraries/handTouch.js
//
// Created by Luis Cuenca on 12/29/17
// Copyright 2017 High Fidelity, Inc.
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* jslint bitwise: true */
/* global Script, Overlays, Controller, Vec3, MyAvatar, Entities
*/
(function(){
var updateFingerWithIndex = 0;
// Keys to access finger data
var fingerKeys = ["pinky", "ring", "middle", "index", "thumb"];
// Additionally close the hands to achieve a grabbing effect
var grabPercent = { left: 0,
right: 0 };
// var isGrabbing = false;
var Palm = function() {
this.position = {x:0, y:0, z:0};
this.perpendicular = {x:0, y:0, z:0};
this.distance = 0;
this.fingers = {
pinky: {x:0, y:0, z:0},
middle: {x:0, y:0, z:0},
ring: {x:0, y:0, z:0},
thumb: {x:0, y:0, z:0},
index: {x:0, y:0, z:0}
};
this.set = false;
};
var palmData = {
left: new Palm(),
right: new Palm()
};
var handJointNames = {left: "LeftHand", right: "RightHand"};
// Store which fingers are touching - if all are false, restore the default poses
var isTouching = {
left: {
pinky: false,
middle: false,
ring: false,
thumb: false,
index: false
}, right: {
pinky: false,
middle: false,
ring: false,
thumb: false,
index: false
}
};
// frame count for transition to default pose
var countToDefault = {
left: 0,
right: 0
};
// joint data for opened pose
var dataOpen = {
left: {
pinky:[{x: -0.0066, y:-0.0224, z:-0.2174, w:0.9758},{x: 0.0112, y:0.0001, z:0.0093, w:0.9999},{x: -0.0346, y:0.0003, z:-0.0073, w:0.9994}],
ring:[{x: -0.0029, y:-0.0094, z:-0.1413, w:0.9899},{x: 0.0112, y:0.0001, z:0.0059, w:0.9999},{x: -0.0346, y:0.0002, z:-0.006, w:0.9994}],
middle:[{x: -0.0016, y:0, z:-0.0286, w:0.9996},{x: 0.0112, y:-0.0001, z:-0.0063, w:0.9999},{x: -0.0346, y:-0.0003, z:0.0073, w:0.9994}],
index:[{x: -0.0016, y:0.0001, z:0.0199, w:0.9998},{x: 0.0112, y:0, z:0.0081, w:0.9999},{x: -0.0346, y:0.0008, z:-0.023, w:0.9991}],
thumb:[{x: 0.0354, y:0.0363, z:0.3275, w:0.9435},{x: -0.0945, y:0.0938, z:0.0995, w:0.9861},{x: -0.0952, y:0.0718, z:0.1382, w:0.9832}]
}, right: {
pinky:[{x: -0.0034, y:0.023, z:0.1051, w:0.9942},{x: 0.0106, y:-0.0001, z:-0.0091, w:0.9999},{x: -0.0346, y:-0.0003, z:0.0075, w:0.9994}],
ring:[{x: -0.0013, y:0.0097, z:0.0311, w:0.9995},{x: 0.0106, y:-0.0001, z:-0.0056, w:0.9999},{x: -0.0346, y:-0.0002, z:0.0061, w:0.9994}],
middle:[{x: -0.001, y:0, z:0.0285, w:0.9996},{x: 0.0106, y:0.0001, z:0.0062, w:0.9999},{x: -0.0346, y:0.0003, z:-0.0074, w:0.9994}],
index:[{x: -0.001, y:0, z:-0.0199, w:0.9998},{x: 0.0106, y:-0.0001, z:-0.0079, w:0.9999},{x: -0.0346, y:-0.0008, z:0.0229, w:0.9991}],
thumb:[{x: 0.0355, y:-0.0363, z:-0.3263, w:0.9439},{x: -0.0946, y:-0.0938, z:-0.0996, w:0.9861},{x: -0.0952, y:-0.0719, z:-0.1376, w:0.9833}]
}
};
var dataClose = {
left: {
pinky:[{x: 0.5878, y:-0.1735, z:-0.1123, w:0.7821},{x: 0.5704, y:0.0053, z:0.0076, w:0.8213},{x: 0.6069, y:-0.0044, z:-0.0058, w:0.7947}],
ring:[{x: 0.5761, y:-0.0989, z:-0.1025, w:0.8048},{x: 0.5332, y:0.0032, z:0.005, w:0.846},{x: 0.5773, y:-0.0035, z:-0.0049, w:0.8165}],
middle:[{x: 0.543, y:-0.0469, z:-0.0333, w:0.8378},{x: 0.5419, y:-0.0034, z:-0.0053, w:0.8404},{x: 0.5015, y:0.0037, z:0.0063, w:0.8651}],
index:[{x: 0.3051, y:-0.0156, z:-0.014, w:0.9521},{x: 0.6414, y:0.0051, z:0.0063, w:0.7671},{x: 0.5646, y:-0.013, z:-0.019, w:0.8251}],
thumb:[{x: 0.313, y:-0.0348, z:0.3192, w:0.8938},{x: 0, y:0, z:-0.37, w:0.929},{x: 0, y:0, z:-0.2604, w:0.9655}]
}, right: {
pinky:[{x: 0.5881, y:0.1728, z:0.1114, w:0.7823},{x: 0.5704, y:-0.0052, z:-0.0075, w:0.8213},{x: 0.6069, y:0.0046, z:0.006, w:0.7947}],
ring:[{x: 0.5729, y:0.1181, z:0.0898, w:0.8061},{x: 0.5332, y:-0.003, z:-0.0048, w:0.846},{x: 0.5773, y:0.0035, z:0.005, w:0.8165}],
middle:[{x: 0.543, y:0.0468, z:0.0332, w:0.8378},{x: 0.5419, y:0.0034, z:0.0052, w:0.8404},{x: 0.5047, y:-0.0037, z:-0.0064, w:0.8632}],
index:[{x: 0.306, y:-0.0076, z:-0.0584, w:0.9502},{x: 0.6409, y:-0.005, z:-0.006, w:0.7675},{x: 0.5646, y:0.0129, z:0.0189, w:0.8251}],
thumb:[{x: 0.313, y:0.0352, z:-0.3181, w:0.8942},{x: 0, y:0, z:0.3698, w:0.9291},{x: 0, y:0, z:0.2609, w:0.9654}]
}
};
// snapshot for the default pose
var dataDefault = {
left:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
set: false
},
right:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
set: false
}
};
// joint data for the current frame
var dataCurrent = {
left:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}]
},
right:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}]
}
};
// interpolated values on joint data to smooth movement
var dataDelta = {
left:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}]
},
right:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}]
}
};
// Acquire an updated value per hand every 5 frames when finger is touching (faster in)
var touchAnimationSteps = 5;
// Acquire an updated value per hand every 10 frames when finger is returning to default position (slower out)
var defaultAnimationSteps = 10;
// Debugging info
var showSphere = false;
var showLines = false;
// These get set up on creation
var linesCreated = false;
var sphereCreated = false;
// Register object with API Debugger
var varsToDebug = {
scriptLoaded: false,
toggleDebugSphere: function(){
showSphere = !showSphere;
if (showSphere && !sphereCreated) {
createDebugSphere();
sphereCreated = true;
}
},
toggleDebugLines: function(){
showLines = !showLines;
if (showLines && !linesCreated) {
createDebugLines();
linesCreated = true;
}
},
fingerPercent: {
left: {
pinky: 0.38,
middle: 0.38,
ring: 0.38,
thumb: 0.38,
index: 0.38
} ,
right: {
pinky: 0.38,
middle: 0.38,
ring: 0.38,
thumb: 0.38,
index: 0.38
}
},
triggerValues: {
leftTriggerValue: 0,
leftTriggerClicked: 0,
rightTriggerValue: 0,
rightTriggerClicked: 0,
leftSecondaryValue: 0,
rightSecondaryValue: 0
},
palmData: {
left: new Palm(),
right: new Palm()
},
offset: {x:0, y:0, z:0},
avatarLoaded: false
};
// Add/Subtract the joint data - per finger joint
function addVals(val1, val2, sign) {
var val = [];
if (val1.length != val2.length) return;
for (var i = 0; i < val1.length; i++) {
val.push({x: 0, y: 0, z: 0, w: 0});
val[i].x = val1[i].x + sign*val2[i].x;
val[i].y = val1[i].y + sign*val2[i].y;
val[i].z = val1[i].z + sign*val2[i].z;
val[i].w = val1[i].w + sign*val2[i].w;
}
return val;
}
// Multiply/Divide the joint data - per finger joint
function multiplyValsBy(val1, num) {
var val = [];
for (var i = 0; i < val1.length; i++) {
val.push({x: 0, y: 0, z: 0, w: 0});
val[i].x = val1[i].x * num;
val[i].y = val1[i].y * num;
val[i].z = val1[i].z * num;
val[i].w = val1[i].w * num;
}
return val;
}
// Calculate the finger lengths by adding their joint lengths
function getJointDistances(jointNamesArray) {
var result = {distances: [], totalDistance: 0};
for (var i = 1; i < jointNamesArray.length; i++) {
var index0 = MyAvatar.getJointIndex(jointNamesArray[i-1]);
var index1 = MyAvatar.getJointIndex(jointNamesArray[i]);
var pos0 = MyAvatar.getJointPosition(index0);
var pos1 = MyAvatar.getJointPosition(index1);
var distance = Vec3.distance(pos0, pos1);
result.distances.push(distance);
result.totalDistance += distance;
}
return result;
}
function dataRelativeToWorld(side, dataIn, dataOut) {
var handJoint = handJointNames[side];
var jointIndex = MyAvatar.getJointIndex(handJoint);
var worldPosHand = MyAvatar.jointToWorldPoint({x:0, y:0, z:0}, jointIndex);
dataOut.position = MyAvatar.jointToWorldPoint(dataIn.position, jointIndex);
// dataOut.perpendicular = Vec3.subtract(MyAvatar.jointToWorldPoint(dataIn.perpendicular, jointIndex), worldPosHand);
var localPerpendicular = side == "right" ? {x:0.2, y:0, z:1} : {x:-0.2, y:0, z:1};
dataOut.perpendicular = Vec3.normalize(Vec3.subtract(MyAvatar.jointToWorldPoint(localPerpendicular, jointIndex), worldPosHand));
dataOut.distance = dataIn.distance;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
dataOut.fingers[finger] = MyAvatar.jointToWorldPoint(dataIn.fingers[finger], jointIndex);
}
}
function dataRelativeToHandJoint(side, dataIn, dataOut) {
var handJoint = handJointNames[side];
var jointIndex = MyAvatar.getJointIndex(handJoint);
var worldPosHand = MyAvatar.jointToWorldPoint({x:0, y:0, z:0}, jointIndex);
dataOut.position = MyAvatar.worldToJointPoint(dataIn.position, jointIndex);
dataOut.perpendicular = MyAvatar.worldToJointPoint(Vec3.sum(worldPosHand, dataIn.perpendicular), jointIndex);
dataOut.distance = dataIn.distance;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
dataOut.fingers[finger] = MyAvatar.worldToJointPoint(dataIn.fingers[finger], jointIndex);
}
}
// Calculate the sphere used to look up nearby entities, the center of the palm, the perpendicular vector from the palm plane, and the origin of the finger rays
function estimatePalmData(side) {
// Return data object
var data = new Palm();
var jointOffset = { x: 0, y: 0, z: 0 };
var upperSide = side[0].toUpperCase() + side.substring(1);
var jointIndexHand = MyAvatar.getJointIndex(upperSide + "Hand");
// Store position of the hand joint
var worldPosHand = MyAvatar.jointToWorldPoint(jointOffset, jointIndexHand);
var minusWorldPosHand = {x:-worldPosHand.x, y:-worldPosHand.y, z:-worldPosHand.z};
// Data for finger rays
var directions = {pinky: undefined, middle: undefined, ring: undefined, thumb: undefined, index: undefined};
var positions = {pinky: undefined, middle: undefined, ring: undefined, thumb: undefined, index: undefined};
var thumbLength = 0;
var weightCount = 0;
// Calculate palm center
var handJointWeight = 1;
var fingerJointWeight = 2;
var palmCenter = {x:0, y:0, z:0};
palmCenter = Vec3.sum(worldPosHand, palmCenter);
weightCount += handJointWeight;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
var jointSuffixes = 4; // Get 4 joint names with suffix numbers (0, 1, 2, 3)
var jointNames = getJointNames(side, finger, jointSuffixes);
var fingerLength = getJointDistances(jointNames).totalDistance;
var jointIndex = MyAvatar.getJointIndex(jointNames[0]);
positions[finger] = MyAvatar.jointToWorldPoint(jointOffset, jointIndex);
directions[finger] = Vec3.normalize(Vec3.sum(positions[finger], minusWorldPosHand));
data.fingers[finger] = Vec3.sum(positions[finger], Vec3.multiply(fingerLength, directions[finger]));
if (finger != "thumb") {
// finger joints carry double the weight of the hand joint
// This positions the palm estimate better
palmCenter = Vec3.sum(Vec3.multiply(fingerJointWeight, positions[finger]), palmCenter);
weightCount += fingerJointWeight;
} else {
thumbLength = fingerLength;
}
}
// the perpendicular changes direction depending on the side
data.perpendicular = (side == "right") ?
Vec3.normalize(Vec3.cross(directions.index, directions.pinky)):
Vec3.normalize(Vec3.cross(directions.pinky, directions.index));
data.position = Vec3.multiply(1.0/weightCount, palmCenter);
if (side == "right") varsToDebug.offset = MyAvatar.worldToJointPoint(worldPosHand, jointIndexHand);
var palmDistanceMultiplier = 1.55; // found by trial and error; scales to the sphere radius that best fits the hand
data.distance = palmDistanceMultiplier*Vec3.distance(data.position, positions.index);
// move back thumb ray origin
var thumbBackMultiplier = 0.2;
data.fingers.thumb = Vec3.sum(data.fingers.thumb, Vec3.multiply( -thumbBackMultiplier * thumbLength, data.perpendicular));
//return getDataRelativeToHandJoint(side, data);
dataRelativeToHandJoint(side, data, palmData[side]);
palmData[side].set = true;
// return palmData[side];
}
// Register GlobalDebugger for API Debugger
Script.registerValue("GlobalDebugger", varsToDebug);
// store the rays for the fingers - only for debug purposes
var fingerRays = {
left:{
pinky: undefined,
middle: undefined,
ring: undefined,
thumb: undefined,
index: undefined
},
right:{
pinky: undefined,
middle: undefined,
ring: undefined,
thumb: undefined,
index: undefined
}
};
// Create debug overlays - finger rays + palm rays + spheres
var palmRay, sphereHand;
function createDebugLines() {
for (var i = 0; i < fingerKeys.length; i++) {
fingerRays.left[fingerKeys[i]] = Overlays.addOverlay("line3d", {
color: { red: 0, green: 0, blue: 255 },
start: { x:0, y:0, z:0 },
end: { x:0, y:1, z:0 },
visible: showLines
});
fingerRays.right[fingerKeys[i]] = Overlays.addOverlay("line3d", {
color: { red: 0, green: 0, blue: 255 },
start: { x:0, y:0, z:0 },
end: { x:0, y:1, z:0 },
visible: showLines
});
}
palmRay = {
left: Overlays.addOverlay("line3d", {
color: { red: 255, green: 0, blue: 0 },
start: { x:0, y:0, z:0 },
end: { x:0, y:1, z:0 },
visible: showLines
}),
right: Overlays.addOverlay("line3d", {
color: { red: 255, green: 0, blue: 0 },
start: { x:0, y:0, z:0 },
end: { x:0, y:1, z:0 },
visible: showLines
})
};
linesCreated = true;
}
function createDebugSphere() {
sphereHand = {
right: Overlays.addOverlay("sphere", {
position: MyAvatar.position,
color: { red: 0, green: 255, blue: 0 },
scale: { x: 0.01, y: 0.01, z: 0.01 },
visible: showSphere
}),
left: Overlays.addOverlay("sphere", {
position: MyAvatar.position,
color: { red: 0, green: 255, blue: 0 },
scale: { x: 0.01, y: 0.01, z: 0.01 },
visible: showSphere
})
};
sphereCreated = true;
}
function acquireDefaultPose(side) {
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
var jointSuffixes = 3; // We need rotation of the 0, 1 and 2 joints
var names = getJointNames(side, finger, jointSuffixes);
for (var j = 0; j < names.length; j++) {
var index = MyAvatar.getJointIndex(names[j]);
var rotation = MyAvatar.getJointRotation(index);
dataDefault[side][finger][j] = dataCurrent[side][finger][j] = rotation;
}
}
dataDefault[side].set = true;
}
function updateSphereHand(side) {
var data = new Palm();
dataRelativeToWorld(side, palmData[side], data);
varsToDebug.palmData[side] = palmData[side];
var palmPoint = data.position;
var LOOKUP_DISTANCE_MULTIPLIER = 1.5;
var dist = LOOKUP_DISTANCE_MULTIPLIER*data.distance;
// Situate the debugging overlays
var checkOffset = { x: data.perpendicular.x * dist,
y: data.perpendicular.y * dist,
z: data.perpendicular.z * dist };
var spherePos = Vec3.sum(palmPoint, checkOffset);
var checkPoint = Vec3.sum(palmPoint, Vec3.multiply(2, checkOffset));
if (showLines) {
Overlays.editOverlay(palmRay[side], {
start: palmPoint,
end: checkPoint,
visible: showLines
});
for (var i = 0; i < fingerKeys.length; i++) {
Overlays.editOverlay(fingerRays[side][fingerKeys[i]], {
start: data.fingers[fingerKeys[i]],
end: checkPoint,
visible: showLines
});
}
}
if (showSphere) {
Overlays.editOverlay(sphereHand[side], {
position: spherePos,
scale: {
x: 2*dist,
y: 2*dist,
z: 2*dist
},
visible: showSphere
});
}
// Update the intersection of only one finger at a time
var finger = fingerKeys[updateFingerWithIndex];
var grabbables = Entities.findEntities(spherePos, dist);
var newFingerData = dataDefault[side][finger];
var animationSteps = defaultAnimationSteps;
if (grabbables.length > 0) {
var origin = data.fingers[finger];
var direction = Vec3.normalize(Vec3.subtract(checkPoint, origin));
var intersection = Entities.findRayIntersection({origin: origin, direction: direction}, true, grabbables, [], true, false);
var percent = 0; // Initialize
var isAbleToGrab = intersection.intersects && intersection.distance < LOOKUP_DISTANCE_MULTIPLIER*dist;
if (isAbleToGrab && !getTouching(side)) {
acquireDefaultPose(side); // take a snapshot of the default pose before touch starts
newFingerData = dataDefault[side][finger]; // assign default pose to finger data
}
// Store if this finger is touching something
isTouching[side][finger] = isAbleToGrab;
if (isAbleToGrab) {
// update the open/close percentage for this finger
var FINGER_REACT_MULTIPLIER = 2.8;
percent = intersection.distance/(FINGER_REACT_MULTIPLIER*dist);
var THUMB_FACTOR = 0.2;
var FINGER_FACTOR = 0.05;
var grabMultiplier = finger === "thumb" ? THUMB_FACTOR : FINGER_FACTOR; // Amount of grab coefficient added to the fingers - thumb is higher
percent += grabMultiplier * grabPercent[side];
// Calculate new interpolation data
var totalDistance = addVals(dataClose[side][finger], dataOpen[side][finger], -1);
newFingerData = addVals(dataOpen[side][finger], multiplyValsBy(totalDistance, percent), 1); // assign close/open ratio to finger to simulate touch
animationSteps = touchAnimationSteps;
}
varsToDebug.fingerPercent[side][finger] = percent;
}
// Calculate animation increments
dataDelta[side][finger] = multiplyValsBy(addVals(newFingerData, dataCurrent[side][finger], -1), 1.0/animationSteps);
}
// Recreate the finger joint names
function getJointNames(side, finger, count) {
var names = [];
for (var i = 1; i < count+1; i++) {
var name = side[0].toUpperCase()+side.substring(1)+"Hand"+finger[0].toUpperCase()+finger.substring(1)+(i);
names.push(name);
}
return names;
}
// Capture the controller values
var leftTriggerPress = function (value) {
varsToDebug.triggerValues.leftTriggerValue = value;
// the value for the trigger increments the hand-close percentage
grabPercent.left = value;
};
var leftTriggerClick = function (value) {
varsToDebug.triggerValues.leftTriggerClicked = value;
};
var rightTriggerPress = function (value) {
varsToDebug.triggerValues.rightTriggerValue = value;
// the value for the trigger increments the hand-close percentage
grabPercent.right = value;
};
var rightTriggerClick = function (value) {
varsToDebug.triggerValues.rightTriggerClicked = value;
};
var leftSecondaryPress = function (value) {
varsToDebug.triggerValues.leftSecondaryValue = value;
};
var rightSecondaryPress = function (value) {
varsToDebug.triggerValues.rightSecondaryValue = value;
};
var MAPPING_NAME = "com.highfidelity.handTouch";
var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from([Controller.Standard.RT]).peek().to(rightTriggerPress);
mapping.from([Controller.Standard.RTClick]).peek().to(rightTriggerClick);
mapping.from([Controller.Standard.LT]).peek().to(leftTriggerPress);
mapping.from([Controller.Standard.LTClick]).peek().to(leftTriggerClick);
mapping.from([Controller.Standard.RB]).peek().to(rightSecondaryPress);
mapping.from([Controller.Standard.LB]).peek().to(leftSecondaryPress);
mapping.from([Controller.Standard.LeftGrip]).peek().to(leftSecondaryPress);
mapping.from([Controller.Standard.RightGrip]).peek().to(rightSecondaryPress);
Controller.enableMapping(MAPPING_NAME);
if (showLines && !linesCreated) {
createDebugLines();
linesCreated = true;
}
if (showSphere && !sphereCreated) {
createDebugSphere();
sphereCreated = true;
}
function getTouching(side) {
var animating = false;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
animating = animating || isTouching[side][finger];
}
return animating; // return false only if none of the fingers are touching
}
function reEstimatePalmData() {
["right", "left"].forEach(function(side){
estimatePalmData(side);
});
}
MyAvatar.onLoadComplete.connect(function () {
// Sometimes the rig is not ready when this signal is triggered
console.log("avatar loaded");
Script.setInterval(function(){
reEstimatePalmData();
}, 2000);
});
MyAvatar.sensorToWorldScaleChanged.connect(function(){
reEstimatePalmData();
});
Script.scriptEnding.connect(function () {
["right", "left"].forEach(function(side){
if (linesCreated) {
Overlays.deleteOverlay(palmRay[side]);
}
if (sphereCreated) {
Overlays.deleteOverlay(sphereHand[side]);
}
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
var jointSuffixes = 3; // We need to clear joints 0, 1 and 2
var names = getJointNames(side, finger, jointSuffixes);
for (var j = 0; j < names.length; j++) {
var index = MyAvatar.getJointIndex(names[j]);
MyAvatar.clearJointData(index);
}
if (linesCreated) {
Overlays.deleteOverlay(fingerRays[side][finger]);
}
}
});
});
Script.update.connect(function(){
// index of the finger that needs to be updated this frame
updateFingerWithIndex = (updateFingerWithIndex < fingerKeys.length-1) ? updateFingerWithIndex + 1 : 0;
["right", "left"].forEach(function(side){
if (!palmData[side].set) {
reEstimatePalmData();
}
// recalculate the base data
updateSphereHand(side);
// these vars manage the transition to the default pose
var isHandTouching = getTouching(side);
countToDefault[side] = isHandTouching ? 0 : countToDefault[side] + 1;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
var jointSuffixes = 3; // We need to update rotation of the 0, 1 and 2 joints
var names = getJointNames(side, finger, jointSuffixes);
// Add the animation increments
dataCurrent[side][finger] = addVals(dataCurrent[side][finger], dataDelta[side][finger], 1);
// update every finger joint
for (var j = 0; j < names.length; j++) {
var index = MyAvatar.getJointIndex(names[j]);
// if no finger is touching, restore the default poses
if (isHandTouching || (dataDefault[side].set && countToDefault[side] < 5*touchAnimationSteps)) {
var quatRot = dataCurrent[side][finger][j];
MyAvatar.setJointRotation(index, quatRot);
} else {
MyAvatar.clearJointData(index);
}
}
}
});
});
}());
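A reduced sketch of the animation scheme used above: for the one finger selected each frame, a per-joint delta toward the target pose is computed and then added on every subsequent frame, so a touch eases in over touchAnimationSteps frames and eases back out over defaultAnimationSteps. Plain numbers stand in here for the quaternion components handled by addVals and multiplyValsBy.
var current = 0.0;    // stands in for dataCurrent[side][finger][j]
var target = 1.0;     // stands in for the blended open/close pose (newFingerData)
var steps = 5;        // touchAnimationSteps (10 when easing back to the default pose)
var delta = (target - current) / steps;   // multiplyValsBy(addVals(target, current, -1), 1.0/steps)
for (var frame = 0; frame < steps; frame++) {
    current += delta;                     // addVals(dataCurrent, dataDelta, 1) in the real script
    print("frame " + frame + ": " + current.toFixed(2));
}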

View file

@ -225,7 +225,7 @@ function adjustPositionPerBoundingBox(position, direction, registration, dimensi
var TOOLS_PATH = Script.resolvePath("assets/images/tools/");
var GRABBABLE_ENTITIES_MENU_CATEGORY = "Edit";
var GRABBABLE_ENTITIES_MENU_ITEM = "Create Entities As Grabbable";
var GRABBABLE_ENTITIES_MENU_ITEM = "Create Entities As Grabbable (except Zones, Particles, and Lights)";
var toolBar = (function () {
var EDIT_SETTING = "io.highfidelity.isEditing"; // for communication with other scripts
@ -239,6 +239,7 @@ var toolBar = (function () {
var dimensions = properties.dimensions ? properties.dimensions : DEFAULT_DIMENSIONS;
var position = getPositionToCreateEntity();
var entityID = null;
if (position !== null && position !== undefined) {
var direction;
if (Camera.mode === "entity" || Camera.mode === "independent") {
@ -278,9 +279,13 @@ var toolBar = (function () {
position = grid.snapToSurface(grid.snapToGrid(position, false, dimensions), dimensions);
properties.position = position;
if (Menu.isOptionChecked(GRABBABLE_ENTITIES_MENU_ITEM)) {
if (Menu.isOptionChecked(GRABBABLE_ENTITIES_MENU_ITEM) &&
!(properties.type === "Zone" || properties.type === "Light" || properties.type === "ParticleEffect")) {
properties.userData = JSON.stringify({ grabbableKey: { grabbable: true } });
} else {
properties.userData = JSON.stringify({ grabbableKey: { grabbable: false } });
}
entityID = Entities.addEntity(properties);
if (properties.type === "ParticleEffect") {
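As a side note, this is roughly how another script could read back the grabbable flag written above; the parsing is illustrative only and not part of this change.
var userData = Entities.getEntityProperties(entityID, "userData").userData;
var grabbable = false;
try {
    grabbable = JSON.parse(userData).grabbableKey.grabbable === true;
} catch (e) {
    // missing or malformed userData means the entity was not flagged grabbable
}
print("grabbable: " + grabbable);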

scripts/system/emote.js (new file, 122 lines)
View file

@ -0,0 +1,122 @@
"use strict";
//
// emote.js
// scripts/system/
//
// Created by Brad Hefta-Gaub on 7 Jan 2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* globals Script, Tablet, AnimationCache, MyAvatar */
/* eslint indent: ["error", 4, { "outerIIFEBody": 0 }] */
(function() { // BEGIN LOCAL_SCOPE
var EMOTE_ANIMATIONS = ['Crying', 'Surprised', 'Dancing', 'Cheering', 'Waving', 'Fall', 'Pointing', 'Clapping'];
var ANIMATIONS = {};
EMOTE_ANIMATIONS.forEach(function (name) {
var animationURL = Script.resolvePath("assets/animations/" + name + ".fbx");
var resource = AnimationCache.prefetch(animationURL);
var animation = AnimationCache.getAnimation(animationURL);
ANIMATIONS[name] = { url: animationURL, animation: animation, resource: resource};
});
var EMOTE_APP_BASE = "html/EmoteApp.html";
var EMOTE_APP_URL = Script.resolvePath(EMOTE_APP_BASE);
var EMOTE_LABEL = "EMOTE";
var EMOTE_APP_SORT_ORDER = 11;
var FPS = 60;
var MSEC_PER_SEC = 1000;
var FINISHED = 3; // see ScriptableResource::State
var onEmoteScreen = false;
var button;
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var activeTimer = false; // used to cancel the active timer if the user plays an animation while another animation is playing
var activeEmote = false; // to keep track of the currently playing emote
button = tablet.addButton({
//icon: "icons/tablet-icons/emote.svg", // TODO - we need graphics for this
text: EMOTE_LABEL,
sortOrder: EMOTE_APP_SORT_ORDER
});
function onClicked() {
if (onEmoteScreen) {
tablet.gotoHomeScreen();
} else {
onEmoteScreen = true;
tablet.gotoWebScreen(EMOTE_APP_URL);
}
}
function onScreenChanged(type, url) {
onEmoteScreen = type === "Web" && (url.indexOf(EMOTE_APP_BASE) == url.length - EMOTE_APP_BASE.length);
button.editProperties({ isActive: onEmoteScreen });
}
// Handle the events we're receiving from the web UI
function onWebEventReceived(event) {
// Convert the event to a JavaScript object
if (typeof event === "string") {
event = JSON.parse(event);
}
if (event.type === "click") {
var emoteName = event.data;
if (ANIMATIONS[emoteName].resource.state == FINISHED) {
if (activeTimer !== false) {
Script.clearTimeout(activeTimer);
}
// if the activeEmote is different from the chosen emote, play the new emote. Otherwise,
// this is a second click on the same emote as the activeEmote, and we just stop it.
if (activeEmote !== emoteName) {
activeEmote = emoteName;
var frameCount = ANIMATIONS[emoteName].animation.frames.length;
MyAvatar.overrideAnimation(ANIMATIONS[emoteName].url, FPS, false, 0, frameCount);
var timeOut = MSEC_PER_SEC * frameCount / FPS;
activeTimer = Script.setTimeout(function () {
MyAvatar.restoreAnimation();
activeTimer = false;
activeEmote = false;
}, timeOut);
} else {
activeEmote = false;
MyAvatar.restoreAnimation();
}
}
}
}
button.clicked.connect(onClicked);
tablet.screenChanged.connect(onScreenChanged);
tablet.webEventReceived.connect(onWebEventReceived);
Script.scriptEnding.connect(function () {
if (onEmoteScreen) {
tablet.gotoHomeScreen();
}
button.clicked.disconnect(onClicked);
tablet.screenChanged.disconnect(onScreenChanged);
if (tablet) {
tablet.removeButton(button);
}
if (activeTimer !== false) {
Script.clearTimeout(activeTimer);
MyAvatar.restoreAnimation();
}
});
}()); // END LOCAL_SCOPE
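A sketch of the playback-length arithmetic used above: the override runs for the clip's frame count at the script's 60 FPS constant, after which restoreAnimation() is scheduled (the 90-frame clip length is an assumption for the example).
var FPS = 60;
var MSEC_PER_SEC = 1000;
var frameCount = 90;                            // e.g. a 1.5 second clip
var timeOut = MSEC_PER_SEC * frameCount / FPS;  // 1500 ms until MyAvatar.restoreAnimation()
print("restore after " + timeOut + " ms");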

View file

@ -0,0 +1,136 @@
<!--
// EmoteApp.html
//
// Created by Brad Hefta-Gaub on 7 Jan 2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-->
<html>
<head>
<title>Emote App</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href="https://fonts.googleapis.com/css?family=Raleway:300,400,600,700" rel="stylesheet">
<style>
body {
margin: 0;
width: 100%;
font-family: 'Raleway', sans-serif;
color: white;
background: linear-gradient(#2b2b2b, #0f212e);
}
.top-bar {
height: 90px;
background: linear-gradient(#2b2b2b, #1e1e1e);
font-weight: bold;
padding-left: 30px;
padding-right: 30px;
display: flex;
align-items: center;
position: fixed;
width: 480px;
top: 0;
z-index: 1;
}
.content {
margin-top: 90px;
padding: 30px;
}
input[type=button] {
font-family: 'Raleway';
font-weight: bold;
font-size: 13px;
text-transform: uppercase;
vertical-align: top;
height: 28px;
min-width: 120px;
padding: 0px 18px;
margin-right: 6px;
border-radius: 5px;
border: none;
color: #fff;
background-color: #000;
background: linear-gradient(#343434 20%, #000 100%);
cursor: pointer;
}
input[type=button].white {
color: #121212;
background-color: #afafaf;
background: linear-gradient(#fff 20%, #afafaf 100%);
}
input[type=button]:enabled:hover {
background: linear-gradient(#000, #000);
border: none;
}
input[type=button].white:enabled:hover {
background: linear-gradient(#fff, #fff);
border: none;
}
input[type=button]:active {
background: linear-gradient(#343434, #343434);
}
input[type=button].white:active {
background: linear-gradient(#afafaf, #afafaf);
}
input[type=button]:disabled {
color: #252525;
background: linear-gradient(#575757 20%, #252525 100%);
}
input[type=button][pressed=pressed] {
color: #00b4ef;
}
</style>
</head>
<body>
<div class="top-bar">
<h4>Emote App</h4>
</div>
<div class="content">
<p>Click an emotion to Emote:</p>
<p><input type="button" class="emote-button white" value="Crying"></p>
<p><input type="button" class="emote-button white" value="Surprised"></p>
<p><input type="button" class="emote-button white" value="Dancing"></p>
<p><input type="button" class="emote-button white" value="Cheering"></p>
<p><input type="button" class="emote-button white" value="Waving"></p>
<p><input type="button" class="emote-button white" value="Fall"></p>
<p><input type="button" class="emote-button white" value="Pointing"></p>
<p><input type="button" class="emote-button white" value="Clapping"></p>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.1/jquery.min.js"></script>
<script>
function main() {
// Send an event to emote.js when the page loads and is ready to get things rolling
console.log("document ready");
var readyEvent = {
"type": "ready",
};
// The event bridge handles events best as strings, so we first create a JavaScript object and then convert it to a string
EventBridge.emitWebEvent(JSON.stringify(readyEvent));
// Send an event when the user clicks one of the emote buttons
$(".emote-button").click(function(){
console.log(this.value + " button click");
var clickEvent = {
"type": "click",
"data": this.value
};
EventBridge.emitWebEvent(JSON.stringify(clickEvent));
});
}
$(document).ready(main);
</script>
</body>
</html>

View file

@ -28,6 +28,7 @@ var selectionDisplay = null; // for gridTool.js to ignore
var MARKETPLACE_PURCHASES_QML_PATH = "hifi/commerce/purchases/Purchases.qml";
var MARKETPLACE_WALLET_QML_PATH = "hifi/commerce/wallet/Wallet.qml";
var MARKETPLACE_INSPECTIONCERTIFICATE_QML_PATH = "commerce/inspectionCertificate/InspectionCertificate.qml";
var REZZING_SOUND = SoundCache.getSound(Script.resolvePath("../assets/sounds/rezzing.wav"));
var HOME_BUTTON_TEXTURE = "http://hifi-content.s3.amazonaws.com/alan/dev/tablet-with-home-button.fbx/tablet-with-home-button.fbm/button-root.png";
// var HOME_BUTTON_TEXTURE = Script.resourcesPath() + "meshes/tablet-with-home-button.fbx/tablet-with-home-button.fbm/button-root.png";
@ -341,6 +342,15 @@ var selectionDisplay = null; // for gridTool.js to ignore
// we currently assume a wearable is a single entity
Entities.editEntity(pastedEntityIDs[0], offsets);
}
var rezPosition = Entities.getEntityProperties(pastedEntityIDs[0], "position").position;
Audio.playSound(REZZING_SOUND, {
volume: 1.0,
position: rezPosition,
localOnly: true
});
} else {
Window.notifyEditError("Can't import entities: entities would be out of bounds.");
}