Merge remote-tracking branch 'upstream/master' into android_nov

Brad Davis 2018-01-09 11:43:03 -08:00
commit d6dd29b952
21 changed files with 1503 additions and 52 deletions

View file

@ -11,11 +11,8 @@ function(JOIN VALUES GLUE OUTPUT)
set (${OUTPUT} "${_TMP_STR}" PARENT_SCOPE)
endfunction()
if (ANDROID OR NOT DEV_BUILD)
set(INTERFACE_QML_QRC ${CMAKE_CURRENT_BINARY_DIR}/qml.qrc)
generate_qrc(OUTPUT ${INTERFACE_QML_QRC} PATH ${CMAKE_CURRENT_SOURCE_DIR}/resources GLOBS *.qml *.qss *.js *.html *.ttf *.gif *.svg *.png *.jpg)
endif()
set(INTERFACE_QML_QRC ${CMAKE_CURRENT_BINARY_DIR}/qml.qrc)
generate_qrc(OUTPUT ${INTERFACE_QML_QRC} PATH ${CMAKE_CURRENT_SOURCE_DIR}/resources GLOBS *.qml *.qss *.js *.html *.ttf *.gif *.svg *.png *.jpg)
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "LeapMotion")
@ -74,9 +71,7 @@ qt5_wrap_ui(QT_UI_HEADERS "${QT_UI_FILES}")
# add them to the interface source files
set(INTERFACE_SRCS ${INTERFACE_SRCS} "${QT_UI_HEADERS}" "${QT_RESOURCES}")
if (ANDROID OR NOT DEV_BUILD)
list(APPEND INTERFACE_SRCS ${INTERFACE_QML_QRC})
endif()
if (UNIX)
install(

View file

@ -53,7 +53,7 @@ Item {
// Title Bar text
RalewaySemiBold {
text: "HIFI COMMERCE - LOGIN";
text: "Log in to continue";
// Text size
size: hifi.fontSizes.overlayTitle;
// Anchors

View file

@ -127,9 +127,15 @@ Item {
GridView {
id: gridView
keyNavigationEnabled: false
highlightFollowsCurrentItem: false
property int previousGridIndex: -1
// true if any of the buttons contains mouse
property bool containsMouse: false
anchors {
fill: parent
topMargin: 20
@ -162,15 +168,29 @@ Item {
flow: GridView.LeftToRight
model: page.proxyModel
delegate: Item {
delegate: Control {
id: wrapper
width: gridView.cellWidth
height: gridView.cellHeight
hoverEnabled: true
property bool containsMouse: gridView.containsMouse
onHoveredChanged: {
if (hovered && !gridView.containsMouse) {
gridView.containsMouse = true
} else {
gridView.containsMouse = false
}
}
property var proxy: modelData
TabletButton {
id: tabletButton
scale: wrapper.hovered ? 1.25 : wrapper.containsMouse ? 0.75 : 1.0
Behavior on scale { NumberAnimation { duration: 200; easing.type: Easing.Linear } }
anchors.centerIn: parent
gridView: wrapper.GridView.view
buttonIndex: page.proxyModel.buttonIndex(uuid);

View file

@ -3166,6 +3166,7 @@ glm::mat4 MyAvatar::getLeftHandCalibrationMat() const {
}
bool MyAvatar::pinJoint(int index, const glm::vec3& position, const glm::quat& orientation) {
std::lock_guard<std::mutex> guard(_pinnedJointsMutex);
auto hipsIndex = getJointIndex("Hips");
if (index != hipsIndex) {
qWarning() << "Pinning is only supported for the hips joint at the moment.";
@ -3185,7 +3186,14 @@ bool MyAvatar::pinJoint(int index, const glm::vec3& position, const glm::quat& o
return true;
}
bool MyAvatar::isJointPinned(int index) {
std::lock_guard<std::mutex> guard(_pinnedJointsMutex);
auto it = std::find(_pinnedJoints.begin(), _pinnedJoints.end(), index);
return it != _pinnedJoints.end();
}
bool MyAvatar::clearPinOnJoint(int index) {
std::lock_guard<std::mutex> guard(_pinnedJointsMutex);
auto it = std::find(_pinnedJoints.begin(), _pinnedJoints.end(), index);
if (it != _pinnedJoints.end()) {
_pinnedJoints.erase(it);

View file

@ -448,9 +448,8 @@ public:
virtual void clearJointData(const QString& name) override;
virtual void clearJointsData() override;
Q_INVOKABLE bool pinJoint(int index, const glm::vec3& position, const glm::quat& orientation);
bool isJointPinned(int index);
Q_INVOKABLE bool clearPinOnJoint(int index);
Q_INVOKABLE float getIKErrorOnLastSolve() const;
@ -837,6 +836,7 @@ private:
bool getIsAway() const { return _isAway; }
void setAway(bool value);
std::mutex _pinnedJointsMutex;
std::vector<int> _pinnedJoints;
// height of user in sensor space, when standing erect.
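Both pinJoint() and clearPinOnJoint() are Q_INVOKABLE, so they are callable from Interface scripts. A minimal, illustrative sketch (assuming the standard MyAvatar scripting properties position and orientation; per the warning in MyAvatar.cpp, only the hips joint is currently supported):
var hipsIndex = MyAvatar.getJointIndex("Hips");
// Pin the hips at the avatar's current position and orientation.
MyAvatar.pinJoint(hipsIndex, MyAvatar.position, MyAvatar.orientation);
// ... later, release the pin so the hips are driven normally again.
MyAvatar.clearPinOnJoint(hipsIndex);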

View file

@ -34,12 +34,25 @@ Rig::CharacterControllerState convertCharacterControllerState(CharacterControlle
}
static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
glm::mat4 worldToSensorMat = glm::inverse(myAvatar->getSensorToWorldMatrix());
// check for pinned hips.
auto hipsIndex = myAvatar->getJointIndex("Hips");
if (myAvatar->isJointPinned(hipsIndex)) {
Transform avatarTransform = myAvatar->getTransform();
AnimPose result = AnimPose(worldToSensorMat * avatarTransform.getMatrix() * Matrices::Y_180);
result.scale() = glm::vec3(1.0f, 1.0f, 1.0f);
return result;
} else {
DebugDraw::getInstance().removeMarker("pinnedHips");
}
glm::mat4 hipsMat = myAvatar->deriveBodyFromHMDSensor();
glm::vec3 hipsPos = extractTranslation(hipsMat);
glm::quat hipsRot = glmExtractRotation(hipsMat);
glm::mat4 avatarToWorldMat = myAvatar->getTransform().getMatrix();
glm::mat4 worldToSensorMat = glm::inverse(myAvatar->getSensorToWorldMatrix());
glm::mat4 avatarToSensorMat = worldToSensorMat * avatarToWorldMat;
// dampen hips rotation, by mixing it with the avatar orientation in sensor space
@ -323,17 +336,25 @@ void MySkeletonModel::updateFingers() {
for (auto& link : chain) {
int index = _rig.indexOfJoint(link.second);
if (index >= 0) {
auto rotationFrameOffset = _jointRotationFrameOffsetMap.find(index);
if (rotationFrameOffset == _jointRotationFrameOffsetMap.end()) {
_jointRotationFrameOffsetMap.insert(std::pair<int, int>(index, 0));
rotationFrameOffset = _jointRotationFrameOffsetMap.find(index);
}
auto pose = myAvatar->getControllerPoseInSensorFrame(link.first);
if (pose.valid) {
glm::quat relRot = glm::inverse(prevAbsRot) * pose.getRotation();
// only set the rotation for the finger joints, not the hands.
if (link.first != controller::Action::LEFT_HAND && link.first != controller::Action::RIGHT_HAND) {
_rig.setJointRotation(index, true, relRot, CONTROLLER_PRIORITY);
rotationFrameOffset->second = 0;
}
prevAbsRot = pose.getRotation();
} else {
} else if (rotationFrameOffset->second == 1) { // if the pose is invalid and the rotation was set on the previous frame, clear it (frame offset == 1)
_rig.clearJointAnimationPriority(index);
}
rotationFrameOffset->second++;
}
}
}

View file

@ -28,6 +28,8 @@ private:
AnimPose _prevHips; // sensor frame
bool _prevHipsValid { false };
std::map<int, int> _jointRotationFrameOffsetMap;
};
#endif // hifi_MySkeletonModel_h

View file

@ -192,8 +192,7 @@ void WindowScriptingInterface::ensureReticleVisible() const {
/// Display a "browse to directory" dialog. If `directory` is an invalid file or directory the browser will start at the current
/// working directory.
/// \param const QString& title title of the window
/// \param const QString& directory directory to start the file browser at
/// \param const QString& nameFilter filter to filter filenames by - see `QFileDialog`
/// \param const QString& directory directory to start the directory browser at
/// \return QScriptValue file path as a string if one was selected, otherwise `QScriptValue::NullValue`
QScriptValue WindowScriptingInterface::browseDir(const QString& title, const QString& directory) {
ensureReticleVisible();
@ -214,8 +213,7 @@ QScriptValue WindowScriptingInterface::browseDir(const QString& title, const QSt
/// Display a "browse to directory" dialog. If `directory` is an invalid file or directory the browser will start at the current
/// working directory.
/// \param const QString& title title of the window
/// \param const QString& directory directory to start the file browser at
/// \param const QString& nameFilter filter to filter filenames by - see `QFileDialog`
/// \param const QString& directory directory to start the directory browser at
void WindowScriptingInterface::browseDirAsync(const QString& title, const QString& directory) {
ensureReticleVisible();
QString path = directory;
@ -459,6 +457,41 @@ int WindowScriptingInterface::openMessageBox(QString title, QString text, int bu
return createMessageBox(title, text, buttons, defaultButton);
}
/**jsdoc
* <p>The buttons that may be included in a message box created by {@link Window.openMessageBox|openMessageBox} are defined by
* numeric values:</p>
* <table>
* <thead>
* <tr>
* <th>Button</th>
* <th>Value</th>
* <th>Description</th>
* </tr>
* </thead>
* <tbody>
* <tr> <td><strong>NoButton</strong></td> <td><code>0x0</code></td> <td>An invalid button.</td> </tr>
* <tr> <td><strong>Ok</strong></td> <td><code>0x400</code></td> <td>"OK"</td> </tr>
* <tr> <td><strong>Save</strong></td> <td><code>0x800</code></td> <td>"Save"</td> </tr>
* <tr> <td><strong>SaveAll</strong></td> <td><code>0x1000</code></td> <td>"Save All"</td> </tr>
* <tr> <td><strong>Open</strong></td> <td><code>0x2000</code></td> <td>"Open"</td> </tr>
* <tr> <td><strong>Yes</strong></td> <td><code>0x4000</code></td> <td>"Yes"</td> </tr>
* <tr> <td><strong>YesToAll</strong></td> <td><code>0x8000</code></td> <td>"Yes to All"</td> </tr>
* <tr> <td><strong>No</strong></td> <td><code>0x10000</code></td> <td>"No"</td> </tr>
* <tr> <td><strong>NoToAll</strong></td> <td><code>0x20000</code></td> <td>"No to All"</td> </tr>
* <tr> <td><strong>Abort</strong></td> <td><code>0x40000</code></td> <td>"Abort"</td> </tr>
* <tr> <td><strong>Retry</strong></td> <td><code>0x80000</code></td> <td>"Retry"</td> </tr>
* <tr> <td><strong>Ignore</strong></td> <td><code>0x100000</code></td> <td>"Ignore"</td> </tr>
* <tr> <td><strong>Close</strong></td> <td><code>0x200000</code></td> <td>"Close"</td> </tr>
* <tr> <td><strong>Cancel</strong></td> <td><code>0x400000</code></td> <td>"Cancel"</td> </tr>
* <tr> <td><strong>Discard</strong></td> <td><code>0x800000</code></td> <td>"Discard" or "Don't Save"</td> </tr>
* <tr> <td><strong>Help</strong></td> <td><code>0x1000000</code></td> <td>"Help"</td> </tr>
* <tr> <td><strong>Apply</strong></td> <td><code>0x2000000</code></td> <td>"Apply"</td> </tr>
* <tr> <td><strong>Reset</strong></td> <td><code>0x4000000</code></td> <td>"Reset"</td> </tr>
* <tr> <td><strong>RestoreDefaults</strong></td> <td><code>0x8000000</code></td> <td>"Restore Defaults"</td> </tr>
* </tbody>
* </table>
* @typedef Window.MessageBoxButton
*/
int WindowScriptingInterface::createMessageBox(QString title, QString text, int buttons, int defaultButton) {
auto messageBox = DependencyManager::get<OffscreenUi>()->createMessageBox(OffscreenUi::ICON_INFORMATION, title, text,
static_cast<QFlags<QMessageBox::StandardButton>>(buttons), static_cast<QMessageBox::StandardButton>(defaultButton));

View file

@ -33,6 +33,21 @@ QScriptValue CustomPromptResultToScriptValue(QScriptEngine* engine, const Custom
void CustomPromptResultFromScriptValue(const QScriptValue& object, CustomPromptResult& result);
/**jsdoc
* The Window API provides various facilities not covered elsewhere: window dimensions, window focus, normal or entity camera
* view, clipboard, announcements, user connections, common dialog boxes, snapshots, file import, domain changes, domain
* physics.
*
* @namespace Window
* @property {number} innerWidth - The width of the drawable area of the Interface window (i.e., without borders or other
* chrome), in pixels. <em>Read-only.</em>
* @property {number} innerHeight - The height of the drawable area of the Interface window (i.e., without borders or other
* chrome) plus the height of the menu bar, in pixels. <em>Read-only.</em>
* @property {object} location - Provides facilities for working with your current metaverse location. See {@link location}.
* @property {number} x - The x coordinate of the top left corner of the Interface window on the display. <em>Read-only.</em>
* @property {number} y - The y coordinate of the top left corner of the Interface window on the display. <em>Read-only.</em>
*/
class WindowScriptingInterface : public QObject, public Dependency {
Q_OBJECT
Q_PROPERTY(int innerWidth READ getInnerWidth)
@ -48,63 +63,622 @@ public:
int getY();
public slots:
/**jsdoc
* Check if the Interface window has focus.
* @function Window.hasFocus
* @returns {boolean} <code>true</code> if the Interface window has focus, otherwise <code>false</code>.
*/
QScriptValue hasFocus();
/**jsdoc
* Make the Interface window have focus.
* @function Window.setFocus
*/
void setFocus();
/**jsdoc
* Raise the Interface window if it is minimized, and give it focus.
* @function Window.raiseMainWindow
*/
void raiseMainWindow();
/**jsdoc
* Display a dialog with the specified message and an "OK" button. The dialog is non-modal; the script continues without
* waiting for a user response.
* @function Window.alert
* @param {string} message="" - The message to display.
* @example <caption>Display a friendly greeting.</caption>
* Window.alert("Welcome!");
* print("Script continues without waiting");
*/
void alert(const QString& message = "");
/**jsdoc
* Prompt the user to confirm something. Displays a modal dialog with a message plus "Yes" and "No" buttons; the script
* pauses until the user responds.
* @function Window.confirm
* @param {string} message="" - The question to display.
* @returns {boolean} <code>true</code> if the user selects "Yes", otherwise <code>false</code>.
* @example <caption>Ask the user a question requiring a yes/no answer.</caption>
* var answer = Window.confirm("Are you sure?");
* print(answer); // true or false
*/
QScriptValue confirm(const QString& message = "");
/**jsdoc
* Prompt the user to enter some text. Displays a modal dialog with a message and a text box, plus "OK" and "Cancel"
* buttons.
* @function Window.prompt
* @param {string} message - The question to display.
* @param {string} defaultText - The default answer text.
* @returns {string} The text that the user entered if they select "OK", otherwise "".
* @example <caption>Ask the user a question requiring a text answer.</caption>
* var answer = Window.prompt("Question", "answer");
* if (answer === "") {
* print("User canceled");
* } else {
* print("User answer: " + answer);
* }
*/
QScriptValue prompt(const QString& message, const QString& defaultText);
/**jsdoc
* Prompt the user to enter some text. Displays a non-modal dialog with a message and a text box, plus "OK" and "Cancel"
* buttons. A {@link Window.promptTextChanged|promptTextChanged} signal is emitted when the user OKs the dialog; no signal
* is emitted if the user cancels the dialog.
* @function Window.promptAsync
* @param {string} message - The question to display.
* @param {string} defaultText - The default answer text.
* @example <caption>Ask the user a question requiring a text answer without waiting for the answer.</caption>
* function onPromptTextChanged(text) {
* print("User answer: " + text);
* }
* Window.promptTextChanged.connect(onPromptTextChanged);
*
* Window.promptAsync("Question", "answer");
* print("Script continues without waiting");
*/
void promptAsync(const QString& message = "", const QString& defaultText = "");
/**jsdoc
* Prompt the user for input in a custom, modal dialog.
* @deprecated This function is deprecated and will be removed.
* @function Window.customPrompt
* @param {object} config - Configures the modal dialog.
* @returns {object} The user's response.
*/
CustomPromptResult customPrompt(const QVariant& config);
/**jsdoc
* Prompt the user to choose a directory. Displays a modal dialog that navigates the directory tree.
* @function Window.browseDir
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @returns {string} The path of the directory if one is chosen, otherwise <code>null</code>.
* @example <caption>Ask the user to choose a directory.</caption>
* var directory = Window.browseDir("Select Directory", Paths.resources);
* print("Directory: " + directory);
*/
QScriptValue browseDir(const QString& title = "", const QString& directory = "");
/**jsdoc
* Prompt the user to choose a directory. Displays a non-modal dialog that navigates the directory tree. A
* {@link Window.browseDirChanged|browseDirChanged} signal is emitted when a directory is chosen; no signal is emitted if
* the user cancels the dialog.
* @function Window.browseDirAsync
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @example <caption>Ask the user to choose a directory without waiting for the answer.</caption>
* function onBrowseDirChanged(directory) {
* print("Directory: " + directory);
* }
* Window.browseDirChanged.connect(onBrowseDirChanged);
*
* Window.browseDirAsync("Select Directory", Paths.resources);
* print("Script continues without waiting");
*/
void browseDirAsync(const QString& title = "", const QString& directory = "");
/**jsdoc
* Prompt the user to choose a file. Displays a modal dialog that navigates the directory tree.
* @function Window.browse
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @returns {string} The path and name of the file if one is chosen, otherwise <code>null</code>.
* @example <caption>Ask the user to choose an image file.</caption>
* var filename = Window.browse("Select Image File", Paths.resources, "Images (*.png *.jpg *.svg)");
* print("File: " + filename);
*/
QScriptValue browse(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to choose a file. Displays a non-modal dialog that navigates the directory tree. A
* {@link Window.openFileChanged|openFileChanged} signal is emitted when a file is chosen; no signal is emitted if the user
* cancels the dialog.
* @function Window.browseAsync
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @example <caption>Ask the user to choose an image file without waiting for the answer.</caption>
* function onOpenFileChanged(filename) {
* print("File: " + filename);
* }
* Window.openFileChanged.connect(onOpenFileChanged);
*
* Window.browseAsync("Select Image File", Paths.resources, "Images (*.png *.jpg *.svg)");
* print("Script continues without waiting");
*/
void browseAsync(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to specify the path and name of a file to save to. Displays a modal dialog that navigates the directory
* tree and allows the user to type in a file name.
* @function Window.save
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @returns {string} The path and name of the file if one is specified, otherwise <code>null</code>. If a single file type
* is specified in the nameFilter, that file type extension is automatically appended to the result when appropriate.
* @example <caption>Ask the user to specify a file to save to.</caption>
* var filename = Window.save("Save to JSON file", Paths.resources, "*.json");
* print("File: " + filename);
*/
QScriptValue save(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to specify the path and name of a file to save to. Displays a non-modal dialog that navigates the
* directory tree and allows the user to type in a file name. A {@link Window.saveFileChanged|saveFileChanged} signal is
* emitted when a file is specified; no signal is emitted if the user cancels the dialog.
* @function Window.saveAsync
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @example <caption>Ask the user to specify a file to save to without waiting for an answer.</caption>
* function onSaveFileChanged(filename) {
* print("File: " + filename);
* }
* Window.saveFileChanged.connect(onSaveFileChanged);
*
* Window.saveAsync("Save to JSON file", Paths.resources, "*.json");
* print("Script continues without waiting");
*/
void saveAsync(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to choose an Asset Server item. Displays a modal dialog that navigates the tree of assets on the Asset
* Server.
* @function Window.browseAssets
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @returns {string} The path and name of the asset if one is chosen, otherwise <code>null</code>.
* @example <caption>Ask the user to select an FBX asset.</caption>
* var asset = Window.browseAssets("Select FBX File", "/", "*.fbx");
* print("FBX file: " + asset);
*/
QScriptValue browseAssets(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Prompt the user to choose an Asset Server item. Displays a non-modal dialog that navigates the tree of assets on the
* Asset Server. A {@link Window.assetsDirChanged|assetsDirChanged} signal is emitted when an asset is chosen; no signal is
* emitted if the user cancels the dialog.
* @function Window.browseAssetsAsync
* @param {string} title="" - The title to display at the top of the dialog.
* @param {string} directory="" - The initial directory to start browsing at.
* @param {string} nameFilter="" - The types of files to display. Examples: <code>"*.json"</code> and
* <code>"Images (*.png *.jpg *.svg)"</code>. All files are displayed if a filter isn't specified.
* @example
* function onAssetsDirChanged(asset) {
* print("FBX file: " + asset);
* }
* Window.assetsDirChanged.connect(onAssetsDirChanged);
*
* Window.browseAssetsAsync("Select FBX File", "/", "*.fbx");
* print("Script continues without waiting");
*/
void browseAssetsAsync(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
/**jsdoc
* Open the Asset Browser dialog. If a file to upload is specified, the user is prompted to enter the folder and name to
* map the file to on the asset server.
* @function Window.showAssetServer
* @param {string} uploadFile="" - The path and name of a file to upload to the asset server.
* @example <caption>Upload a file to the asset server.</caption>
* var filename = Window.browse("Select File to Add to Asset Server", Paths.resources);
* print("File: " + filename);
* Window.showAssetServer(filename);
*/
void showAssetServer(const QString& upload = "");
/**jsdoc
* Get Interface's build number.
* @function Window.checkVersion
* @returns {string} - Interface's build number.
*/
QString checkVersion();
/**jsdoc
* Copies text to the operating system's clipboard.
* @function Window.copyToClipboard
* @param {string} text - The text to copy to the operating system's clipboard.
*/
void copyToClipboard(const QString& text);
/**jsdoc
* Takes a snapshot of the current Interface view from the primary camera. When a still image only is captured,
* {@link Window.stillSnapshotTaken|stillSnapshotTaken} is emitted; when a still image plus moving images are captured,
* {@link Window.processingGifStarted|processingGifStarted} and {@link Window.processingGifCompleted|processingGifCompleted}
* are emitted. The path to store the snapshots and the length of the animated GIF to capture are specified in Settings >
* General > Snapshots.
* @function Window.takeSnapshot
* @param {boolean} notify=true - This value is passed on through the {@link Window.stillSnapshotTaken|stillSnapshotTaken}
* signal.
* @param {boolean} includeAnimated=false - If <code>true</code>, a moving image is captured as an animated GIF in addition
* to a still image.
* @param {number} aspectRatio=0 - The width/height ratio of the snapshot required. If the value is <code>0</code> the
* full resolution is used (window dimensions in desktop mode; HMD display dimensions in HMD mode), otherwise one of the
* dimensions is adjusted in order to match the aspect ratio.
* @example <caption>Using the snapshot function and signals.</caption>
* function onStillSnapshotTaken(path, notify) {
* print("Still snapshot taken: " + path);
* print("Notify: " + notify);
* }
*
* function onProcessingGifStarted(stillPath) {
* print("Still snapshot taken: " + stillPath);
* }
*
* function onProcessingGifCompleted(animatedPath) {
* print("Animated snapshot taken: " + animatedPath);
* }
*
* Window.stillSnapshotTaken.connect(onStillSnapshotTaken);
* Window.processingGifStarted.connect(onProcessingGifStarted);
* Window.processingGifCompleted.connect(onProcessingGifCompleted);
*
* var notify = true;
* var animated = true;
* var aspect = 1920 / 1080;
* Window.takeSnapshot(notify, animated, aspect);
*/
void takeSnapshot(bool notify = true, bool includeAnimated = false, float aspectRatio = 0.0f);
/**jsdoc
* Takes a still snapshot of the current view from the secondary camera that can be set up through the {@link Render} API.
* @function Window.takeSecondaryCameraSnapshot
*/
void takeSecondaryCameraSnapshot();
/**jsdoc
* Emit a {@link Window.connectionAdded|connectionAdded} or a {@link Window.connectionError|connectionError} signal that
* indicates whether or not a user connection was successfully made using the Web API.
* @function Window.makeConnection
* @param {boolean} success - If <code>true</code> then {@link Window.connectionAdded|connectionAdded} is emitted, otherwise
* {@link Window.connectionError|connectionError} is emitted.
* @param {string} description - Descriptive text about the connection success or error. This is sent in the signal emitted.
*/
void makeConnection(bool success, const QString& userNameOrError);
/**jsdoc
* Display a notification message. Notifications are displayed in panels by the default script, notifications.js. An
* {@link Window.announcement|announcement} signal is emitted when this function is called.
* @function Window.displayAnnouncement
* @param {string} message - The announcement message.
* @example <caption>Send and capture an announcement message.</caption>
* function onAnnouncement(message) {
* // The message is also displayed as a notification by notifications.js.
* print("Announcement: " + message);
* }
* Window.announcement.connect(onAnnouncement);
*
* Window.displayAnnouncement("Hello");
*/
void displayAnnouncement(const QString& message);
/**jsdoc
* Prepare a snapshot for sharing. A {@link Window.snapshotShared|snapshotShared} signal is emitted when the snapshot
* has been prepared.
* @function Window.shareSnapshot
* @param {string} path - The path and name of the image file to share.
* @param {string} href="" - The metaverse location where the snapshot was taken.
*/
void shareSnapshot(const QString& path, const QUrl& href = QUrl(""));
/**jsdoc
* Check to see if physics is active for you in the domain you're visiting - there is a delay between your arrival at a
* domain and physics becoming active for you in that domain.
* @function Window.isPhysicsEnabled
* @returns {boolean} <code>true</code> if physics is currently active for you, otherwise <code>false</code>.
* @example <caption>Wait for physics to be enabled when you change domains.</caption>
* function checkForPhysics() {
* var isPhysicsEnabled = Window.isPhysicsEnabled();
* print("Physics enabled: " + isPhysicsEnabled);
* if (!isPhysicsEnabled) {
* Script.setTimeout(checkForPhysics, 1000);
* }
* }
*
* function onDomainChanged(domain) {
* print("Domain changed: " + domain);
* Script.setTimeout(checkForPhysics, 1000);
* }
*
* Window.domainChanged.connect(onDomainChanged);
*/
bool isPhysicsEnabled();
/**jsdoc
* Set what to show on the PC display: normal view or entity camera view. The entity camera is configured using
* {@link Camera.setCameraEntity} and {@link Camera|Camera.mode}.
* @function Window.setDisplayTexture
* @param {Window.DisplayTexture} texture - The view to display.
* @returns {boolean} <code>true</code> if the display texture was successfully set, otherwise <code>false</code>.
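* @example <caption>Illustrative sketch (assumes a spectator camera has been set up; see spectatorCamera.js): show the
*     entity camera view for five seconds, then restore the normal view.</caption>
* Window.setDisplayTexture("resource://spectatorCameraFrame");
* Script.setTimeout(function () {
*     Window.setDisplayTexture("");
* }, 5000);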
*/
// See spectatorCamera.js for valid parameter values.
/**jsdoc
* <p>The views that may be displayed on the PC display.</p>
* <table>
* <thead>
* <tr>
* <th>Value</th>
* <th>View Displayed</th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <td><code>""</code></td>
* <td>Normal view.</td>
* </tr>
* <tr>
* <td><code>"resource://spectatorCameraFrame"</code></td>
* <td>Entity camera view.</td>
* </tr>
* </tbody>
* </table>
* @typedef Window.DisplayTexture
*/
bool setDisplayTexture(const QString& name);
/**jsdoc
* Check if a 2D point is within the desktop window if in desktop mode, or the drawable area of the HUD overlay if in HMD
* mode.
* @function Window.isPointOnDesktopWindow
* @param {Vec2} point - The point to check.
* @returns {boolean} <code>true</code> if the point is within the window or HUD, otherwise <code>false</code>.
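* @example <caption>Sketch (illustrative): check whether the center of the rendering surface is on the window or HUD.</caption>
* var size = Window.getDeviceSize();
* print(Window.isPointOnDesktopWindow({ x: size.x / 2, y: size.y / 2 }));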
*/
bool isPointOnDesktopWindow(QVariant point);
/**jsdoc
* Get the size of the drawable area of the Interface window if in desktop mode or the HMD rendering surface if in HMD mode.
* @function Window.getDeviceSize
* @returns {Vec2} The width and height of the Interface window or HMD rendering surface, in pixels.
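* @example <caption>Sketch: report the size of the rendering surface.</caption>
* var size = Window.getDeviceSize();
* print("Device size: " + size.x + " x " + size.y);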
*/
glm::vec2 getDeviceSize() const;
/**jsdoc
* Open a non-modal message box that can have a variety of button combinations. See also,
* {@link Window.updateMessageBox|updateMessageBox} and {@link Window.closeMessageBox|closeMessageBox}.
* @function Window.openMessageBox
* @param {string} title - The title to display for the message box.
* @param {string} text - Text to display in the message box.
* @param {Window.MessageBoxButton} buttons - The buttons to display on the message box; one or more button values added
* together.
* @param {Window.MessageBoxButton} defaultButton - The button that has focus when the message box is opened.
* @returns {number} The ID of the message box created.
* @example <caption>Ask the user whether they want to reset something.</caption>
* var messageBox;
* var resetButton = 0x4000000;
* var cancelButton = 0x400000;
*
* function onMessageBoxClosed(id, button) {
* if (id === messageBox) {
* if (button === resetButton) {
* print("Reset");
* } else {
* print("Don't reset");
* }
* }
* }
* Window.messageBoxClosed.connect(onMessageBoxClosed);
*
* messageBox = Window.openMessageBox("Reset Something",
* "Do you want to reset something?",
* resetButton + cancelButton, cancelButton);
*/
int openMessageBox(QString title, QString text, int buttons, int defaultButton);
/**jsdoc
* Update the content of a message box that was opened with {@link Window.openMessageBox|openMessageBox}.
* @function Window.updateMessageBox
* @param {number} id - The ID of the message box.
* @param {string} title - The title to display for the message box.
* @param {string} text - Text to display in the message box.
* @param {Window.MessageBoxButton} buttons - The buttons to display on the message box; one or more button values added
* together.
* @param {Window.MessageBoxButton} defaultButton - The button that has focus when the message box is opened.
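* @example <caption>Sketch (illustrative; reuses the <code>messageBox</code> ID from the openMessageBox example): replace
*     the buttons with a single "OK" button.</caption>
* var okButton = 0x400;
* Window.updateMessageBox(messageBox, "Reset Something", "Something has been reset.", okButton, okButton);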
*/
void updateMessageBox(int id, QString title, QString text, int buttons, int defaultButton);
/**jsdoc
* Close a message box that was opened with {@link Window.openMessageBox|openMessageBox}.
* @function Window.closeMessageBox
* @param {number} id - The ID of the message box.
*/
void closeMessageBox(int id);
private slots:
void onMessageBoxSelected(int button);
signals:
void domainChanged(const QString& domainHostname);
/**jsdoc
* Triggered when you change the domain you're visiting. <strong>Warning:</strong> Not emitted if you go to a domain that
* isn't running.
* @function Window.domainChanged
* @param {string} domain - The domain's IP address.
* @returns {Signal}
* @example <caption>Report when you change domains.</caption>
* function onDomainChanged(domain) {
* print("Domain changed: " + domain);
* }
*
* Window.domainChanged.connect(onDomainChanged);
*/
void domainChanged(const QString& domain);
/**jsdoc
* Triggered when you try to navigate to a *.json, *.svo, or *.svo.json URL in a Web browser within Interface.
* @function Window.svoImportRequested
* @param {string} url - The URL of the file to import.
* @returns {Signal}
*/
void svoImportRequested(const QString& url);
/**jsdoc
* Triggered when you try to visit a domain but are refused connection.
* @function Window.domainConnectionRefused
* @param {string} reasonMessage - A description of the refusal.
* @param {Window.ConnectionRefusedReason} reasonCode - Integer number that enumerates the reason for the refusal.
* @param {string} extraInfo - Extra information about the refusal.
* @returns {Signal}
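* @example <caption>Sketch: report why a domain connection was refused.</caption>
* Window.domainConnectionRefused.connect(function (reasonMessage, reasonCode, extraInfo) {
*     print("Connection refused: " + reasonMessage + " (code " + reasonCode + ")");
* });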
*/
void domainConnectionRefused(const QString& reasonMessage, int reasonCode, const QString& extraInfo);
/**jsdoc
* Triggered when a still snapshot has been taken by calling {@link Window.takeSnapshot|takeSnapshot} with
* <code>includeAnimated = false</code>.
* @function Window.stillSnapshotTaken
* @param {string} pathStillSnapshot - The path and name of the snapshot image file.
* @param {boolean} notify - The value of the <code>notify</code> parameter that {@link Window.takeSnapshot|takeSnapshot}
* was called with.
* @returns {Signal}
*/
void stillSnapshotTaken(const QString& pathStillSnapshot, bool notify);
/**jsdoc
* Triggered when a snapshot submitted via {@link Window.shareSnapshot|shareSnapshot} is ready for sharing. The snapshot
* may then be shared via the {@link Account.metaverseServerURL} Web API.
* @function Window.snapshotShared
* @param {boolean} isError - <code>true</code> if an error was encountered preparing the snapshot for sharing, otherwise
* <code>false</code>.
* @param {string} reply - JSON-formatted information about the snapshot.
* @returns {Signal}
*/
void snapshotShared(bool isError, const QString& reply);
/**jsdoc
* Triggered when the snapshot images have been captured by {@link Window.takeSnapshot|takeSnapshot} and the GIF is
* starting to be processed.
* @function Window.processingGifStarted
* @param {string} pathStillSnapshot - The path and name of the still snapshot image file.
* @returns {Signal}
*/
void processingGifStarted(const QString& pathStillSnapshot);
/**jsdoc
* Triggered when a GIF has been prepared of the snapshot images captured by {@link Window.takeSnapshot|takeSnapshot}.
* @function Window.processingGifCompleted
* @param {string} pathAnimatedSnapshot - The path and name of the moving snapshot GIF file.
* @returns {Signal}
*/
void processingGifCompleted(const QString& pathAnimatedSnapshot);
/**jsdoc
* Triggered when you've successfully made a user connection.
* @function Window.connectionAdded
* @param {string} message - A description of the success.
* @returns {Signal}
*/
void connectionAdded(const QString& connectionName);
/**jsdoc
* Triggered when you fail to make a user connection.
* @function Window.connectionError
* @param {string} message - A description of the error.
* @returns {Signal}
*/
void connectionError(const QString& errorString);
/**jsdoc
* Triggered when a message is announced by {@link Window.displayAnnouncement|displayAnnouncement}.
* @function Window.announcement
* @param {string} message - The message text.
* @returns {Signal}
*/
void announcement(const QString& message);
/**jsdoc
* Triggered when the user closes a message box that was opened with {@link Window.openMessageBox|openMessageBox}.
* @function Window.messageBoxClosed
* @param {number} id - The ID of the message box that was closed.
* @param {number} button - The button that the user clicked. If the user presses Esc, the Cancel button value is returned,
* whether or not the Cancel button is displayed in the message box.
* @returns {Signal}
*/
void messageBoxClosed(int id, int button);
/**jsdoc
* Triggered when the user chooses a directory in a {@link Window.browseDirAsync|browseDirAsync} dialog.
* @function Window.browseDirChanged
* @param {string} directory - The directory the user chose in the dialog.
* @returns {Signal}
*/
void browseDirChanged(QString browseDir);
/**jsdoc
* Triggered when the user chooses an asset in a {@link Window.browseAssetsAsync|browseAssetsAsync} dialog.
* @function Window.assetsDirChanged
* @param {string} asset - The path and name of the asset the user chose in the dialog.
* @returns {Signal}
*/
void assetsDirChanged(QString assetsDir);
/**jsdoc
* Triggered when the user specifies a file in a {@link Window.saveAsync|saveAsync} dialog.
* @function Window.saveFileChanged
* @param {string} filename - The path and name of the file that the user specified in the dialog.
* @returns {Signal}
*/
void saveFileChanged(QString filename);
/**jsdoc
* Triggered when the user chooses a file in a {@link Window.browseAsync|browseAsync} dialog.
* @function Window.openFileChanged
* @param {string} filename - The path and name of the file the user chose in the dialog.
* @returns {Signal}
*/
void openFileChanged(QString filename);
/**jsdoc
* Triggered when the user OKs a {@link Window.promptAsync|promptAsync} dialog.
* @function Window.promptTextChanged
* @param {string} text - The text the user entered in the dialog.
* @returns {Signal}
*/
void promptTextChanged(QString text);
// triggered when window size or position changes
/**jsdoc
* Triggered when the position or size of the Interface window changes.
* @function Window.geometryChanged
* @param {Rect} geometry - The position and size of the drawable area of the Interface window.
* @returns {Signal}
* @example <caption>Report the position and size of the Interface window when it changes.</caption>
* function onWindowGeometryChanged(rect) {
* print("Window geometry: " + JSON.stringify(rect));
* }
*
* Window.geometryChanged.connect(onWindowGeometryChanged);
*/
void geometryChanged(QRect geometry);
private:

View file

@ -37,6 +37,16 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
SkeletonModel::~SkeletonModel() {
}
void SkeletonModel::setURL(const QUrl& url) {
_texturesLoaded = false;
Model::setURL(url);
}
void SkeletonModel::setTextures(const QVariantMap& textures) {
_texturesLoaded = false;
Model::setTextures(textures);
}
void SkeletonModel::initJointStates() {
const FBXGeometry& geometry = getFBXGeometry();
glm::mat4 modelOffset = glm::scale(_scale) * glm::translate(_offset);
@ -142,6 +152,13 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
Parent::simulate(deltaTime, fullUpdate);
}
// FIXME: This texture loading logic should probably live in Avatar, to mirror RenderableModelEntityItem and ModelOverlay,
// but Avatars don't get updates in the same way
if (!_texturesLoaded && getGeometry() && getGeometry()->areTexturesLoaded()) {
_texturesLoaded = true;
updateRenderItems();
}
if (!isActive() || !_owningAvatar->isMyAvatar()) {
return; // only simulate for own avatar
}

View file

@ -31,6 +31,9 @@ public:
SkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr);
~SkeletonModel();
Q_INVOKABLE void setURL(const QUrl& url) override;
Q_INVOKABLE void setTextures(const QVariantMap& textures) override;
void initJointStates() override;
void simulate(float deltaTime, bool fullUpdate = true) override;
@ -115,8 +118,6 @@ protected:
void computeBoundingShape();
protected:
bool getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
Avatar* _owningAvatar;
@ -128,6 +129,9 @@ protected:
glm::vec3 _defaultEyeModelPosition;
float _headClipDistance; // Near clip distance to use if no separate head model
private:
bool _texturesLoaded { false };
};
#endif // hifi_SkeletonModel_h

View file

@ -965,7 +965,10 @@ void EntityItem::setMass(float mass) {
void EntityItem::setHref(QString value) {
auto href = value.toLower();
if (! (value.toLower().startsWith("hifi://")) ) {
// If the string is non-empty and doesn't start with "hifi://" it shouldn't be set
// We allow the string to be empty, because that's the initial state of this property
if ( !(value.toLower().startsWith("hifi://")) && !value.isEmpty()) {
return;
}
withWriteLock([&] {
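For script authors, the practical effect of this check is that an entity's href can be cleared or set only to a hifi:// location; other values are silently ignored. A minimal, illustrative sketch using the Entities scripting API (entityID is assumed to be an existing entity's ID, and hifi://welcome is just a placeholder address):
Entities.editEntity(entityID, { href: "hifi://welcome" });     // accepted
Entities.editEntity(entityID, { href: "" });                   // accepted: clears the link
Entities.editEntity(entityID, { href: "http://example.com" }); // rejected by setHref()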

View file

@ -84,6 +84,46 @@ public:
void softReset();
/**jsdoc
* <p>The reasons that you may be refused connection to a domain are defined by numeric values:</p>
* <table>
* <thead>
* <tr>
* <th>Reason</th>
* <th>Value</th>
* <th>Description</th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <td><strong>Unknown</strong></td>
* <td><code>0</code></td>
* <td>Some unknown reason.</td>
* </tr>
* <tr>
* <td><strong>ProtocolMismatch</strong></td>
* <td><code>1</code></td>
* <td>The communications protocols of the domain and your Interface are not the same.</td>
* </tr>
* <tr>
* <td><strong>LoginError</strong></td>
* <td><code>2</code></td>
* <td>You could not be logged into the domain.</td>
* </tr>
* <tr>
* <td><strong>NotAuthorized</strong></td>
* <td><code>3</code></td>
* <td>You are not authorized to connect to the domain.</td>
* </tr>
* <tr>
* <td><strong>TooManyUsers</strong></td>
* <td><code>4</code></td>
* <td>The domain already has its maximum number of users.</td>
* </tr>
* </tbody>
* </table>
* @typedef Window.ConnectionRefusedReason
*/
enum class ConnectionRefusedReason : uint8_t {
Unknown,
ProtocolMismatch,

View file

@ -46,10 +46,7 @@ void CauterizedMeshPartPayload::bindTransform(gpu::Batch& batch, const render::S
}
batch.setModelTransform(_cauterizedTransform);
} else {
if (_clusterBuffer) {
batch.setUniformBuffer(ShapePipeline::Slot::BUFFER::SKINNING, _clusterBuffer);
}
batch.setModelTransform(_transform);
ModelMeshPartPayload::bindTransform(batch, locations, renderMode);
}
}

View file

@ -480,23 +480,14 @@ ShapeKey ModelMeshPartPayload::getShapeKey() const {
}
void ModelMeshPartPayload::bindMesh(gpu::Batch& batch) {
if (!_isBlendShaped) {
batch.setIndexBuffer(gpu::UINT32, (_drawMesh->getIndexBuffer()._buffer), 0);
batch.setInputFormat((_drawMesh->getVertexFormat()));
batch.setInputStream(0, _drawMesh->getVertexStream());
batch.setIndexBuffer(gpu::UINT32, (_drawMesh->getIndexBuffer()._buffer), 0);
batch.setInputFormat((_drawMesh->getVertexFormat()));
if (_isBlendShaped && _blendedVertexBuffer) {
batch.setInputBuffer(0, _blendedVertexBuffer, 0, sizeof(glm::vec3));
batch.setInputBuffer(1, _blendedVertexBuffer, _drawMesh->getNumVertices() * sizeof(glm::vec3), sizeof(glm::vec3));
batch.setInputStream(2, _drawMesh->getVertexStream().makeRangedStream(2));
} else {
batch.setIndexBuffer(gpu::UINT32, (_drawMesh->getIndexBuffer()._buffer), 0);
batch.setInputFormat((_drawMesh->getVertexFormat()));
if (_blendedVertexBuffer) {
batch.setInputBuffer(0, _blendedVertexBuffer, 0, sizeof(glm::vec3));
batch.setInputBuffer(1, _blendedVertexBuffer, _drawMesh->getNumVertices() * sizeof(glm::vec3), sizeof(glm::vec3));
batch.setInputStream(2, _drawMesh->getVertexStream().makeRangedStream(2));
} else {
batch.setIndexBuffer(gpu::UINT32, (_drawMesh->getIndexBuffer()._buffer), 0);
batch.setInputFormat((_drawMesh->getVertexFormat()));
batch.setInputStream(0, _drawMesh->getVertexStream());
}
batch.setInputStream(0, _drawMesh->getVertexStream());
}
}

View file

@ -78,7 +78,7 @@ public:
/// Sets the URL of the model to render.
// Should only be called from the model's rendering thread to avoid access violations of changed geometry.
Q_INVOKABLE void setURL(const QUrl& url);
Q_INVOKABLE virtual void setURL(const QUrl& url);
const QUrl& getURL() const { return _url; }
// new Scene/Engine rendering support
@ -136,7 +136,7 @@ public:
const Geometry::Pointer& getCollisionGeometry() const { return _collisionGeometry; }
const QVariantMap getTextures() const { assert(isLoaded()); return _renderGeometry->getTextures(); }
Q_INVOKABLE void setTextures(const QVariantMap& textures);
Q_INVOKABLE virtual void setTextures(const QVariantMap& textures);
/// Provided as a convenience, will crash if !isLoaded()
// And so that getGeometry() isn't chained everywhere

View file

@ -262,7 +262,7 @@ controller::Pose ovrControllerPoseToHandPose(
pose.translation = toGlm(handPose.ThePose.Position);
pose.translation += rotation * translationOffset;
pose.rotation = rotation * rotationOffset;
pose.angularVelocity = toGlm(handPose.AngularVelocity);
pose.angularVelocity = rotation * toGlm(handPose.AngularVelocity);
pose.velocity = toGlm(handPose.LinearVelocity);
pose.valid = true;
return pose;

View file

@ -148,8 +148,8 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
this.setIgnorePointerItems = function() {
if (HMD.tabletID !== this.tabletID) {
this.tabletID = HMD.tabletID;
Pointers.setIgnoreItems(_this.leftPointer, _this.blacklist);
Pointers.setIgnoreItems(_this.rightPointer, _this.blacklist);
Pointers.setIgnoreItems(_this.leftPointer, _this.blacklist.concat([HMD.tabletID]));
Pointers.setIgnoreItems(_this.rightPointer, _this.blacklist.concat([HMD.tabletID]));
}
};
@ -378,8 +378,8 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
};
this.setBlacklist = function() {
RayPick.setIgnoreItems(_this.leftControllerRayPick, this.blacklist);
RayPick.setIgnoreItems(_this.rightControllerRayPick, this.blacklist);
RayPick.setIgnoreItems(_this.leftControllerRayPick, this.blacklist.concat([HMD.tabletID]));
RayPick.setIgnoreItems(_this.rightControllerRayPick, this.blacklist.concat([HMD.tabletID]));
};
var MAPPING_NAME = "com.highfidelity.controllerDispatcher";

View file

@ -447,7 +447,9 @@ Script.include("/~/system/libraries/Xform.js");
this.targetObject = new TargetObject(entityID, targetProps);
this.targetObject.parentProps = getEntityParents(targetProps);
Script.clearTimeout(this.contextOverlayTimer);
if (this.contextOverlayTimer) {
Script.clearTimeout(this.contextOverlayTimer);
}
this.contextOverlayTimer = false;
if (entityID !== this.entityWithContextOverlay) {
this.destroyContextOverlay();

View file

@ -0,0 +1,739 @@
//
// scripts/system/libraries/handTouch.js
//
// Created by Luis Cuenca on 12/29/17
// Copyright 2017 High Fidelity, Inc.
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
/* jslint bitwise: true */
/* global Script, Overlays, Controller, Vec3, MyAvatar, Entities
*/
(function(){
var updateFingerWithIndex = 0;
// Keys to access finger data
var fingerKeys = ["pinky", "ring", "middle", "index", "thumb"];
// Additionally close the hands to achieve a grabbing effect
var grabPercent = { left: 0,
right: 0 };
// var isGrabbing = false;
var Palm = function() {
this.position = {x:0, y:0, z:0};
this.perpendicular = {x:0, y:0, z:0};
this.distance = 0;
this.fingers = {
pinky: {x:0, y:0, z:0},
middle: {x:0, y:0, z:0},
ring: {x:0, y:0, z:0},
thumb: {x:0, y:0, z:0},
index: {x:0, y:0, z:0}
};
this.set = false;
};
var palmData = {
left: new Palm(),
right: new Palm()
};
var handJointNames = {left: "LeftHand", right: "RightHand"};
// Store which fingers are touching - if all are false, reinstate the default poses
var isTouching = {
left: {
pinky: false,
middle: false,
ring: false,
thumb: false,
index: false
}, right: {
pinky: false,
middle: false,
ring: false,
thumb: false,
index: false
}
};
// frame count for transition to default pose
var countToDefault = {
left: 0,
right: 0
};
// joint data for opened pose
var dataOpen = {
left: {
pinky:[{x: -0.0066, y:-0.0224, z:-0.2174, w:0.9758},{x: 0.0112, y:0.0001, z:0.0093, w:0.9999},{x: -0.0346, y:0.0003, z:-0.0073, w:0.9994}],
ring:[{x: -0.0029, y:-0.0094, z:-0.1413, w:0.9899},{x: 0.0112, y:0.0001, z:0.0059, w:0.9999},{x: -0.0346, y:0.0002, z:-0.006, w:0.9994}],
middle:[{x: -0.0016, y:0, z:-0.0286, w:0.9996},{x: 0.0112, y:-0.0001, z:-0.0063, w:0.9999},{x: -0.0346, y:-0.0003, z:0.0073, w:0.9994}],
index:[{x: -0.0016, y:0.0001, z:0.0199, w:0.9998},{x: 0.0112, y:0, z:0.0081, w:0.9999},{x: -0.0346, y:0.0008, z:-0.023, w:0.9991}],
thumb:[{x: 0.0354, y:0.0363, z:0.3275, w:0.9435},{x: -0.0945, y:0.0938, z:0.0995, w:0.9861},{x: -0.0952, y:0.0718, z:0.1382, w:0.9832}]
}, right: {
pinky:[{x: -0.0034, y:0.023, z:0.1051, w:0.9942},{x: 0.0106, y:-0.0001, z:-0.0091, w:0.9999},{x: -0.0346, y:-0.0003, z:0.0075, w:0.9994}],
ring:[{x: -0.0013, y:0.0097, z:0.0311, w:0.9995},{x: 0.0106, y:-0.0001, z:-0.0056, w:0.9999},{x: -0.0346, y:-0.0002, z:0.0061, w:0.9994}],
middle:[{x: -0.001, y:0, z:0.0285, w:0.9996},{x: 0.0106, y:0.0001, z:0.0062, w:0.9999},{x: -0.0346, y:0.0003, z:-0.0074, w:0.9994}],
index:[{x: -0.001, y:0, z:-0.0199, w:0.9998},{x: 0.0106, y:-0.0001, z:-0.0079, w:0.9999},{x: -0.0346, y:-0.0008, z:0.0229, w:0.9991}],
thumb:[{x: 0.0355, y:-0.0363, z:-0.3263, w:0.9439},{x: -0.0946, y:-0.0938, z:-0.0996, w:0.9861},{x: -0.0952, y:-0.0719, z:-0.1376, w:0.9833}]
}
};
var dataClose = {
left: {
pinky:[{x: 0.5878, y:-0.1735, z:-0.1123, w:0.7821},{x: 0.5704, y:0.0053, z:0.0076, w:0.8213},{x: 0.6069, y:-0.0044, z:-0.0058, w:0.7947}],
ring:[{x: 0.5761, y:-0.0989, z:-0.1025, w:0.8048},{x: 0.5332, y:0.0032, z:0.005, w:0.846},{x: 0.5773, y:-0.0035, z:-0.0049, w:0.8165}],
middle:[{x: 0.543, y:-0.0469, z:-0.0333, w:0.8378},{x: 0.5419, y:-0.0034, z:-0.0053, w:0.8404},{x: 0.5015, y:0.0037, z:0.0063, w:0.8651}],
index:[{x: 0.3051, y:-0.0156, z:-0.014, w:0.9521},{x: 0.6414, y:0.0051, z:0.0063, w:0.7671},{x: 0.5646, y:-0.013, z:-0.019, w:0.8251}],
thumb:[{x: 0.313, y:-0.0348, z:0.3192, w:0.8938},{x: 0, y:0, z:-0.37, w:0.929},{x: 0, y:0, z:-0.2604, w:0.9655}]
}, right: {
pinky:[{x: 0.5881, y:0.1728, z:0.1114, w:0.7823},{x: 0.5704, y:-0.0052, z:-0.0075, w:0.8213},{x: 0.6069, y:0.0046, z:0.006, w:0.7947}],
ring:[{x: 0.5729, y:0.1181, z:0.0898, w:0.8061},{x: 0.5332, y:-0.003, z:-0.0048, w:0.846},{x: 0.5773, y:0.0035, z:0.005, w:0.8165}],
middle:[{x: 0.543, y:0.0468, z:0.0332, w:0.8378},{x: 0.5419, y:0.0034, z:0.0052, w:0.8404},{x: 0.5047, y:-0.0037, z:-0.0064, w:0.8632}],
index:[{x: 0.306, y:-0.0076, z:-0.0584, w:0.9502},{x: 0.6409, y:-0.005, z:-0.006, w:0.7675},{x: 0.5646, y:0.0129, z:0.0189, w:0.8251}],
thumb:[{x: 0.313, y:0.0352, z:-0.3181, w:0.8942},{x: 0, y:0, z:0.3698, w:0.9291},{x: 0, y:0, z:0.2609, w:0.9654}]
}
};
// snapshot for the default pose
var dataDefault = {
left:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
set: false
},
right:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
set: false
}
};
// joint data for the current frame
var dataCurrent = {
left:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}]
},
right:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}]
}
};
// interpolated values on joint data to smooth movement
var dataDelta = {
left:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}]
},
right:{
pinky:[{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
middle: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
ring: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
thumb: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}],
index: [{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0},{x: 0, y: 0, z: 0, w: 0}]
}
};
// Acquire an updated value per hand every 5 frames when finger is touching (faster in)
var touchAnimationSteps = 5;
// Acquire an updated value per hand every 10 frames when finger is returning to default position (slower out)
var defaultAnimationSteps = 10;
// Debugging info
var showSphere = false;
var showLines = false;
// These get set up on creation
var linesCreated = false;
var sphereCreated = false;
// Register object with API Debugger
var varsToDebug = {
scriptLoaded: false,
toggleDebugSphere: function(){
showSphere = !showSphere;
if (showSphere && !sphereCreated) {
createDebugSphere();
sphereCreated = true;
}
},
toggleDebugLines: function(){
showLines = !showLines;
if (showLines && !linesCreated) {
createDebugLines();
linesCreated = true;
}
},
fingerPercent: {
left: {
pinky: 0.38,
middle: 0.38,
ring: 0.38,
thumb: 0.38,
index: 0.38
} ,
right: {
pinky: 0.38,
middle: 0.38,
ring: 0.38,
thumb: 0.38,
index: 0.38
}
},
triggerValues: {
leftTriggerValue: 0,
leftTriggerClicked: 0,
rightTriggerValue: 0,
rightTriggerClicked: 0,
leftSecondaryValue: 0,
rightSecondaryValue: 0
},
palmData: {
left: new Palm(),
right: new Palm()
},
offset: {x:0, y:0, z:0},
avatarLoaded: false
};
// Add/Subtract the joint data - per finger joint
function addVals(val1, val2, sign) {
var val = [];
if (val1.length != val2.length) return;
for (var i = 0; i < val1.length; i++) {
val.push({x: 0, y: 0, z: 0, w: 0});
val[i].x = val1[i].x + sign*val2[i].x;
val[i].y = val1[i].y + sign*val2[i].y;
val[i].z = val1[i].z + sign*val2[i].z;
val[i].w = val1[i].w + sign*val2[i].w;
}
return val;
}
// Multiply/Divide the joint data - per finger joint
function multiplyValsBy(val1, num) {
var val = [];
for (var i = 0; i < val1.length; i++) {
val.push({x: 0, y: 0, z: 0, w: 0});
val[i].x = val1[i].x * num;
val[i].y = val1[i].y * num;
val[i].z = val1[i].z * num;
val[i].w = val1[i].w * num;
}
return val;
}
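A hedged illustration (not part of the original file) of how these helpers are presumably combined with the animation step counts defined above: the per-frame delta toward a target pose is the difference between the target and current joint rotations divided by the step count.
// Illustrative only (assumed usage): per-frame delta moving the left index finger
// from its current pose toward the closed pose over touchAnimationSteps frames.
var deltaToClose = multiplyValsBy(addVals(dataClose.left.index, dataCurrent.left.index, -1), 1.0 / touchAnimationSteps);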
// Calculate a finger's length by adding up its joint lengths
function getJointDistances(jointNamesArray) {
var result = {distances: [], totalDistance: 0};
for (var i = 1; i < jointNamesArray.length; i++) {
var index0 = MyAvatar.getJointIndex(jointNamesArray[i-1]);
var index1 = MyAvatar.getJointIndex(jointNamesArray[i]);
var pos0 = MyAvatar.getJointPosition(index0);
var pos1 = MyAvatar.getJointPosition(index1);
var distance = Vec3.distance(pos0, pos1);
result.distances.push(distance);
result.totalDistance += distance;
}
return result;
}
function dataRelativeToWorld(side, dataIn, dataOut) {
var handJoint = handJointNames[side];
var jointIndex = MyAvatar.getJointIndex(handJoint);
var worldPosHand = MyAvatar.jointToWorldPoint({x:0, y:0, z:0}, jointIndex);
dataOut.position = MyAvatar.jointToWorldPoint(dataIn.position, jointIndex);
// dataOut.perpendicular = Vec3.subtract(MyAvatar.jointToWorldPoint(dataIn.perpendicular, jointIndex), worldPosHand);
var localPerpendicular = side == "right" ? {x:0.2, y:0, z:1} : {x:-0.2, y:0, z:1};
dataOut.perpendicular = Vec3.normalize(Vec3.subtract(MyAvatar.jointToWorldPoint(localPerpendicular, jointIndex), worldPosHand));
dataOut.distance = dataIn.distance;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
dataOut.fingers[finger] = MyAvatar.jointToWorldPoint(dataIn.fingers[finger], jointIndex);
}
}
function dataRelativeToHandJoint(side, dataIn, dataOut) {
var handJoint = handJointNames[side];
var jointIndex = MyAvatar.getJointIndex(handJoint);
var worldPosHand = MyAvatar.jointToWorldPoint({x:0, y:0, z:0}, jointIndex);
dataOut.position = MyAvatar.worldToJointPoint(dataIn.position, jointIndex);
dataOut.perpendicular = MyAvatar.worldToJointPoint(Vec3.sum(worldPosHand, dataIn.perpendicular), jointIndex);
dataOut.distance = dataIn.distance;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
dataOut.fingers[finger] = MyAvatar.worldToJointPoint(dataIn.fingers[finger], jointIndex);
}
}
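// The palm data is computed in hand-joint space (dataRelativeToHandJoint) and converted back to
// world space every frame (dataRelativeToWorld); this keeps the cached estimate valid as the
// avatar moves around without re-running the full estimation.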
// Calculate the sphere used to look up entities, the center of the palm, the perpendicular vector from the palm plane, and the origin of the finger rays
function estimatePalmData(side) {
// Return data object
var data = new Palm();
var jointOffset = { x: 0, y: 0, z: 0 };
var upperSide = side[0].toUpperCase() + side.substring(1);
var jointIndexHand = MyAvatar.getJointIndex(upperSide + "Hand");
// Store position of the hand joint
var worldPosHand = MyAvatar.jointToWorldPoint(jointOffset, jointIndexHand);
var minusWorldPosHand = {x:-worldPosHand.x, y:-worldPosHand.y, z:-worldPosHand.z};
// Data for finger rays
var directions = {pinky: undefined, middle: undefined, ring: undefined, thumb: undefined, index: undefined};
var positions = {pinky: undefined, middle: undefined, ring: undefined, thumb: undefined, index: undefined};
var thumbLength = 0;
var weightCount = 0;
// Calculate palm center
var handJointWeight = 1;
var fingerJointWeight = 2;
var palmCenter = {x:0, y:0, z:0};
palmCenter = Vec3.sum(worldPosHand, palmCenter);
weightCount += handJointWeight;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
var jointSuffixes = 4; // Get 4 joint names with suffix numbers (1, 2, 3, 4)
var jointNames = getJointNames(side, finger, jointSuffixes);
var fingerLength = getJointDistances(jointNames).totalDistance;
var jointIndex = MyAvatar.getJointIndex(jointNames[0]);
positions[finger] = MyAvatar.jointToWorldPoint(jointOffset, jointIndex);
directions[finger] = Vec3.normalize(Vec3.sum(positions[finger], minusWorldPosHand));
data.fingers[finger] = Vec3.sum(positions[finger], Vec3.multiply(fingerLength, directions[finger]));
if (finger != "thumb") {
// finger joints have double the weight of the hand joint,
// which gives a better estimate of the palm position
palmCenter = Vec3.sum(Vec3.multiply(fingerJointWeight, positions[finger]), palmCenter);
weightCount += fingerJointWeight;
} else {
thumbLength = fingerLength;
}
}
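// At this point palmCenter = handPos + 2 * (sum of the four non-thumb knuckle positions)
// and weightCount = 1 + 4 * 2 = 9, so the division below gives the weighted average used as the palm position.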
// the perpendicular vector changes direction depending on the side
data.perpendicular = (side == "right") ?
Vec3.normalize(Vec3.cross(directions.index, directions.pinky)):
Vec3.normalize(Vec3.cross(directions.pinky, directions.index));
data.position = Vec3.multiply(1.0/weightCount, palmCenter);
if (side == "right") varsToDebug.offset = MyAvatar.worldToJointPoint(worldPosHand, jointIndexHand);
var palmDistanceMultiplier = 1.55; // 1.55 found by trial and error as the multiplier that gives the sphere radius best fitting the hand
data.distance = palmDistanceMultiplier*Vec3.distance(data.position, positions.index);
// move back thumb ray origin
var thumbBackMultiplier = 0.2;
data.fingers.thumb = Vec3.sum(data.fingers.thumb, Vec3.multiply(-thumbBackMultiplier * thumbLength, data.perpendicular));
//return getDataRelativeToHandJoint(side, data);
dataRelativeToHandJoint(side, data, palmData[side]);
palmData[side].set = true;
// return palmData[side];
}
// Register GlobalDebugger for API Debugger
Script.registerValue("GlobalDebugger", varsToDebug);
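// Once registered, the debug helpers are presumably invocable by name from the script debugging window, e.g.:
//   GlobalDebugger.toggleDebugLines();  // show/hide the finger and palm rays
//   GlobalDebugger.toggleDebugSphere(); // show/hide the entity-lookup spheres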
// store the rays for the fingers - only for debug purposes
var fingerRays = {
left:{
pinky: undefined,
middle: undefined,
ring: undefined,
thumb: undefined,
index: undefined
},
right:{
pinky: undefined,
middle: undefined,
ring: undefined,
thumb: undefined,
index: undefined
}
};
// Create debug overlays - finger rays + palm rays + spheres
var palmRay, sphereHand;
function createDebugLines() {
for (var i = 0; i < fingerKeys.length; i++) {
fingerRays.left[fingerKeys[i]] = Overlays.addOverlay("line3d", {
color: { red: 0, green: 0, blue: 255 },
start: { x:0, y:0, z:0 },
end: { x:0, y:1, z:0 },
visible: showLines
});
fingerRays.right[fingerKeys[i]] = Overlays.addOverlay("line3d", {
color: { red: 0, green: 0, blue: 255 },
start: { x:0, y:0, z:0 },
end: { x:0, y:1, z:0 },
visible: showLines
});
}
palmRay = {
left: Overlays.addOverlay("line3d", {
color: { red: 255, green: 0, blue: 0 },
start: { x:0, y:0, z:0 },
end: { x:0, y:1, z:0 },
visible: showLines
}),
right: Overlays.addOverlay("line3d", {
color: { red: 255, green: 0, blue: 0 },
start: { x:0, y:0, z:0 },
end: { x:0, y:1, z:0 },
visible: showLines
})
};
linesCreated = true;
}
function createDebugSphere() {
sphereHand = {
right: Overlays.addOverlay("sphere", {
position: MyAvatar.position,
color: { red: 0, green: 255, blue: 0 },
scale: { x: 0.01, y: 0.01, z: 0.01 },
visible: showSphere
}),
left: Overlays.addOverlay("sphere", {
position: MyAvatar.position,
color: { red: 0, green: 255, blue: 0 },
scale: { x: 0.01, y: 0.01, z: 0.01 },
visible: showSphere
})
};
sphereCreated = true;
}
function acquireDefaultPose(side) {
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
var jointSuffixes = 3; // We need the rotation of joints 1, 2 and 3
var names = getJointNames(side, finger, jointSuffixes);
for (var j = 0; j < names.length; j++) {
var index = MyAvatar.getJointIndex(names[j]);
var rotation = MyAvatar.getJointRotation(index);
dataDefault[side][finger][j] = dataCurrent[side][finger][j] = rotation;
}
}
dataDefault[side].set = true;
}
function updateSphereHand(side) {
var data = new Palm();
dataRelativeToWorld(side, palmData[side], data);
varsToDebug.palmData[side] = palmData[side];
var palmPoint = data.position;
var LOOKUP_DISTANCE_MULTIPLIER = 1.5;
var dist = LOOKUP_DISTANCE_MULTIPLIER*data.distance;
// Situate the debugging overlays
var checkOffset = { x: data.perpendicular.x * dist,
y: data.perpendicular.y * dist,
z: data.perpendicular.z * dist };
var spherePos = Vec3.sum(palmPoint, checkOffset);
var checkPoint = Vec3.sum(palmPoint, Vec3.multiply(2, checkOffset));
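// Geometry recap: spherePos sits dist away from the palm center along the palm's perpendicular,
// checkPoint twice as far; entities are searched within radius dist around spherePos, and each
// finger ray runs from the fingertip estimate toward checkPoint.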
if (showLines) {
Overlays.editOverlay(palmRay[side], {
start: palmPoint,
end: checkPoint,
visible: showLines
});
for (var i = 0; i < fingerKeys.length; i++) {
Overlays.editOverlay(fingerRays[side][fingerKeys[i]], {
start: data.fingers[fingerKeys[i]],
end: checkPoint,
visible: showLines
});
}
}
if (showSphere) {
Overlays.editOverlay(sphereHand[side], {
position: spherePos,
scale: {
x: 2*dist,
y: 2*dist,
z: 2*dist
},
visible: showSphere
});
}
// Update the intersection of only one finger at a time
var finger = fingerKeys[updateFingerWithIndex];
var grabbables = Entities.findEntities(spherePos, dist);
var newFingerData = dataDefault[side][finger];
var animationSteps = defaultAnimationSteps;
if (grabbables.length > 0) {
var origin = data.fingers[finger];
var direction = Vec3.normalize(Vec3.subtract(checkPoint, origin));
var intersection = Entities.findRayIntersection({origin: origin, direction: direction}, true, grabbables, [], true, false);
var percent = 0; // Initialize
var isAbleToGrab = intersection.intersects && intersection.distance < LOOKUP_DISTANCE_MULTIPLIER*dist;
if (isAbleToGrab && !getTouching(side)) {
acquireDefaultPose(side); // take a snapshot of the default pose before touch starts
newFingerData = dataDefault[side][finger]; // assign default pose to finger data
}
// Store if this finger is touching something
isTouching[side][finger] = isAbleToGrab;
if (isAbleToGrab) {
// update the open/close percentage for this finger
var FINGER_REACT_MULTIPLIER = 2.8;
percent = intersection.distance/(FINGER_REACT_MULTIPLIER*dist);
var THUMB_FACTOR = 0.2;
var FINGER_FACTOR = 0.05;
var grabMultiplier = finger === "thumb" ? THUMB_FACTOR : FINGER_FACTOR; // extra grab amount added to the finger - higher for the thumb
percent += grabMultiplier * grabPercent[side];
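// The ray contribution to percent is at most 1.5 * dist / (2.8 * dist) ~= 0.54 (because of the
// isAbleToGrab distance check), and the trigger squeeze adds up to 0.05 for fingers or 0.2 for the thumb.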
// Calculate new interpolation data
var totalDistance = addVals(dataClose[side][finger], dataOpen[side][finger], -1);
newFingerData = addVals(dataOpen[side][finger], multiplyValsBy(totalDistance, percent), 1); // assign close/open ratio to finger to simulate touch
animationSteps = touchAnimationSteps;
}
varsToDebug.fingerPercent[side][finger] = percent;
}
// Calculate animation increments
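// Each finger's delta is refreshed only on its turn (every fingerKeys.length frames) but applied
// every frame in Script.update, so with touchAnimationSteps = 5 the target is reached just before
// the next refresh, while defaultAnimationSteps = 10 covers about half the remaining gap per refresh.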
dataDelta[side][finger] = multiplyValsBy(addVals(newFingerData, dataCurrent[side][finger], -1), 1.0/animationSteps);
}
// Build the finger joint names for a given side and finger
function getJointNames(side, finger, count) {
var names = [];
for (var i = 1; i < count+1; i++) {
var name = side[0].toUpperCase()+side.substring(1)+"Hand"+finger[0].toUpperCase()+finger.substring(1)+(i);
names.push(name);
}
return names;
}
// Capture the controller values
var leftTriggerPress = function (value) {
varsToDebug.triggerValues.leftTriggerValue = value;
// the trigger value drives the hand-close percentage
grabPercent.left = value;
};
var leftTriggerClick = function (value) {
varsToDebug.triggerValues.leftTriggerClicked = value;
};
var rightTriggerPress = function (value) {
varsToDebug.triggerValues.rightTriggerValue = value;
// the trigger value drives the hand-close percentage
grabPercent.right = value;
};
var rightTriggerClick = function (value) {
varsToDebug.triggerValues.rightTriggerClicked = value;
};
var leftSecondaryPress = function (value) {
varsToDebug.triggerValues.leftSecondaryValue = value;
};
var rightSecondaryPress = function (value) {
varsToDebug.triggerValues.rightSecondaryValue = value;
};
var MAPPING_NAME = "com.highfidelity.handTouch";
var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from([Controller.Standard.RT]).peek().to(rightTriggerPress);
mapping.from([Controller.Standard.RTClick]).peek().to(rightTriggerClick);
mapping.from([Controller.Standard.LT]).peek().to(leftTriggerPress);
mapping.from([Controller.Standard.LTClick]).peek().to(leftTriggerClick);
mapping.from([Controller.Standard.RB]).peek().to(rightSecondaryPress);
mapping.from([Controller.Standard.LB]).peek().to(leftSecondaryPress);
mapping.from([Controller.Standard.LeftGrip]).peek().to(leftSecondaryPress);
mapping.from([Controller.Standard.RightGrip]).peek().to(rightSecondaryPress);
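// peek() presumably lets these routes observe the controller values without consuming them,
// so the standard grab and teleport mappings keep receiving the same inputs.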
Controller.enableMapping(MAPPING_NAME);
if (showLines && !linesCreated) {
createDebugLines();
linesCreated = true;
}
if (showSphere && !sphereCreated) {
createDebugSphere();
sphereCreated = true;
}
function getTouching(side) {
var animating = false;
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
animating = animating || isTouching[side][finger];
}
return animating; // true if any finger on this hand is touching
}
function reEstimatePalmData() {
["right", "left"].forEach(function(side){
estimatePalmData(side);
});
}
MyAvatar.onLoadComplete.connect(function () {
// Sometimes the rig is not ready when this signal is triggered, so keep re-estimating periodically
console.log("avatar loaded");
Script.setInterval(function(){
reEstimatePalmData();
}, 2000);
});
MyAvatar.sensorToWorldScaleChanged.connect(function(){
reEstimatePalmData();
});
Script.scriptEnding.connect(function () {
["right", "left"].forEach(function(side){
if (linesCreated) {
Overlays.deleteOverlay(palmRay[side]);
}
if (sphereCreated) {
Overlays.deleteOverlay(sphereHand[side]);
}
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
var jointSuffixes = 3; // We need to clear joints 1, 2 and 3
var names = getJointNames(side, finger, jointSuffixes);
for (var j = 0; j < names.length; j++) {
var index = MyAvatar.getJointIndex(names[j]);
MyAvatar.clearJointData(index);
}
if (linesCreated) {
Overlays.deleteOverlay(fingerRays[side][finger]);
}
}
});
});
Script.update.connect(function(){
// index of the finger that needs to be updated this frame
updateFingerWithIndex = (updateFingerWithIndex < fingerKeys.length-1) ? updateFingerWithIndex + 1 : 0;
["right", "left"].forEach(function(side){
if (!palmData[side].set) {
reEstimatePalmData();
}
// recalculate the base data
updateSphereHand(side);
// these vars manage the transition back to the default pose
var isHandTouching = getTouching(side);
countToDefault[side] = isHandTouching ? 0 : countToDefault[side] + 1;
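// countToDefault counts frames since this hand last touched something; the joints keep being
// driven toward the default pose for 5 * touchAnimationSteps frames, after which their joint
// data is cleared so other animations regain control of the hand.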
for (var i = 0; i < fingerKeys.length; i++) {
var finger = fingerKeys[i];
var jointSuffixes = 3; // We need to update the rotation of joints 1, 2 and 3
var names = getJointNames(side, finger, jointSuffixes);
// Add the animation increments
dataCurrent[side][finger] = addVals(dataCurrent[side][finger], dataDelta[side][finger], 1);
// update every finger joint
for (var j = 0; j < names.length; j++) {
var index = MyAvatar.getJointIndex(names[j]);
// keep driving the joints while touching or while blending back to the default pose; otherwise release them
if (isHandTouching || (dataDefault[side].set && countToDefault[side] < 5*touchAnimationSteps)) {
var quatRot = dataCurrent[side][finger][j];
MyAvatar.setJointRotation(index, quatRot);
} else {
MyAvatar.clearJointData(index);
}
}
}
});
});
}());

View file

@ -225,7 +225,7 @@ function adjustPositionPerBoundingBox(position, direction, registration, dimensi
var TOOLS_PATH = Script.resolvePath("assets/images/tools/");
var GRABBABLE_ENTITIES_MENU_CATEGORY = "Edit";
var GRABBABLE_ENTITIES_MENU_ITEM = "Create Entities As Grabbable";
var GRABBABLE_ENTITIES_MENU_ITEM = "Create Entities As Grabbable (except Zones, Particles, and Lights)";
var toolBar = (function () {
var EDIT_SETTING = "io.highfidelity.isEditing"; // for communication with other scripts
@ -239,6 +239,7 @@ var toolBar = (function () {
var dimensions = properties.dimensions ? properties.dimensions : DEFAULT_DIMENSIONS;
var position = getPositionToCreateEntity();
var entityID = null;
if (position !== null && position !== undefined) {
var direction;
if (Camera.mode === "entity" || Camera.mode === "independent") {
@ -278,9 +279,13 @@ var toolBar = (function () {
position = grid.snapToSurface(grid.snapToGrid(position, false, dimensions), dimensions);
properties.position = position;
if (Menu.isOptionChecked(GRABBABLE_ENTITIES_MENU_ITEM)) {
if (Menu.isOptionChecked(GRABBABLE_ENTITIES_MENU_ITEM) &&
!(properties.type === "Zone" || properties.type === "Light" || properties.type === "ParticleEffect")) {
properties.userData = JSON.stringify({ grabbableKey: { grabbable: true } });
} else {
properties.userData = JSON.stringify({ grabbableKey: { grabbable: false } });
}
entityID = Entities.addEntity(properties);
if (properties.type === "ParticleEffect") {