Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-08-04 06:13:09 +02:00)

Commit f83599f855: Merge branch 'master' into 21040
39 changed files with 1965 additions and 553 deletions
MarketplaceComboBox.qml (103 lines deleted)

@@ -1,103 +0,0 @@
//
// MarketplaceComboBox.qml
//
// Created by Elisa Lupin-Jimenez on 3 Aug 2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

import QtQuick 2.5
import QtQuick.Controls 1.4
import QtWebChannel 1.0
import QtWebEngine 1.1
import QtWebSockets 1.0
import "qrc:///qtwebchannel/qwebchannel.js" as WebChannel

import "controls"
import "controls-uit" as Controls
import "styles"
import "styles-uit"


Rectangle {
    HifiConstants { id: hifi }
    id: marketplaceComboBox
    anchors.fill: parent
    color: hifi.colors.baseGrayShadow
    property var currentUrl: "https://metaverse.highfidelity.com/marketplace"

    Controls.BaseWebView {
        id: webview
        url: currentUrl
        anchors.top: switchMarketView.bottom
        width: parent.width
        height: parent.height - 40
        focus: true

        Timer {
            id: zipTimer
            running: false
            repeat: false
            interval: 1500
            property var handler;
            onTriggered: handler();
        }

        property var autoCancel: 'var element = $("a.btn.cancel");
                                  element.click();'

        onNewViewRequested: {
            var component = Qt.createComponent("Browser.qml");
            var newWindow = component.createObject(desktop);
            request.openIn(newWindow.webView);
            if (File.isZippedFbx(desktop.currentUrl)) {
                zipTimer.handler = function() {
                    newWindow.destroy();
                    runJavaScript(autoCancel);
                }
                zipTimer.start();
            }
        }

        property var simpleDownload: 'var element = $("a.download-file");
                                      element.removeClass("download-file");
                                      element.removeAttr("download");'

        onLinkHovered: {
            desktop.currentUrl = hoveredUrl;
            // add an error message for non-fbx files
            if (File.isZippedFbx(desktop.currentUrl)) {
                runJavaScript(simpleDownload, function(){console.log("ran the JS");});
            }

        }

    }

    Controls.ComboBox {
        id: switchMarketView
        anchors.top: parent.top
        anchors.right: parent.right
        colorScheme: hifi.colorSchemes.dark
        width: 200
        height: 40
        visible: true
        model: ["Marketplace", "Clara.io"]
        onCurrentIndexChanged: {
            if (currentIndex === 0) { webview.url = "https://metaverse.highfidelity.com/marketplace"; }
            if (currentIndex === 1) { webview.url = "https://clara.io/library"; }
        }

    }

    Controls.Label {
        id: switchMarketLabel
        anchors.verticalCenter: switchMarketView.verticalCenter
        anchors.right: switchMarketView.left
        color: hifi.colors.white
        text: "Explore interesting content from: "
    }

}
interface/resources/qml/Marketplaces.qml (new file, 167 lines)
@@ -0,0 +1,167 @@
//
// Marketplaces.qml
//
// Created by Elisa Lupin-Jimenez on 3 Aug 2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

import QtQuick 2.5
import QtQuick.Controls 1.4
import QtWebChannel 1.0
import QtWebEngine 1.1
import QtWebSockets 1.0
import "qrc:///qtwebchannel/qwebchannel.js" as WebChannel

import "controls"
import "controls-uit" as Controls
import "styles"
import "styles-uit"


Rectangle {
    HifiConstants { id: hifi }
    id: marketplace
    anchors.fill: parent
    property var marketplacesUrl: "../../scripts/system/html/marketplaces.html"
    property int statusBarHeight: 50
    property int statusMargin: 50
    property string standardMessage: "Check out other marketplaces."
    property string claraMessage: "Choose a model and click Download -> Autodesk FBX."
    property string claraError: "High Fidelity only supports Autodesk FBX models."

    Controls.BaseWebView {
        id: webview
        url: marketplacesUrl
        anchors.top: marketplace.top
        width: parent.width
        height: parent.height - statusBarHeight
        focus: true

        Timer {
            id: zipTimer
            running: false
            repeat: false
            interval: 1500
            property var handler;
            onTriggered: handler();
        }

        Timer {
            id: alertTimer
            running: false
            repeat: false
            interval: 9000
            property var handler;
            onTriggered: handler();
        }

        property var autoCancel: 'var element = $("a.btn.cancel");
                                  element.click();'

        property var simpleDownload: 'var element = $("a.download-file");
                                      element.removeClass("download-file");
                                      element.removeAttr("download");'

        function displayErrorStatus() {
            alertTimer.handler = function() {
                statusLabel.text = claraMessage;
                statusBar.color = hifi.colors.blueHighlight;
                statusIcon.text = hifi.glyphs.info;
            }
            alertTimer.start();
        }

        property var notFbxHandler: 'var element = $("a.btn.btn-primary.viewer-button.download-file")
                                     element.click();'

        // this code is for removing other file types from Clara.io's download options
        //property var checkFileType: "$('[data-extension]:not([data-extension=\"fbx\"])').parent().remove()"

        onLinkHovered: {
            desktop.currentUrl = hoveredUrl;
            //runJavaScript(checkFileType, function(){console.log("Remove filetypes JS injection");});
            if (File.isZippedFbx(desktop.currentUrl)) {
                runJavaScript(simpleDownload, function(){console.log("Download JS injection");});
                return;
            }

            if (File.isZipped(desktop.currentUrl)) {
                statusLabel.text = claraError;
                statusBar.color = hifi.colors.redHighlight;
                statusIcon.text = hifi.glyphs.alert;
                runJavaScript(notFbxHandler, displayErrorStatus());
            }

        }

        onLoadingChanged: {
            if (File.isClaraLink(webview.url)) {
                statusLabel.text = claraMessage;
            } else {
                statusLabel.text = standardMessage;
            }
            statusBar.color = hifi.colors.blueHighlight;
            statusIcon.text = hifi.glyphs.info;
        }

        onNewViewRequested: {
            var component = Qt.createComponent("Browser.qml");
            var newWindow = component.createObject(desktop);
            request.openIn(newWindow.webView);
            if (File.isZippedFbx(desktop.currentUrl)) {
                runJavaScript(autoCancel);
                zipTimer.handler = function() {
                    newWindow.destroy();
                }
                zipTimer.start();
            }
        }

    }

    Rectangle {
        id: statusBar
        anchors.top: webview.bottom
        anchors.bottom: parent.bottom
        anchors.left: parent.left
        anchors.right: parent.right
        color: hifi.colors.blueHighlight

        Controls.Button {
            id: switchMarketView
            anchors.right: parent.right
            anchors.rightMargin: statusMargin
            anchors.verticalCenter: parent.verticalCenter
            width: 150
            text: "See all markets"
            onClicked: {
                webview.url = "../../scripts/system/html/marketplaces.html";
                statusLabel.text = standardMessage;
            }
        }

        Controls.Label {
            id: statusLabel
            anchors.verticalCenter: switchMarketView.verticalCenter
            anchors.left: parent.left
            anchors.leftMargin: statusMargin
            color: hifi.colors.white
            text: standardMessage
            size: 18
        }

        HiFiGlyphs {
            id: statusIcon
            anchors.right: statusLabel.left
            anchors.verticalCenter: statusLabel.verticalCenter
            text: hifi.glyphs.info
            color: hifi.colors.white
            size: hifi.fontSizes.tableHeadingIcon
        }

    }

}
@@ -176,8 +176,6 @@ static const int MAX_CONCURRENT_RESOURCE_DOWNLOADS = 16;
// For processing on QThreadPool, target 2 less than the ideal number of threads, leaving
// 2 logical cores available for time sensitive tasks.
static const int MIN_PROCESSING_THREAD_POOL_SIZE = 2;
static const int PROCESSING_THREAD_POOL_SIZE = std::max(MIN_PROCESSING_THREAD_POOL_SIZE,
                                                         QThread::idealThreadCount() - 2);

static const QString SNAPSHOT_EXTENSION = ".jpg";
static const QString SVO_EXTENSION = ".svo";

@@ -537,7 +535,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
    PluginContainer* pluginContainer = dynamic_cast<PluginContainer*>(this); // set the container for any plugins that care
    PluginManager::getInstance()->setContainer(pluginContainer);

    QThreadPool::globalInstance()->setMaxThreadCount(PROCESSING_THREAD_POOL_SIZE);
    QThreadPool::globalInstance()->setMaxThreadCount(MIN_PROCESSING_THREAD_POOL_SIZE);
    thread()->setPriority(QThread::HighPriority);
    thread()->setObjectName("Main Thread");

@@ -707,6 +705,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
    connect(addressManager.data(), &AddressManager::hostChanged, this, &Application::updateWindowTitle);
    connect(this, &QCoreApplication::aboutToQuit, addressManager.data(), &AddressManager::storeCurrentAddress);

    connect(this, &Application::activeDisplayPluginChanged, this, &Application::updateThreadPoolCount);

    // Save avatar location immediately after a teleport.
    connect(getMyAvatar(), &MyAvatar::positionGoneTo,
            DependencyManager::get<AddressManager>().data(), &AddressManager::storeCurrentAddress);

@@ -5727,3 +5727,18 @@ void Application::sendHoverLeaveEntity(QUuid id, PointerEvent event) {
    EntityItemID entityItemID(id);
    emit getEntities()->hoverLeaveEntity(entityItemID, event);
}

// FIXME? perhaps two, one for the main thread and one for the offscreen UI rendering thread?
static const int UI_RESERVED_THREADS = 1;
// Windows won't let you have all the cores
static const int OS_RESERVED_THREADS = 1;

void Application::updateThreadPoolCount() const {
    auto reservedThreads = UI_RESERVED_THREADS + OS_RESERVED_THREADS + _displayPlugin->getRequiredThreadCount();
    auto availableThreads = QThread::idealThreadCount() - reservedThreads;
    auto threadPoolSize = std::max(MIN_PROCESSING_THREAD_POOL_SIZE, availableThreads);
    qDebug() << "Ideal Thread Count " << QThread::idealThreadCount();
    qDebug() << "Reserved threads " << reservedThreads;
    qDebug() << "Setting thread pool size to " << threadPoolSize;
    QThreadPool::globalInstance()->setMaxThreadCount(threadPoolSize);
}
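The sizing rule above replaces the fixed PROCESSING_THREAD_POOL_SIZE constant with a per-display-plugin computation. A minimal standalone sketch of that arithmetic, using plain ints in place of the Qt and display-plugin calls (the 8-core count is illustrative; the 3-thread value matches the getRequiredThreadCount() override that returns 3 later in this diff):

#include <algorithm>
#include <iostream>

// Mirrors Application::updateThreadPoolCount() above:
// pool size = max(MIN_PROCESSING_THREAD_POOL_SIZE, idealThreadCount - reserved).
static const int MIN_PROCESSING_THREAD_POOL_SIZE = 2;
static const int UI_RESERVED_THREADS = 1;   // offscreen UI rendering
static const int OS_RESERVED_THREADS = 1;   // Windows won't let you have all the cores

int main() {
    int idealThreadCount = 8;      // stand-in for QThread::idealThreadCount()
    int displayPluginThreads = 3;  // the getRequiredThreadCount() override later in this diff returns 3

    int reservedThreads = UI_RESERVED_THREADS + OS_RESERVED_THREADS + displayPluginThreads;
    int availableThreads = idealThreadCount - reservedThreads;
    int threadPoolSize = std::max(MIN_PROCESSING_THREAD_POOL_SIZE, availableThreads);

    std::cout << "Reserved " << reservedThreads << ", pool size " << threadPoolSize << std::endl;  // 5, 3
    return 0;
}

On a machine with fewer cores than the reserved count, the std::max clamp still keeps at least two worker threads in the pool.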
@@ -286,6 +286,7 @@ public slots:
    bool exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs, const glm::vec3* givenOffset = nullptr);
    bool exportEntities(const QString& filename, float x, float y, float z, float scale);
    bool importEntities(const QString& url);
    void updateThreadPoolCount() const;

    static void setLowVelocityFilter(bool lowVelocityFilter);
    Q_INVOKABLE void loadDialog();
@ -14,6 +14,7 @@
|
|||
#include <avatar/AvatarActionHold.h>
|
||||
#include <ObjectActionOffset.h>
|
||||
#include <ObjectActionSpring.h>
|
||||
#include <ObjectActionTravelOriented.h>
|
||||
#include <LogHandler.h>
|
||||
|
||||
#include "InterfaceActionFactory.h"
|
||||
|
@ -29,6 +30,8 @@ EntityActionPointer interfaceActionFactory(EntityActionType type, const QUuid& i
|
|||
return std::make_shared<ObjectActionSpring>(id, ownerEntity);
|
||||
case ACTION_TYPE_HOLD:
|
||||
return std::make_shared<AvatarActionHold>(id, ownerEntity);
|
||||
case ACTION_TYPE_TRAVEL_ORIENTED:
|
||||
return std::make_shared<ObjectActionTravelOriented>(id, ownerEntity);
|
||||
}
|
||||
|
||||
Q_ASSERT_X(false, Q_FUNC_INFO, "Unknown entity action type");
|
||||
|
|
|
@ -59,8 +59,6 @@ const float DISPLAYNAME_ALPHA = 1.0f;
|
|||
const float DISPLAYNAME_BACKGROUND_ALPHA = 0.4f;
|
||||
const glm::vec3 HAND_TO_PALM_OFFSET(0.0f, 0.12f, 0.08f);
|
||||
|
||||
const int SENSOR_TO_WORLD_MATRIX_INDEX = 65534;
|
||||
|
||||
namespace render {
|
||||
template <> const ItemKey payloadGetKey(const AvatarSharedPointer& avatar) {
|
||||
return ItemKey::Builder::opaqueShape();
|
||||
|
@ -853,32 +851,54 @@ glm::vec3 Avatar::getDefaultJointTranslation(int index) const {
|
|||
}
|
||||
|
||||
glm::quat Avatar::getAbsoluteJointRotationInObjectFrame(int index) const {
|
||||
if (index == SENSOR_TO_WORLD_MATRIX_INDEX) {
|
||||
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
|
||||
bool success;
|
||||
Transform avatarTransform;
|
||||
Transform::mult(avatarTransform, getParentTransform(success), getLocalTransform());
|
||||
glm::mat4 invAvatarMat = avatarTransform.getInverseMatrix();
|
||||
return glmExtractRotation(invAvatarMat * sensorToWorldMatrix);
|
||||
} else {
|
||||
glm::quat rotation;
|
||||
_skeletonModel->getAbsoluteJointRotationInRigFrame(index, rotation);
|
||||
return Quaternions::Y_180 * rotation;
|
||||
switch(index) {
|
||||
case SENSOR_TO_WORLD_MATRIX_INDEX: {
|
||||
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
|
||||
bool success;
|
||||
Transform avatarTransform;
|
||||
Transform::mult(avatarTransform, getParentTransform(success), getLocalTransform());
|
||||
glm::mat4 invAvatarMat = avatarTransform.getInverseMatrix();
|
||||
return glmExtractRotation(invAvatarMat * sensorToWorldMatrix);
|
||||
}
|
||||
case CONTROLLER_LEFTHAND_INDEX: {
|
||||
Transform controllerLeftHandTransform = Transform(getControllerLeftHandMatrix());
|
||||
return controllerLeftHandTransform.getRotation();
|
||||
}
|
||||
case CONTROLLER_RIGHTHAND_INDEX: {
|
||||
Transform controllerRightHandTransform = Transform(getControllerRightHandMatrix());
|
||||
return controllerRightHandTransform.getRotation();
|
||||
}
|
||||
default: {
|
||||
glm::quat rotation;
|
||||
_skeletonModel->getAbsoluteJointRotationInRigFrame(index, rotation);
|
||||
return Quaternions::Y_180 * rotation;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
glm::vec3 Avatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
|
||||
if (index == SENSOR_TO_WORLD_MATRIX_INDEX) {
|
||||
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
|
||||
bool success;
|
||||
Transform avatarTransform;
|
||||
Transform::mult(avatarTransform, getParentTransform(success), getLocalTransform());
|
||||
glm::mat4 invAvatarMat = avatarTransform.getInverseMatrix();
|
||||
return extractTranslation(invAvatarMat * sensorToWorldMatrix);
|
||||
} else {
|
||||
glm::vec3 translation;
|
||||
_skeletonModel->getAbsoluteJointTranslationInRigFrame(index, translation);
|
||||
return Quaternions::Y_180 * translation;
|
||||
switch(index) {
|
||||
case SENSOR_TO_WORLD_MATRIX_INDEX: {
|
||||
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
|
||||
bool success;
|
||||
Transform avatarTransform;
|
||||
Transform::mult(avatarTransform, getParentTransform(success), getLocalTransform());
|
||||
glm::mat4 invAvatarMat = avatarTransform.getInverseMatrix();
|
||||
return extractTranslation(invAvatarMat * sensorToWorldMatrix);
|
||||
}
|
||||
case CONTROLLER_LEFTHAND_INDEX: {
|
||||
Transform controllerLeftHandTransform = Transform(getControllerLeftHandMatrix());
|
||||
return controllerLeftHandTransform.getTranslation();
|
||||
}
|
||||
case CONTROLLER_RIGHTHAND_INDEX: {
|
||||
Transform controllerRightHandTransform = Transform(getControllerRightHandMatrix());
|
||||
return controllerRightHandTransform.getTranslation();
|
||||
}
|
||||
default: {
|
||||
glm::vec3 translation;
|
||||
_skeletonModel->getAbsoluteJointTranslationInRigFrame(index, translation);
|
||||
return Quaternions::Y_180 * translation;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -889,6 +909,10 @@ int Avatar::getJointIndex(const QString& name) const {
|
|||
Q_RETURN_ARG(int, result), Q_ARG(const QString&, name));
|
||||
return result;
|
||||
}
|
||||
int result = getFauxJointIndex(name);
|
||||
if (result != -1) {
|
||||
return result;
|
||||
}
|
||||
return _skeletonModel->isActive() ? _skeletonModel->getFBXGeometry().getJointIndex(name) : -1;
|
||||
}
|
||||
|
||||
|
|
|
@ -56,6 +56,10 @@ void AvatarActionHold::prepareForPhysicsSimulation() {
|
|||
}
|
||||
|
||||
withWriteLock([&]{
|
||||
glm::vec3 avatarRigidBodyPosition;
|
||||
glm::quat avatarRigidBodyRotation;
|
||||
getAvatarRigidBodyLocation(avatarRigidBodyPosition, avatarRigidBodyRotation);
|
||||
|
||||
if (_ignoreIK) {
|
||||
return;
|
||||
}
|
||||
|
@ -70,9 +74,6 @@ void AvatarActionHold::prepareForPhysicsSimulation() {
|
|||
palmRotation = holdingAvatar->getUncachedLeftPalmRotation();
|
||||
}
|
||||
|
||||
glm::vec3 avatarRigidBodyPosition;
|
||||
glm::quat avatarRigidBodyRotation;
|
||||
getAvatarRigidBodyLocation(avatarRigidBodyPosition, avatarRigidBodyRotation);
|
||||
|
||||
// determine the difference in translation and rotation between the avatar's
|
||||
// rigid body and the palm position. The avatar's rigid body will be moved by bullet
|
||||
|
@ -124,13 +125,20 @@ bool AvatarActionHold::getTarget(float deltaTimeStep, glm::quat& rotation, glm::
|
|||
if (pose.isValid()) {
|
||||
linearVelocity = pose.getVelocity();
|
||||
angularVelocity = pose.getAngularVelocity();
|
||||
|
||||
if (isRightHand) {
|
||||
pose = avatarManager->getMyAvatar()->getRightHandControllerPoseInAvatarFrame();
|
||||
} else {
|
||||
pose = avatarManager->getMyAvatar()->getLeftHandControllerPoseInAvatarFrame();
|
||||
}
|
||||
}
|
||||
|
||||
if (_ignoreIK && pose.isValid()) {
|
||||
// We cannot ignore other avatars IK and this is not the point of this option
|
||||
// This is meant to make the grabbing behavior more reactive.
|
||||
palmPosition = pose.getTranslation();
|
||||
palmRotation = pose.getRotation();
|
||||
Transform avatarTransform;
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
avatarTransform = myAvatar->getTransform();
|
||||
palmPosition = avatarTransform.transform(pose.getTranslation() / myAvatar->getTargetScale());
|
||||
palmRotation = avatarTransform.getRotation() * pose.getRotation();
|
||||
} else {
|
||||
glm::vec3 avatarRigidBodyPosition;
|
||||
glm::quat avatarRigidBodyRotation;
|
||||
|
@ -159,11 +167,17 @@ bool AvatarActionHold::getTarget(float deltaTimeStep, glm::quat& rotation, glm::
|
|||
}
|
||||
} else { // regular avatar
|
||||
if (isRightHand) {
|
||||
palmPosition = holdingAvatar->getRightPalmPosition();
|
||||
palmRotation = holdingAvatar->getRightPalmRotation();
|
||||
Transform controllerRightTransform = Transform(holdingAvatar->getControllerRightHandMatrix());
|
||||
Transform avatarTransform = holdingAvatar->getTransform();
|
||||
palmRotation = avatarTransform.getRotation() * controllerRightTransform.getRotation();
|
||||
palmPosition = avatarTransform.getTranslation() +
|
||||
(avatarTransform.getRotation() * controllerRightTransform.getTranslation());
|
||||
} else {
|
||||
palmPosition = holdingAvatar->getLeftPalmPosition();
|
||||
palmRotation = holdingAvatar->getLeftPalmRotation();
|
||||
Transform controllerLeftTransform = Transform(holdingAvatar->getControllerLeftHandMatrix());
|
||||
Transform avatarTransform = holdingAvatar->getTransform();
|
||||
palmRotation = avatarTransform.getRotation() * controllerLeftTransform.getRotation();
|
||||
palmPosition = avatarTransform.getTranslation() +
|
||||
(avatarTransform.getRotation() * controllerLeftTransform.getTranslation());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -532,11 +532,21 @@ void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
|
|||
_hmdSensorFacing = getFacingDir2D(_hmdSensorOrientation);
|
||||
}
|
||||
|
||||
void MyAvatar::updateJointFromController(controller::Action poseKey, ThreadSafeValueCache<glm::mat4>& matrixCache) {
|
||||
assert(QThread::currentThread() == thread());
|
||||
auto userInputMapper = DependencyManager::get<UserInputMapper>();
|
||||
controller::Pose controllerPose = userInputMapper->getPoseState(poseKey);
|
||||
Transform transform;
|
||||
transform.setTranslation(controllerPose.getTranslation());
|
||||
transform.setRotation(controllerPose.getRotation());
|
||||
glm::mat4 controllerMatrix = transform.getMatrix();
|
||||
matrixCache.set(controllerMatrix);
|
||||
}
|
||||
|
||||
// best called at end of main loop, after physics.
|
||||
// update sensor to world matrix from current body position and hmd sensor.
|
||||
// This is so the correct camera can be used for rendering.
|
||||
void MyAvatar::updateSensorToWorldMatrix() {
|
||||
|
||||
// update the sensor mat so that the body position will end up in the desired
|
||||
// position when driven from the head.
|
||||
glm::mat4 desiredMat = createMatFromQuatAndPos(getOrientation(), getPosition());
|
||||
|
@ -545,10 +555,14 @@ void MyAvatar::updateSensorToWorldMatrix() {
|
|||
lateUpdatePalms();
|
||||
|
||||
if (_enableDebugDrawSensorToWorldMatrix) {
|
||||
DebugDraw::getInstance().addMarker("sensorToWorldMatrix", glmExtractRotation(_sensorToWorldMatrix), extractTranslation(_sensorToWorldMatrix), glm::vec4(1));
|
||||
DebugDraw::getInstance().addMarker("sensorToWorldMatrix", glmExtractRotation(_sensorToWorldMatrix),
|
||||
extractTranslation(_sensorToWorldMatrix), glm::vec4(1));
|
||||
}
|
||||
|
||||
_sensorToWorldMatrixCache.set(_sensorToWorldMatrix);
|
||||
|
||||
updateJointFromController(controller::Action::LEFT_HAND, _controllerLeftHandMatrixCache);
|
||||
updateJointFromController(controller::Action::RIGHT_HAND, _controllerRightHandMatrixCache);
|
||||
}
|
||||
|
||||
// Update avatar head rotation with sensor data
|
||||
|
@ -2215,3 +2229,31 @@ bool MyAvatar::didTeleport() {
|
|||
bool MyAvatar::hasDriveInput() const {
|
||||
return fabsf(_driveKeys[TRANSLATE_X]) > 0.0f || fabsf(_driveKeys[TRANSLATE_Y]) > 0.0f || fabsf(_driveKeys[TRANSLATE_Z]) > 0.0f;
|
||||
}
|
||||
|
||||
glm::quat MyAvatar::getAbsoluteJointRotationInObjectFrame(int index) const {
|
||||
switch(index) {
|
||||
case CONTROLLER_LEFTHAND_INDEX: {
|
||||
return getLeftHandControllerPoseInAvatarFrame().getRotation();
|
||||
}
|
||||
case CONTROLLER_RIGHTHAND_INDEX: {
|
||||
return getRightHandControllerPoseInAvatarFrame().getRotation();
|
||||
}
|
||||
default: {
|
||||
return Avatar::getAbsoluteJointRotationInObjectFrame(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
glm::vec3 MyAvatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
|
||||
switch(index) {
|
||||
case CONTROLLER_LEFTHAND_INDEX: {
|
||||
return getLeftHandControllerPoseInAvatarFrame().getTranslation();
|
||||
}
|
||||
case CONTROLLER_RIGHTHAND_INDEX: {
|
||||
return getRightHandControllerPoseInAvatarFrame().getTranslation();
|
||||
}
|
||||
default: {
|
||||
return Avatar::getAbsoluteJointTranslationInObjectFrame(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
#include <Sound.h>
|
||||
|
||||
#include <controllers/Pose.h>
|
||||
#include <controllers/Actions.h>
|
||||
|
||||
#include "Avatar.h"
|
||||
#include "AtRestDetector.h"
|
||||
|
@ -117,6 +118,9 @@ public:
|
|||
// as it moves through the world.
|
||||
void updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix);
|
||||
|
||||
// read the location of a hand controller and save the transform
|
||||
void updateJointFromController(controller::Action poseKey, ThreadSafeValueCache<glm::mat4>& matrixCache);
|
||||
|
||||
// best called at end of main loop, just before rendering.
|
||||
// update sensor to world matrix from current body position and hmd sensor.
|
||||
// This is so the correct camera can be used for rendering.
|
||||
|
@ -270,6 +274,9 @@ public:
|
|||
Q_INVOKABLE void setCharacterControllerEnabled(bool enabled);
|
||||
Q_INVOKABLE bool getCharacterControllerEnabled();
|
||||
|
||||
virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
|
||||
virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
|
||||
|
||||
public slots:
|
||||
void increaseSize();
|
||||
void decreaseSize();
|
||||
|
@ -410,9 +417,8 @@ private:
|
|||
bool _useSnapTurn { true };
|
||||
bool _clearOverlayWhenMoving { true };
|
||||
|
||||
// working copy of sensorToWorldMatrix.
|
||||
// See AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
|
||||
glm::mat4 _sensorToWorldMatrix;
|
||||
// working copies -- see AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
|
||||
glm::mat4 _sensorToWorldMatrix { glm::mat4() };
|
||||
|
||||
// cache of the current HMD sensor position and orientation
|
||||
// in sensor space.
|
||||
|
|
|
@ -24,33 +24,34 @@
|
|||
#include "AudioRingBuffer.h"
|
||||
|
||||
static const QString RING_BUFFER_OVERFLOW_DEBUG { "AudioRingBuffer::writeData has overflown the buffer. Overwriting old data." };
|
||||
static const QString DROPPED_SILENT_DEBUG { "AudioRingBuffer::addSilentSamples dropping silent samples to prevent overflow." };
|
||||
|
||||
AudioRingBuffer::AudioRingBuffer(int numFrameSamples, bool randomAccessMode, int numFramesCapacity) :
|
||||
AudioRingBuffer::AudioRingBuffer(int numFrameSamples, int numFramesCapacity) :
|
||||
_numFrameSamples(numFrameSamples),
|
||||
_frameCapacity(numFramesCapacity),
|
||||
_sampleCapacity(numFrameSamples * numFramesCapacity),
|
||||
_bufferLength(numFrameSamples * (numFramesCapacity + 1)),
|
||||
_numFrameSamples(numFrameSamples),
|
||||
_randomAccessMode(randomAccessMode),
|
||||
_overflowCount(0)
|
||||
_bufferLength(numFrameSamples * (numFramesCapacity + 1))
|
||||
{
|
||||
if (numFrameSamples) {
|
||||
_buffer = new int16_t[_bufferLength];
|
||||
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
|
||||
_nextOutput = _buffer;
|
||||
_endOfLastWrite = _buffer;
|
||||
} else {
|
||||
_buffer = NULL;
|
||||
_nextOutput = NULL;
|
||||
_endOfLastWrite = NULL;
|
||||
}
|
||||
|
||||
static QString repeatedMessage = LogHandler::getInstance().addRepeatedMessageRegex(RING_BUFFER_OVERFLOW_DEBUG);
|
||||
static QString repeatedOverflowMessage = LogHandler::getInstance().addRepeatedMessageRegex(RING_BUFFER_OVERFLOW_DEBUG);
|
||||
static QString repeatedDroppedMessage = LogHandler::getInstance().addRepeatedMessageRegex(DROPPED_SILENT_DEBUG);
|
||||
};
|
||||
|
||||
AudioRingBuffer::~AudioRingBuffer() {
|
||||
delete[] _buffer;
|
||||
}
|
||||
|
||||
void AudioRingBuffer::clear() {
|
||||
_endOfLastWrite = _buffer;
|
||||
_nextOutput = _buffer;
|
||||
}
|
||||
|
||||
void AudioRingBuffer::reset() {
|
||||
clear();
|
||||
_overflowCount = 0;
|
||||
|
@ -58,109 +59,82 @@ void AudioRingBuffer::reset() {
|
|||
|
||||
void AudioRingBuffer::resizeForFrameSize(int numFrameSamples) {
|
||||
delete[] _buffer;
|
||||
_numFrameSamples = numFrameSamples;
|
||||
_sampleCapacity = numFrameSamples * _frameCapacity;
|
||||
_bufferLength = numFrameSamples * (_frameCapacity + 1);
|
||||
_numFrameSamples = numFrameSamples;
|
||||
_buffer = new int16_t[_bufferLength];
|
||||
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
|
||||
if (_randomAccessMode) {
|
||||
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
|
||||
}
|
||||
reset();
|
||||
}
|
||||
|
||||
void AudioRingBuffer::clear() {
|
||||
_endOfLastWrite = _buffer;
|
||||
_nextOutput = _buffer;
|
||||
if (numFrameSamples) {
|
||||
_buffer = new int16_t[_bufferLength];
|
||||
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
|
||||
} else {
|
||||
_buffer = nullptr;
|
||||
}
|
||||
|
||||
reset();
|
||||
}
|
||||
|
||||
int AudioRingBuffer::readSamples(int16_t* destination, int maxSamples) {
|
||||
return readData((char*)destination, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) {
|
||||
return writeData((char*)source, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::readData(char *data, int maxSize) {
|
||||
|
||||
// only copy up to the number of samples we have available
|
||||
int numReadSamples = std::min((int)(maxSize / sizeof(int16_t)), samplesAvailable());
|
||||
|
||||
// If we're in random access mode, then we consider our number of available read samples slightly
|
||||
// differently. Namely, if anything has been written, we say we have as many samples as they ask for
|
||||
// otherwise we say we have nothing available
|
||||
if (_randomAccessMode) {
|
||||
numReadSamples = _endOfLastWrite ? (maxSize / sizeof(int16_t)) : 0;
|
||||
}
|
||||
int maxSamples = maxSize / sizeof(int16_t);
|
||||
int numReadSamples = std::min(maxSamples, samplesAvailable());
|
||||
|
||||
if (_nextOutput + numReadSamples > _buffer + _bufferLength) {
|
||||
// we're going to need to do two reads to get this data, it wraps around the edge
|
||||
int numSamplesToEnd = (_buffer + _bufferLength) - _nextOutput;
|
||||
|
||||
// read to the end of the buffer
|
||||
int numSamplesToEnd = (_buffer + _bufferLength) - _nextOutput;
|
||||
memcpy(data, _nextOutput, numSamplesToEnd * sizeof(int16_t));
|
||||
if (_randomAccessMode) {
|
||||
memset(_nextOutput, 0, numSamplesToEnd * sizeof(int16_t)); // clear it
|
||||
}
|
||||
|
||||
// read the rest from the beginning of the buffer
|
||||
memcpy(data + (numSamplesToEnd * sizeof(int16_t)), _buffer, (numReadSamples - numSamplesToEnd) * sizeof(int16_t));
|
||||
if (_randomAccessMode) {
|
||||
memset(_buffer, 0, (numReadSamples - numSamplesToEnd) * sizeof(int16_t)); // clear it
|
||||
}
|
||||
} else {
|
||||
// read the data
|
||||
memcpy(data, _nextOutput, numReadSamples * sizeof(int16_t));
|
||||
if (_randomAccessMode) {
|
||||
memset(_nextOutput, 0, numReadSamples * sizeof(int16_t)); // clear it
|
||||
}
|
||||
}
|
||||
|
||||
// push the position of _nextOutput by the number of samples read
|
||||
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numReadSamples);
|
||||
shiftReadPosition(numReadSamples);
|
||||
|
||||
return numReadSamples * sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) {
|
||||
return writeData((const char*)source, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::writeData(const char* data, int maxSize) {
|
||||
// make sure we have enough bytes left for this to be the right amount of audio
|
||||
// otherwise we should not copy that data, and leave the buffer pointers where they are
|
||||
int samplesToCopy = std::min((int)(maxSize / sizeof(int16_t)), _sampleCapacity);
|
||||
|
||||
// only copy up to the number of samples we have capacity for
|
||||
int maxSamples = maxSize / sizeof(int16_t);
|
||||
int numWriteSamples = std::min(maxSamples, _sampleCapacity);
|
||||
int samplesRoomFor = _sampleCapacity - samplesAvailable();
|
||||
if (samplesToCopy > samplesRoomFor) {
|
||||
// there's not enough room for this write. erase old data to make room for this new data
|
||||
int samplesToDelete = samplesToCopy - samplesRoomFor;
|
||||
|
||||
if (numWriteSamples > samplesRoomFor) {
|
||||
// there's not enough room for this write. erase old data to make room for this new data
|
||||
int samplesToDelete = numWriteSamples - samplesRoomFor;
|
||||
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, samplesToDelete);
|
||||
_overflowCount++;
|
||||
|
||||
qCDebug(audio) << qPrintable(RING_BUFFER_OVERFLOW_DEBUG);
|
||||
}
|
||||
|
||||
if (_endOfLastWrite + samplesToCopy <= _buffer + _bufferLength) {
|
||||
memcpy(_endOfLastWrite, data, samplesToCopy * sizeof(int16_t));
|
||||
} else {
|
||||
if (_endOfLastWrite + numWriteSamples > _buffer + _bufferLength) {
|
||||
// we're going to need to do two writes to set this data, it wraps around the edge
|
||||
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
|
||||
|
||||
// write to the end of the buffer
|
||||
memcpy(_endOfLastWrite, data, numSamplesToEnd * sizeof(int16_t));
|
||||
memcpy(_buffer, data + (numSamplesToEnd * sizeof(int16_t)), (samplesToCopy - numSamplesToEnd) * sizeof(int16_t));
|
||||
|
||||
// write the rest to the beginning of the buffer
|
||||
memcpy(_buffer, data + (numSamplesToEnd * sizeof(int16_t)), (numWriteSamples - numSamplesToEnd) * sizeof(int16_t));
|
||||
} else {
|
||||
memcpy(_endOfLastWrite, data, numWriteSamples * sizeof(int16_t));
|
||||
}
|
||||
|
||||
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, samplesToCopy);
|
||||
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, numWriteSamples);
|
||||
|
||||
return samplesToCopy * sizeof(int16_t);
|
||||
}
|
||||
|
||||
int16_t& AudioRingBuffer::operator[](const int index) {
|
||||
return *shiftedPositionAccomodatingWrap(_nextOutput, index);
|
||||
}
|
||||
|
||||
const int16_t& AudioRingBuffer::operator[] (const int index) const {
|
||||
return *shiftedPositionAccomodatingWrap(_nextOutput, index);
|
||||
}
|
||||
|
||||
void AudioRingBuffer::shiftReadPosition(unsigned int numSamples) {
|
||||
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples);
|
||||
return numWriteSamples * sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::samplesAvailable() const {
|
||||
|
@ -176,35 +150,31 @@ int AudioRingBuffer::samplesAvailable() const {
|
|||
}
|
||||
|
||||
int AudioRingBuffer::addSilentSamples(int silentSamples) {
|
||||
|
||||
// NOTE: This implementation is nearly identical to writeData save for s/memcpy/memset, refer to comments there
|
||||
int numWriteSamples = std::min(silentSamples, _sampleCapacity);
|
||||
int samplesRoomFor = _sampleCapacity - samplesAvailable();
|
||||
if (silentSamples > samplesRoomFor) {
|
||||
// there's not enough room for this write. write as many silent samples as we have room for
|
||||
silentSamples = samplesRoomFor;
|
||||
|
||||
static const QString DROPPED_SILENT_DEBUG {
|
||||
"AudioRingBuffer::addSilentSamples dropping silent samples to prevent overflow."
|
||||
};
|
||||
static QString repeatedMessage = LogHandler::getInstance().addRepeatedMessageRegex(DROPPED_SILENT_DEBUG);
|
||||
if (numWriteSamples > samplesRoomFor) {
|
||||
numWriteSamples = samplesRoomFor;
|
||||
|
||||
qCDebug(audio) << qPrintable(DROPPED_SILENT_DEBUG);
|
||||
}
|
||||
|
||||
// memset zeroes into the buffer, accomodate a wrap around the end
|
||||
// push the _endOfLastWrite to the correct spot
|
||||
if (_endOfLastWrite + silentSamples <= _buffer + _bufferLength) {
|
||||
memset(_endOfLastWrite, 0, silentSamples * sizeof(int16_t));
|
||||
} else {
|
||||
if (_endOfLastWrite + numWriteSamples > _buffer + _bufferLength) {
|
||||
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
|
||||
memset(_endOfLastWrite, 0, numSamplesToEnd * sizeof(int16_t));
|
||||
memset(_buffer, 0, (silentSamples - numSamplesToEnd) * sizeof(int16_t));
|
||||
memset(_buffer, 0, (numWriteSamples - numSamplesToEnd) * sizeof(int16_t));
|
||||
} else {
|
||||
memset(_endOfLastWrite, 0, numWriteSamples * sizeof(int16_t));
|
||||
}
|
||||
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, silentSamples);
|
||||
|
||||
return silentSamples;
|
||||
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, numWriteSamples);
|
||||
|
||||
return numWriteSamples;
|
||||
}
|
||||
|
||||
int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const {
|
||||
|
||||
// NOTE: It is possible to shift out-of-bounds if (|numSamplesShift| > 2 * _bufferLength), but this should not occur
|
||||
if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _bufferLength) {
|
||||
// this shift will wrap the position around to the beginning of the ring
|
||||
return position + numSamplesShift - _bufferLength;
|
||||
|
@ -217,13 +187,15 @@ int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int
|
|||
}
|
||||
|
||||
float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const {
|
||||
// FIXME: This is a bad measure of loudness - normal estimation uses sqrt(sum(x*x))
|
||||
float loudness = 0.0f;
|
||||
const int16_t* sampleAt = frameStart;
|
||||
const int16_t* _bufferLastAt = _buffer + _bufferLength - 1;
|
||||
const int16_t* bufferLastAt = _buffer + _bufferLength - 1;
|
||||
|
||||
for (int i = 0; i < _numFrameSamples; ++i) {
|
||||
loudness += (float) std::abs(*sampleAt);
|
||||
sampleAt = sampleAt == _bufferLastAt ? _buffer : sampleAt + 1;
|
||||
// wrap if necessary
|
||||
sampleAt = sampleAt == bufferLastAt ? _buffer : sampleAt + 1;
|
||||
}
|
||||
loudness /= _numFrameSamples;
|
||||
loudness /= AudioConstants::MAX_SAMPLE_VALUE;
|
||||
|
@ -238,10 +210,6 @@ float AudioRingBuffer::getFrameLoudness(ConstIterator frameStart) const {
|
|||
return getFrameLoudness(&(*frameStart));
|
||||
}
|
||||
|
||||
float AudioRingBuffer::getNextOutputFrameLoudness() const {
|
||||
return getFrameLoudness(_nextOutput);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::writeSamples(ConstIterator source, int maxSamples) {
|
||||
int samplesToCopy = std::min(maxSamples, _sampleCapacity);
|
||||
int samplesRoomFor = _sampleCapacity - samplesAvailable();
|
||||
|
|
|
@ -23,73 +23,69 @@ const int DEFAULT_RING_BUFFER_FRAME_CAPACITY = 10;
|
|||
|
||||
class AudioRingBuffer {
|
||||
public:
|
||||
AudioRingBuffer(int numFrameSamples, bool randomAccessMode = false, int numFramesCapacity = DEFAULT_RING_BUFFER_FRAME_CAPACITY);
|
||||
AudioRingBuffer(int numFrameSamples, int numFramesCapacity = DEFAULT_RING_BUFFER_FRAME_CAPACITY);
|
||||
~AudioRingBuffer();
|
||||
|
||||
void reset();
|
||||
void resizeForFrameSize(int numFrameSamples);
|
||||
// disallow copying
|
||||
AudioRingBuffer(const AudioRingBuffer&) = delete;
|
||||
AudioRingBuffer(AudioRingBuffer&&) = delete;
|
||||
AudioRingBuffer& operator=(const AudioRingBuffer&) = delete;
|
||||
|
||||
/// Invalidate any data in the buffer
|
||||
void clear();
|
||||
|
||||
int getSampleCapacity() const { return _sampleCapacity; }
|
||||
int getFrameCapacity() const { return _frameCapacity; }
|
||||
/// Clear and reset the overflow count
|
||||
void reset();
|
||||
|
||||
/// Resize frame size (causes a reset())
|
||||
// FIXME: discards any data in the buffer
|
||||
void resizeForFrameSize(int numFrameSamples);
|
||||
|
||||
/// Read up to maxSamples into destination (will only read up to samplesAvailable())
|
||||
/// Returns number of read samples
|
||||
int readSamples(int16_t* destination, int maxSamples);
|
||||
|
||||
/// Write up to maxSamples from source (will only write up to sample capacity)
|
||||
/// Returns number of written samples
|
||||
int writeSamples(const int16_t* source, int maxSamples);
|
||||
|
||||
int readData(char* data, int maxSize);
|
||||
int writeData(const char* data, int maxSize);
|
||||
/// Write up to maxSamples silent samples (will only write until other data exists in the buffer)
|
||||
/// This method will not overwrite existing data in the buffer, instead dropping silent samples that would overflow
|
||||
/// Returns number of written silent samples
|
||||
int addSilentSamples(int maxSamples);
|
||||
|
||||
int16_t& operator[](const int index);
|
||||
const int16_t& operator[] (const int index) const;
|
||||
/// Read up to maxSize into destination
|
||||
/// Returns number of read bytes
|
||||
int readData(char* destination, int maxSize);
|
||||
|
||||
void shiftReadPosition(unsigned int numSamples);
|
||||
/// Write up to maxSize from source
|
||||
/// Returns number of written bytes
|
||||
int writeData(const char* source, int maxSize);
|
||||
|
||||
float getNextOutputFrameLoudness() const;
|
||||
/// Returns a reference to the index-th sample offset from the current read sample
|
||||
int16_t& operator[](const int index) { return *shiftedPositionAccomodatingWrap(_nextOutput, index); }
|
||||
const int16_t& operator[] (const int index) const { return *shiftedPositionAccomodatingWrap(_nextOutput, index); }
|
||||
|
||||
/// Essentially discards the next numSamples from the ring buffer
|
||||
/// NOTE: This is not checked - it is possible to shift past written data
|
||||
/// Use samplesAvailable() to see the distance a valid shift can go
|
||||
void shiftReadPosition(unsigned int numSamples) { _nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples); }
|
||||
|
||||
int samplesAvailable() const;
|
||||
int framesAvailable() const { return (_numFrameSamples == 0) ? 0 : samplesAvailable() / _numFrameSamples; }
|
||||
float getNextOutputFrameLoudness() const { return getFrameLoudness(_nextOutput); }
|
||||
|
||||
|
||||
int getNumFrameSamples() const { return _numFrameSamples; }
|
||||
int getFrameCapacity() const { return _frameCapacity; }
|
||||
int getSampleCapacity() const { return _sampleCapacity; }
|
||||
/// Return times the ring buffer has overwritten old data
|
||||
int getOverflowCount() const { return _overflowCount; }
|
||||
|
||||
int getOverflowCount() const { return _overflowCount; } /// how many times has the ring buffer has overwritten old data
|
||||
|
||||
int addSilentSamples(int samples);
|
||||
|
||||
private:
|
||||
float getFrameLoudness(const int16_t* frameStart) const;
|
||||
|
||||
protected:
|
||||
// disallow copying of AudioRingBuffer objects
|
||||
AudioRingBuffer(const AudioRingBuffer&);
|
||||
AudioRingBuffer& operator= (const AudioRingBuffer&);
|
||||
|
||||
int16_t* shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const;
|
||||
|
||||
int _frameCapacity;
|
||||
int _sampleCapacity;
|
||||
int _bufferLength; // actual length of _buffer: will be one frame larger than _sampleCapacity
|
||||
int _numFrameSamples;
|
||||
int16_t* _nextOutput;
|
||||
int16_t* _endOfLastWrite;
|
||||
int16_t* _buffer;
|
||||
bool _randomAccessMode; /// will this ringbuffer be used for random access? if so, do some special processing
|
||||
|
||||
int _overflowCount; /// how many times has the ring buffer has overwritten old data
|
||||
|
||||
public:
|
||||
class ConstIterator { //public std::iterator < std::forward_iterator_tag, int16_t > {
|
||||
class ConstIterator {
|
||||
public:
|
||||
ConstIterator()
|
||||
: _bufferLength(0),
|
||||
_bufferFirst(NULL),
|
||||
_bufferLast(NULL),
|
||||
_at(NULL) {}
|
||||
ConstIterator(int16_t* bufferFirst, int capacity, int16_t* at)
|
||||
: _bufferLength(capacity),
|
||||
_bufferFirst(bufferFirst),
|
||||
_bufferLast(bufferFirst + capacity - 1),
|
||||
_at(at) {}
|
||||
ConstIterator();
|
||||
ConstIterator(int16_t* bufferFirst, int capacity, int16_t* at);
|
||||
ConstIterator(const ConstIterator& rhs) = default;
|
||||
|
||||
bool isNull() const { return _at == NULL; }
|
||||
|
@ -98,95 +94,143 @@ public:
|
|||
bool operator!=(const ConstIterator& rhs) { return _at != rhs._at; }
|
||||
const int16_t& operator*() { return *_at; }
|
||||
|
||||
ConstIterator& operator=(const ConstIterator& rhs) {
|
||||
_bufferLength = rhs._bufferLength;
|
||||
_bufferFirst = rhs._bufferFirst;
|
||||
_bufferLast = rhs._bufferLast;
|
||||
_at = rhs._at;
|
||||
return *this;
|
||||
}
|
||||
ConstIterator& operator=(const ConstIterator& rhs);
|
||||
ConstIterator& operator++();
|
||||
ConstIterator operator++(int);
|
||||
ConstIterator& operator--();
|
||||
ConstIterator operator--(int);
|
||||
const int16_t& operator[] (int i);
|
||||
ConstIterator operator+(int i);
|
||||
ConstIterator operator-(int i);
|
||||
|
||||
ConstIterator& operator++() {
|
||||
_at = (_at == _bufferLast) ? _bufferFirst : _at + 1;
|
||||
return *this;
|
||||
}
|
||||
|
||||
ConstIterator operator++(int) {
|
||||
ConstIterator tmp(*this);
|
||||
++(*this);
|
||||
return tmp;
|
||||
}
|
||||
|
||||
ConstIterator& operator--() {
|
||||
_at = (_at == _bufferFirst) ? _bufferLast : _at - 1;
|
||||
return *this;
|
||||
}
|
||||
|
||||
ConstIterator operator--(int) {
|
||||
ConstIterator tmp(*this);
|
||||
--(*this);
|
||||
return tmp;
|
||||
}
|
||||
|
||||
const int16_t& operator[] (int i) {
|
||||
return *atShiftedBy(i);
|
||||
}
|
||||
|
||||
ConstIterator operator+(int i) {
|
||||
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(i));
|
||||
}
|
||||
|
||||
ConstIterator operator-(int i) {
|
||||
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(-i));
|
||||
}
|
||||
|
||||
void readSamples(int16_t* dest, int numSamples) {
|
||||
auto samplesToEnd = _bufferLast - _at + 1;
|
||||
|
||||
if (samplesToEnd >= numSamples) {
|
||||
memcpy(dest, _at, numSamples * sizeof(int16_t));
|
||||
_at += numSamples;
|
||||
} else {
|
||||
auto samplesFromStart = numSamples - samplesToEnd;
|
||||
memcpy(dest, _at, samplesToEnd * sizeof(int16_t));
|
||||
memcpy(dest + samplesToEnd, _bufferFirst, samplesFromStart * sizeof(int16_t));
|
||||
|
||||
_at = _bufferFirst + samplesFromStart;
|
||||
}
|
||||
}
|
||||
|
||||
void readSamplesWithFade(int16_t* dest, int numSamples, float fade) {
|
||||
int16_t* at = _at;
|
||||
for (int i = 0; i < numSamples; i++) {
|
||||
*dest = (float)*at * fade;
|
||||
++dest;
|
||||
at = (at == _bufferLast) ? _bufferFirst : at + 1;
|
||||
}
|
||||
}
|
||||
void readSamples(int16_t* dest, int numSamples);
|
||||
void readSamplesWithFade(int16_t* dest, int numSamples, float fade);
|
||||
|
||||
private:
|
||||
int16_t* atShiftedBy(int i) {
|
||||
i = (_at - _bufferFirst + i) % _bufferLength;
|
||||
if (i < 0) {
|
||||
i += _bufferLength;
|
||||
}
|
||||
return _bufferFirst + i;
|
||||
}
|
||||
int16_t* atShiftedBy(int i);
|
||||
|
||||
private:
|
||||
int _bufferLength;
|
||||
int16_t* _bufferFirst;
|
||||
int16_t* _bufferLast;
|
||||
int16_t* _at;
|
||||
};
|
||||
|
||||
ConstIterator nextOutput() const { return ConstIterator(_buffer, _bufferLength, _nextOutput); }
|
||||
ConstIterator lastFrameWritten() const { return ConstIterator(_buffer, _bufferLength, _endOfLastWrite) - _numFrameSamples; }
|
||||
|
||||
float getFrameLoudness(ConstIterator frameStart) const;
|
||||
ConstIterator nextOutput() const;
|
||||
ConstIterator lastFrameWritten() const;
|
||||
|
||||
int writeSamples(ConstIterator source, int maxSamples);
|
||||
int writeSamplesWithFade(ConstIterator source, int maxSamples, float fade);
|
||||
|
||||
float getFrameLoudness(ConstIterator frameStart) const;
|
||||
|
||||
protected:
|
||||
int16_t* shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const;
|
||||
float getFrameLoudness(const int16_t* frameStart) const;
|
||||
|
||||
int _numFrameSamples;
|
||||
int _frameCapacity;
|
||||
int _sampleCapacity;
|
||||
int _bufferLength; // actual _buffer length (_sampleCapacity + 1)
|
||||
int _overflowCount{ 0 }; // times the ring buffer has overwritten data
|
||||
|
||||
int16_t* _nextOutput{ nullptr };
|
||||
int16_t* _endOfLastWrite{ nullptr };
|
||||
int16_t* _buffer{ nullptr };
|
||||
};
|
||||
|
||||
// inline the iterator:
|
||||
inline AudioRingBuffer::ConstIterator::ConstIterator() :
|
||||
_bufferLength(0),
|
||||
_bufferFirst(NULL),
|
||||
_bufferLast(NULL),
|
||||
_at(NULL) {}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator::ConstIterator(int16_t* bufferFirst, int capacity, int16_t* at) :
|
||||
_bufferLength(capacity),
|
||||
_bufferFirst(bufferFirst),
|
||||
_bufferLast(bufferFirst + capacity - 1),
|
||||
_at(at) {}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator& AudioRingBuffer::ConstIterator::operator=(const ConstIterator& rhs) {
|
||||
_bufferLength = rhs._bufferLength;
|
||||
_bufferFirst = rhs._bufferFirst;
|
||||
_bufferLast = rhs._bufferLast;
|
||||
_at = rhs._at;
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator& AudioRingBuffer::ConstIterator::operator++() {
|
||||
_at = (_at == _bufferLast) ? _bufferFirst : _at + 1;
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator AudioRingBuffer::ConstIterator::operator++(int) {
|
||||
ConstIterator tmp(*this);
|
||||
++(*this);
|
||||
return tmp;
|
||||
}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator& AudioRingBuffer::ConstIterator::operator--() {
|
||||
_at = (_at == _bufferFirst) ? _bufferLast : _at - 1;
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator AudioRingBuffer::ConstIterator::operator--(int) {
|
||||
ConstIterator tmp(*this);
|
||||
--(*this);
|
||||
return tmp;
|
||||
}
|
||||
|
||||
inline const int16_t& AudioRingBuffer::ConstIterator::operator[] (int i) {
|
||||
return *atShiftedBy(i);
|
||||
}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator AudioRingBuffer::ConstIterator::operator+(int i) {
|
||||
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(i));
|
||||
}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator AudioRingBuffer::ConstIterator::operator-(int i) {
|
||||
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(-i));
|
||||
}
|
||||
|
||||
inline int16_t* AudioRingBuffer::ConstIterator::atShiftedBy(int i) {
|
||||
i = (_at - _bufferFirst + i) % _bufferLength;
|
||||
if (i < 0) {
|
||||
i += _bufferLength;
|
||||
}
|
||||
return _bufferFirst + i;
|
||||
}
|
||||
|
||||
inline void AudioRingBuffer::ConstIterator::readSamples(int16_t* dest, int numSamples) {
|
||||
auto samplesToEnd = _bufferLast - _at + 1;
|
||||
|
||||
if (samplesToEnd >= numSamples) {
|
||||
memcpy(dest, _at, numSamples * sizeof(int16_t));
|
||||
_at += numSamples;
|
||||
} else {
|
||||
auto samplesFromStart = numSamples - samplesToEnd;
|
||||
memcpy(dest, _at, samplesToEnd * sizeof(int16_t));
|
||||
memcpy(dest + samplesToEnd, _bufferFirst, samplesFromStart * sizeof(int16_t));
|
||||
|
||||
_at = _bufferFirst + samplesFromStart;
|
||||
}
|
||||
}
|
||||
|
||||
inline void AudioRingBuffer::ConstIterator::readSamplesWithFade(int16_t* dest, int numSamples, float fade) {
|
||||
int16_t* at = _at;
|
||||
for (int i = 0; i < numSamples; i++) {
|
||||
*dest = (float)*at * fade;
|
||||
++dest;
|
||||
at = (at == _bufferLast) ? _bufferFirst : at + 1;
|
||||
}
|
||||
}
|
||||
|
||||
inline AudioRingBuffer::ConstIterator AudioRingBuffer::nextOutput() const {
    return ConstIterator(_buffer, _bufferLength, _nextOutput);
}

inline AudioRingBuffer::ConstIterator AudioRingBuffer::lastFrameWritten() const {
    return ConstIterator(_buffer, _bufferLength, _endOfLastWrite) - _numFrameSamples;
}

#endif // hifi_AudioRingBuffer_h
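Both AudioRingBuffer::shiftedPositionAccomodatingWrap() and ConstIterator::atShiftedBy() above reduce to the same idea: an index into a ring of _bufferLength slots, wrapped modulo the buffer length, with negative shifts folded back into range. A minimal, self-contained sketch of that wrap arithmetic using plain integer indices instead of int16_t pointers (the buffer length below is illustrative, not taken from the diff):

#include <cassert>

// Same arithmetic as ConstIterator::atShiftedBy(), expressed on indices:
// wrap (at + shift) into [0, bufferLength), folding negative results upward.
static int shiftedIndex(int at, int shift, int bufferLength) {
    int i = (at + shift) % bufferLength;
    if (i < 0) {
        i += bufferLength;  // C++ % truncates toward zero, so fix up negative remainders
    }
    return i;
}

int main() {
    const int bufferLength = 10;  // illustrative; the real buffer keeps one extra frame of slack
    assert(shiftedIndex(8, 3, bufferLength) == 1);   // forward shift wraps past the end
    assert(shiftedIndex(1, -3, bufferLength) == 8);  // backward shift wraps to the end
    return 0;
}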
@ -20,7 +20,7 @@
|
|||
const int STARVE_HISTORY_CAPACITY = 50;
|
||||
|
||||
InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity, const Settings& settings) :
|
||||
_ringBuffer(numFrameSamples, false, numFramesCapacity),
|
||||
_ringBuffer(numFrameSamples, numFramesCapacity),
|
||||
_lastPopSucceeded(false),
|
||||
_lastPopOutput(),
|
||||
_dynamicJitterBuffers(settings._dynamicJitterBuffers),
|
||||
|
|
|
@ -374,6 +374,16 @@ QByteArray AvatarData::toByteArray(bool cullSmallChanges, bool sendAll) {
|
|||
}
|
||||
}
|
||||
|
||||
// faux joints
|
||||
Transform controllerLeftHandTransform = Transform(getControllerLeftHandMatrix());
|
||||
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, controllerLeftHandTransform.getRotation());
|
||||
destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerLeftHandTransform.getTranslation(),
|
||||
TRANSLATION_COMPRESSION_RADIX);
|
||||
Transform controllerRightHandTransform = Transform(getControllerRightHandMatrix());
|
||||
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, controllerRightHandTransform.getRotation());
|
||||
destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerRightHandTransform.getTranslation(),
|
||||
TRANSLATION_COMPRESSION_RADIX);
|
||||
|
||||
#ifdef WANT_DEBUG
|
||||
if (sendAll) {
|
||||
qDebug() << "AvatarData::toByteArray" << cullSmallChanges << sendAll
|
||||
|
@ -429,6 +439,20 @@ bool AvatarData::shouldLogError(const quint64& now) {
|
|||
return false;
|
||||
}
|
||||
|
||||
|
||||
const unsigned char* unpackFauxJoint(const unsigned char* sourceBuffer, ThreadSafeValueCache<glm::mat4>& matrixCache) {
|
||||
glm::quat orientation;
|
||||
glm::vec3 position;
|
||||
Transform transform;
|
||||
sourceBuffer += unpackOrientationQuatFromSixBytes(sourceBuffer, orientation);
|
||||
sourceBuffer += unpackFloatVec3FromSignedTwoByteFixed(sourceBuffer, position, TRANSLATION_COMPRESSION_RADIX);
|
||||
transform.setTranslation(position);
|
||||
transform.setRotation(orientation);
|
||||
matrixCache.set(transform.getMatrix());
|
||||
return sourceBuffer;
|
||||
}
|
||||
|
||||
|
||||
#define PACKET_READ_CHECK(ITEM_NAME, SIZE_TO_READ) \
|
||||
if ((endPosition - sourceBuffer) < (int)SIZE_TO_READ) { \
|
||||
if (shouldLogError(now)) { \
|
||||
|
@ -655,6 +679,10 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
    }
#endif

    // faux joints
    sourceBuffer = unpackFauxJoint(sourceBuffer, _controllerLeftHandMatrixCache);
    sourceBuffer = unpackFauxJoint(sourceBuffer, _controllerRightHandMatrixCache);

    int numBytesRead = sourceBuffer - startPosition;
    _averageBytesReceived.updateAverage(numBytesRead);
    return numBytesRead;

@ -915,7 +943,24 @@ void AvatarData::clearJointsData() {
    }
}

int AvatarData::getFauxJointIndex(const QString& name) const {
    if (name == "_SENSOR_TO_WORLD_MATRIX") {
        return SENSOR_TO_WORLD_MATRIX_INDEX;
    }
    if (name == "_CONTROLLER_LEFTHAND") {
        return CONTROLLER_LEFTHAND_INDEX;
    }
    if (name == "_CONTROLLER_RIGHTHAND") {
        return CONTROLLER_RIGHTHAND_INDEX;
    }
    return -1;
}

int AvatarData::getJointIndex(const QString& name) const {
    int result = getFauxJointIndex(name);
    if (result != -1) {
        return result;
    }
    QReadLocker readLock(&_jointDataLock);
    return _jointIndices.value(name) - 1;
}

@ -1743,6 +1788,17 @@ glm::mat4 AvatarData::getSensorToWorldMatrix() const {
    return _sensorToWorldMatrixCache.get();
}

// thread-safe
glm::mat4 AvatarData::getControllerLeftHandMatrix() const {
    return _controllerLeftHandMatrixCache.get();
}

// thread-safe
glm::mat4 AvatarData::getControllerRightHandMatrix() const {
    return _controllerRightHandMatrixCache.get();
}


QScriptValue RayToAvatarIntersectionResultToScriptValue(QScriptEngine* engine, const RayToAvatarIntersectionResult& value) {
    QScriptValue obj = engine->newObject();
    obj.setProperty("intersects", value.intersects);

@ -173,6 +173,8 @@ class AvatarData : public QObject, public SpatiallyNestable {
    Q_PROPERTY(QUuid sessionUUID READ getSessionUUID)

    Q_PROPERTY(glm::mat4 sensorToWorldMatrix READ getSensorToWorldMatrix)
    Q_PROPERTY(glm::mat4 controllerLeftHandMatrix READ getControllerLeftHandMatrix)
    Q_PROPERTY(glm::mat4 controllerRightHandMatrix READ getControllerRightHandMatrix)

public:

@ -356,6 +358,8 @@ public:

    // thread safe
    Q_INVOKABLE glm::mat4 getSensorToWorldMatrix() const;
    Q_INVOKABLE glm::mat4 getControllerLeftHandMatrix() const;
    Q_INVOKABLE glm::mat4 getControllerRightHandMatrix() const;

public slots:
    void sendAvatarDataPacket();

@ -369,6 +373,8 @@ public slots:
    virtual bool setAbsoluteJointRotationInObjectFrame(int index, const glm::quat& rotation) override { return false; }
    virtual bool setAbsoluteJointTranslationInObjectFrame(int index, const glm::vec3& translation) override { return false; }

    float getTargetScale() { return _targetScale; }

protected:
    glm::vec3 _handPosition;

@ -433,6 +439,10 @@ protected:

    // used to transform any sensor into world space, including the _hmdSensorMat, or hand controllers.
    ThreadSafeValueCache<glm::mat4> _sensorToWorldMatrixCache { glm::mat4() };
    ThreadSafeValueCache<glm::mat4> _controllerLeftHandMatrixCache { glm::mat4() };
    ThreadSafeValueCache<glm::mat4> _controllerRightHandMatrixCache { glm::mat4() };

    int getFauxJointIndex(const QString& name) const;

private:
    friend void avatarStateFromFrame(const QByteArray& frameData, AvatarData* _avatar);

@ -519,5 +529,10 @@ Q_DECLARE_METATYPE(RayToAvatarIntersectionResult)
QScriptValue RayToAvatarIntersectionResultToScriptValue(QScriptEngine* engine, const RayToAvatarIntersectionResult& results);
void RayToAvatarIntersectionResultFromScriptValue(const QScriptValue& object, RayToAvatarIntersectionResult& results);

// faux joint indexes (-1 means invalid)
const int SENSOR_TO_WORLD_MATRIX_INDEX = 65534; // -2
const int CONTROLLER_RIGHTHAND_INDEX = 65533; // -3
const int CONTROLLER_LEFTHAND_INDEX = 65532; // -4

#endif // hifi_AvatarData_h
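The faux joint constants above sit at the top of the 16-bit joint-index range, so when packed as uint16 they read back as the small negative values noted in the comments (65534 ↔ -2, 65533 ↔ -3, 65532 ↔ -4). A minimal script-side sketch of the new names and properties; the MyAvatar calls mirror the AvatarData API added above, and everything else is illustrative only:

    // Sketch only -- assumes an Interface script with the standard MyAvatar interface.
    var rightControllerJoint = MyAvatar.getJointIndex("_CONTROLLER_RIGHTHAND"); // 65533 (-3 as int16)
    var leftControllerJoint = MyAvatar.getJointIndex("_CONTROLLER_LEFTHAND");   // 65532 (-4 as int16)
    print("controller faux joints: " + rightControllerJoint + ", " + leftControllerJoint);

    // the same data is also exposed directly as thread-safe matrix properties
    var sensorToWorld = MyAvatar.sensorToWorldMatrix;
    var leftHandMat = MyAvatar.controllerLeftHandMatrix;
    var rightHandMat = MyAvatar.controllerRightHandMatrix;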
@ -325,7 +325,6 @@ QString UserInputMapper::getActionName(Action action) const {
    return QString();
}


QVector<QString> UserInputMapper::getActionNames() const {
    Locker locker(_lock);
    QVector<QString> result;

@ -335,6 +334,12 @@ QVector<QString> UserInputMapper::getActionNames() const {
    return result;
}

Pose UserInputMapper::getPoseState(Action action) const {
    assert(QThread::currentThread() == thread());
    return _poseStates[toInt(action)];
}


bool UserInputMapper::triggerHapticPulse(float strength, float duration, controller::Hand hand) {
    Locker locker(_lock);
    bool toReturn = false;

@ -81,7 +81,7 @@ namespace controller {
    QVector<Action> getAllActions() const;
    QString getActionName(Action action) const;
    float getActionState(Action action) const { return _actionStates[toInt(action)]; }
    Pose getPoseState(Action action) const { return _poseStates[toInt(action)]; }
    Pose getPoseState(Action action) const;
    int findAction(const QString& actionName) const;
    QVector<QString> getActionNames() const;
    Input inputFromAction(Action action) const { return getActionInputs()[toInt(action)].first; }

@ -69,6 +69,8 @@ public:
    virtual bool wantVsync() const { return true; }
    void setVsyncEnabled(bool vsyncEnabled) { _vsyncEnabled = vsyncEnabled; }
    bool isVsyncEnabled() const { return _vsyncEnabled; }
    // Three threads, one for rendering, one for texture transfers, one reserved for the GL driver
    int getRequiredThreadCount() const override { return 3; }

protected:
    friend class PresentThread;

@ -527,9 +527,11 @@ void HmdDisplayPlugin::compositeExtra() {
            if (_presentHandPoses[index] == IDENTITY_MATRIX) {
                return;
            }
            const auto& points = _presentHandLaserPoints[index];
            const auto& lasers = _presentHandLasers[index];
            geometryCache->renderGlowLine(batch, points.first, points.second, lasers.color);
            const auto& laser = _presentHandLasers[index];
            if (laser.valid()) {
                const auto& points = _presentHandLaserPoints[index];
                geometryCache->renderGlowLine(batch, points.first, points.second, laser.color);
            }
        });
    });
}

@ -100,6 +100,9 @@ EntityActionType EntityActionInterface::actionTypeFromString(QString actionTypeS
    if (normalizedActionTypeString == "hold") {
        return ACTION_TYPE_HOLD;
    }
    if (normalizedActionTypeString == "traveloriented") {
        return ACTION_TYPE_TRAVEL_ORIENTED;
    }

    qDebug() << "Warning -- EntityActionInterface::actionTypeFromString got unknown action-type name" << actionTypeString;
    return ACTION_TYPE_NONE;

@ -115,6 +118,8 @@ QString EntityActionInterface::actionTypeToString(EntityActionType actionType) {
            return "spring";
        case ACTION_TYPE_HOLD:
            return "hold";
        case ACTION_TYPE_TRAVEL_ORIENTED:
            return "travel-oriented";
    }
    assert(false);
    return "none";

@ -28,7 +28,8 @@ enum EntityActionType {
    ACTION_TYPE_NONE = 0,
    ACTION_TYPE_OFFSET = 1000,
    ACTION_TYPE_SPRING = 2000,
    ACTION_TYPE_HOLD = 3000
    ACTION_TYPE_HOLD = 3000,
    ACTION_TYPE_TRAVEL_ORIENTED = 4000
};


@ -205,8 +205,10 @@ void Context::create() {
    formatAttribs.push_back(24);
    formatAttribs.push_back(WGL_STENCIL_BITS_ARB);
    formatAttribs.push_back(8);
    formatAttribs.push_back(WGL_FRAMEBUFFER_SRGB_CAPABLE_ARB);
    formatAttribs.push_back(GL_TRUE);
#ifdef NATIVE_SRGB_FRAMEBUFFER
    // formatAttribs.push_back(WGL_FRAMEBUFFER_SRGB_CAPABLE_ARB);
    // formatAttribs.push_back(GL_TRUE);
#endif
    // terminate the list
    formatAttribs.push_back(0);
    UINT numFormats;

@ -47,12 +47,12 @@ PacketVersion versionForPacketType(PacketType packetType) {
    case PacketType::EntityAdd:
    case PacketType::EntityEdit:
    case PacketType::EntityData:
        return VERSION_WEB_ENTITIES_SUPPORT_DPI;
        return VERSION_ENTITIES_ARROW_ACTION;
    case PacketType::AvatarIdentity:
    case PacketType::AvatarData:
    case PacketType::BulkAvatarData:
    case PacketType::KillAvatar:
        return static_cast<PacketVersion>(AvatarMixerPacketVersion::SensorToWorldMat);
        return static_cast<PacketVersion>(AvatarMixerPacketVersion::HandControllerJoints);
    case PacketType::ICEServerHeartbeat:
        return 18; // ICE Server Heartbeat signing
    case PacketType::AssetGetInfo:

@ -187,13 +187,15 @@ const PacketVersion VERSION_ENTITIES_PROPERLY_ENCODE_SHAPE_EDITS = 60;
const PacketVersion VERSION_MODEL_ENTITIES_SUPPORT_STATIC_MESH = 61;
const PacketVersion VERSION_MODEL_ENTITIES_SUPPORT_SIMPLE_HULLS = 62;
const PacketVersion VERSION_WEB_ENTITIES_SUPPORT_DPI = 63;
const PacketVersion VERSION_ENTITIES_ARROW_ACTION = 64;

enum class AvatarMixerPacketVersion : PacketVersion {
    TranslationSupport = 17,
    SoftAttachmentSupport,
    AvatarEntities,
    AbsoluteSixByteRotations,
    SensorToWorldMat
    SensorToWorldMat,
    HandControllerJoints
};

enum class DomainConnectRequestVersion : PacketVersion {

206
libraries/physics/src/ObjectActionTravelOriented.cpp
Normal file

@ -0,0 +1,206 @@
//
//  ObjectActionTravelOriented.cpp
//  libraries/physics/src
//
//  Created by Seth Alves 2015-6-5
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <glm/gtc/quaternion.hpp>

#include "QVariantGLM.h"
#include "ObjectActionTravelOriented.h"

const uint16_t ObjectActionTravelOriented::actionVersion = 1;


ObjectActionTravelOriented::ObjectActionTravelOriented(const QUuid& id, EntityItemPointer ownerEntity) :
    ObjectAction(ACTION_TYPE_TRAVEL_ORIENTED, id, ownerEntity) {
    #if WANT_DEBUG
    qDebug() << "ObjectActionTravelOriented::ObjectActionTravelOriented";
    #endif
}

ObjectActionTravelOriented::~ObjectActionTravelOriented() {
    #if WANT_DEBUG
    qDebug() << "ObjectActionTravelOriented::~ObjectActionTravelOriented";
    #endif
}

void ObjectActionTravelOriented::updateActionWorker(btScalar deltaTimeStep) {
    withReadLock([&] {
        auto ownerEntity = _ownerEntity.lock();
        if (!ownerEntity) {
            return;
        }
        void* physicsInfo = ownerEntity->getPhysicsInfo();
        if (!physicsInfo) {
            return;
        }
        ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
        btRigidBody* rigidBody = motionState->getRigidBody();
        if (!rigidBody) {
            qDebug() << "ObjectActionTravelOriented::updateActionWorker no rigidBody";
            return;
        }
        const float MAX_TIMESCALE = 600.0f; // 10 min is a long time
        if (_angularTimeScale > MAX_TIMESCALE) {
            return;
        }

        // find normalized velocity
        glm::vec3 velocity = bulletToGLM(rigidBody->getLinearVelocity());
        float speed = glm::length(velocity);
        const float TRAVEL_ORIENTED_TOO_SLOW = 0.001f; // meters / second
        if (speed < TRAVEL_ORIENTED_TOO_SLOW) {
            return;
        }
        glm::vec3 direction = glm::normalize(velocity);

        // find current angle of "forward"
        btQuaternion bodyRotation = rigidBody->getOrientation();
        glm::quat orientation = bulletToGLM(bodyRotation);
        glm::vec3 forwardInWorldFrame = glm::normalize(orientation * _forward);

        // find the rotation that would line up direction and forward
        glm::quat neededRotation = glm::rotation(forwardInWorldFrame, direction);
        glm::quat rotationalTarget = neededRotation * orientation;

        btVector3 targetAngularVelocity(0.0f, 0.0f, 0.0f);

        auto alignmentDot = bodyRotation.dot(glmToBullet(rotationalTarget));
        const float ALMOST_ONE = 0.99999f;
        if (glm::abs(alignmentDot) < ALMOST_ONE) {
            btQuaternion target = glmToBullet(rotationalTarget);
            if (alignmentDot < 0.0f) {
                target = -target;
            }
            // if dQ is the incremental rotation that gets an object from Q0 to Q1 then:
            //
            //      Q1 = dQ * Q0
            //
            // solving for dQ gives:
            //
            //      dQ = Q1 * Q0^
            btQuaternion deltaQ = target * bodyRotation.inverse();
            float speed = deltaQ.getAngle() / _angularTimeScale;
            targetAngularVelocity = speed * deltaQ.getAxis();
            if (speed > rigidBody->getAngularSleepingThreshold()) {
                rigidBody->activate();
            }
        }
        // this action is aggresively critically damped and defeats the current velocity
        rigidBody->setAngularVelocity(targetAngularVelocity);
    });
}

const float MIN_TIMESCALE = 0.1f;


bool ObjectActionTravelOriented::updateArguments(QVariantMap arguments) {
    glm::vec3 forward;
    float angularTimeScale;

    bool needUpdate = false;
    bool somethingChanged = ObjectAction::updateArguments(arguments);
    withReadLock([&]{
        bool ok = true;
        forward = EntityActionInterface::extractVec3Argument("travel oriented action", arguments, "forward", ok, true);
        if (!ok) {
            forward = _forward;
        }
        ok = true;
        angularTimeScale =
            EntityActionInterface::extractFloatArgument("travel oriented action", arguments, "angularTimeScale", ok, false);
        if (!ok) {
            angularTimeScale = _angularTimeScale;
        }

        if (somethingChanged ||
            forward != _forward ||
            angularTimeScale != _angularTimeScale) {
            // something changed
            needUpdate = true;
        }
    });

    if (needUpdate) {
        withWriteLock([&] {
            _forward = forward;
            _angularTimeScale = glm::max(MIN_TIMESCALE, glm::abs(angularTimeScale));
            _active = (_forward != glm::vec3());

            auto ownerEntity = _ownerEntity.lock();
            if (ownerEntity) {
                ownerEntity->setActionDataDirty(true);
                ownerEntity->setActionDataNeedsTransmit(true);
            }
        });
        activateBody();
    }

    return true;
}

QVariantMap ObjectActionTravelOriented::getArguments() {
    QVariantMap arguments = ObjectAction::getArguments();
    withReadLock([&] {
        arguments["forward"] = glmToQMap(_forward);
        arguments["angularTimeScale"] = _angularTimeScale;
    });
    return arguments;
}

QByteArray ObjectActionTravelOriented::serialize() const {
    QByteArray serializedActionArguments;
    QDataStream dataStream(&serializedActionArguments, QIODevice::WriteOnly);

    dataStream << ACTION_TYPE_TRAVEL_ORIENTED;
    dataStream << getID();
    dataStream << ObjectActionTravelOriented::actionVersion;

    withReadLock([&] {
        dataStream << _forward;
        dataStream << _angularTimeScale;

        dataStream << localTimeToServerTime(_expires);
        dataStream << _tag;
    });

    return serializedActionArguments;
}

void ObjectActionTravelOriented::deserialize(QByteArray serializedArguments) {
    QDataStream dataStream(serializedArguments);

    EntityActionType type;
    dataStream >> type;
    assert(type == getType());

    QUuid id;
    dataStream >> id;
    assert(id == getID());

    uint16_t serializationVersion;
    dataStream >> serializationVersion;
    if (serializationVersion != ObjectActionTravelOriented::actionVersion) {
        assert(false);
        return;
    }

    withWriteLock([&] {
        dataStream >> _forward;
        dataStream >> _angularTimeScale;

        quint64 serverExpires;
        dataStream >> serverExpires;
        _expires = serverTimeToLocalTime(serverExpires);

        dataStream >> _tag;

        _active = (_forward != glm::vec3());
    });
}

39
libraries/physics/src/ObjectActionTravelOriented.h
Normal file

@ -0,0 +1,39 @@
//
//  ObjectActionTravelOriented.h
//  libraries/physics/src
//
//  Created by Seth Alves 2016-8-28
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_ObjectActionTravelOriented_h
#define hifi_ObjectActionTravelOriented_h

#include "ObjectAction.h"

class ObjectActionTravelOriented : public ObjectAction {
public:
    ObjectActionTravelOriented(const QUuid& id, EntityItemPointer ownerEntity);
    virtual ~ObjectActionTravelOriented();

    virtual bool updateArguments(QVariantMap arguments) override;
    virtual QVariantMap getArguments() override;

    virtual void updateActionWorker(float deltaTimeStep) override;

    virtual QByteArray serialize() const override;
    virtual void deserialize(QByteArray serializedArguments) override;

protected:
    static const uint16_t actionVersion;

    glm::vec3 _forward { glm::vec3() }; // the vector in object space that should point in the direction of travel
    float _angularTimeScale { 0.1f };

    glm::vec3 _angularVelocityTarget;
};

#endif // hifi_ObjectActionTravelOriented_h
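A minimal sketch of driving the new action from a script. Only the action name reported by actionTypeToString() ("travel-oriented") and the argument keys "forward" and "angularTimeScale" come from the code above; the entity and the argument values are made up, and the exact name string accepted depends on how actionTypeFromString() normalizes it.

    // hypothetical entity, used only to illustrate the new action type
    var arrowID = Entities.addEntity({
        type: "Box",
        position: Vec3.sum(MyAvatar.position, { x: 0, y: 1, z: -2 }),
        dynamic: true
    });

    // keep the entity's object-space -z axis pointed along its direction of travel
    var actionID = Entities.addAction("travel-oriented", arrowID, {
        forward: { x: 0, y: 0, z: -1 }, // maps to _forward above
        angularTimeScale: 0.1           // clamped to at least MIN_TIMESCALE (0.1) by updateArguments()
    });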
@ -128,6 +128,7 @@ public:
        Present = QEvent::User + 1
    };

    virtual int getRequiredThreadCount() const { return 0; }
    virtual bool isHmd() const { return false; }
    virtual int getHmdScreen() const { return -1; }
    /// By default, all HMDs are stereo

@ -65,13 +65,21 @@ bool FileScriptingInterface::isTempDir(QString tempDir) {
    folderName = "/" + testDir.section("/", -1);
    QString testContainer = testDir;
    testContainer.remove(folderName);
    if (testContainer == tempContainer) return true;
    return false;
    return (testContainer == tempContainer);
}

// checks whether the webview is displaying a Clara.io page for Marketplaces.qml
bool FileScriptingInterface::isClaraLink(QUrl url) {
    return (url.toString().contains("clara.io") && !url.toString().contains("clara.io/signup"));
}

bool FileScriptingInterface::isZippedFbx(QUrl url) {
    if (url.toString().contains(".zip") && url.toString().contains("fbx")) return true;
    return false;
    return (url.toString().endsWith("fbx.zip"));
}

// checks whether a user tries to download a file that is not in .fbx format
bool FileScriptingInterface::isZipped(QUrl url) {
    return (url.toString().endsWith(".zip"));
}

// this function is not in use

@ -25,6 +25,8 @@ public:

public slots:
    bool isZippedFbx(QUrl url);
    bool isZipped(QUrl url);
    bool isClaraLink(QUrl url);
    QString convertUrlToPath(QUrl url);
    void runUnzip(QString path, QUrl url);
    QString getTempDir();
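For reference, a small sketch of what the tightened URL checks accept and reject; it assumes the interface is exposed to scripts as "File", and the URLs are invented.

    var fbxArchive = "https://example.com/assets/chair.fbx.zip"; // hypothetical
    var plainZip = "https://example.com/assets/textures.zip";    // hypothetical

    print(File.isZippedFbx(fbxArchive)); // true  -- ends with "fbx.zip"
    print(File.isZippedFbx(plainZip));   // false -- a .zip, but not an fbx archive
    print(File.isZipped(plainZip));      // true  -- any ".zip" URL
    print(File.isClaraLink("https://clara.io/library")); // true
    print(File.isClaraLink("https://clara.io/signup"));  // false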
@ -58,6 +58,9 @@ public:
|
|||
void unsuppressKeyboard() override;
|
||||
bool isKeyboardVisible() override;
|
||||
|
||||
// Needs an additional thread for VR submission
|
||||
int getRequiredThreadCount() const override { return Parent::getRequiredThreadCount() + 1; }
|
||||
|
||||
protected:
|
||||
bool internalActivate() override;
|
||||
void internalDeactivate() override;
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
/* global setEntityCustomData, getEntityCustomData, flatten, Xform, Script, Quat, Vec3, MyAvatar, Entities, Overlays, Settings, Reticle, Controller, Camera, Messages, Mat4 */
|
||||
|
||||
(function() { // BEGIN LOCAL_SCOPE
|
||||
|
||||
|
@ -24,6 +25,9 @@ var WANT_DEBUG = false;
|
|||
var WANT_DEBUG_STATE = false;
|
||||
var WANT_DEBUG_SEARCH_NAME = null;
|
||||
|
||||
var FORCE_IGNORE_IK = true;
|
||||
var SHOW_GRAB_POINT_SPHERE = true;
|
||||
|
||||
//
|
||||
// these tune time-averaging and "on" value for analog trigger
|
||||
//
|
||||
|
@ -59,6 +63,7 @@ var EQUIP_SPHERE_COLOR = {
|
|||
var EQUIP_SPHERE_ALPHA = 0.15;
|
||||
var EQUIP_SPHERE_SCALE_FACTOR = 0.65;
|
||||
|
||||
|
||||
//
|
||||
// distant manipulation
|
||||
//
|
||||
|
@ -87,21 +92,21 @@ var COLORS_GRAB_DISTANCE_HOLD = {
|
|||
blue: 214
|
||||
};
|
||||
|
||||
|
||||
var PICK_MAX_DISTANCE = 500; // max length of pick-ray
|
||||
|
||||
//
|
||||
// near grabbing
|
||||
//
|
||||
|
||||
var EQUIP_RADIUS = 0.1; // radius used for palm vs equip-hotspot for equipping.
|
||||
var EQUIP_RADIUS = 0.2; // radius used for palm vs equip-hotspot for equipping.
|
||||
// if EQUIP_HOTSPOT_RENDER_RADIUS is greater than zero, the hotspot will appear before the hand
|
||||
// has reached the required position, and then grow larger once the hand is close enough to equip.
|
||||
var EQUIP_HOTSPOT_RENDER_RADIUS = 0.0; // radius used for palm vs equip-hotspot for rendering hot-spots
|
||||
var MAX_EQUIP_HOTSPOT_RADIUS = 1.0;
|
||||
|
||||
var NEAR_GRABBING_ACTION_TIMEFRAME = 0.05; // how quickly objects move to their new position
|
||||
|
||||
var NEAR_GRAB_RADIUS = 0.15; // radius used for palm vs object for near grabbing.
|
||||
var NEAR_GRAB_RADIUS = 0.07; // radius used for palm vs object for near grabbing.
|
||||
var NEAR_GRAB_MAX_DISTANCE = 1.0; // you cannot grab objects that are this far away from your hand
|
||||
|
||||
var NEAR_GRAB_PICK_RADIUS = 0.25; // radius used for search ray vs object for near grabbing.
|
||||
|
@ -112,6 +117,13 @@ var NEAR_GRABBING_KINEMATIC = true; // force objects to be kinematic when near-g
|
|||
// if an equipped item is "adjusted" to be too far from the hand it's in, it will be unequipped.
|
||||
var CHECK_TOO_FAR_UNEQUIP_TIME = 0.3; // seconds, duration between checks
|
||||
|
||||
|
||||
var GRAB_POINT_SPHERE_OFFSET = { x: 0.0, y: 0.2, z: 0.0 };
|
||||
var GRAB_POINT_SPHERE_RADIUS = NEAR_GRAB_RADIUS;
|
||||
var GRAB_POINT_SPHERE_COLOR = { red: 20, green: 90, blue: 238 };
|
||||
var GRAB_POINT_SPHERE_ALPHA = 0.85;
|
||||
|
||||
|
||||
//
|
||||
// other constants
|
||||
//
|
||||
|
@ -168,7 +180,7 @@ var USE_BLACKLIST = true;
|
|||
var blacklist = [];
|
||||
|
||||
var FORBIDDEN_GRAB_NAMES = ["Grab Debug Entity", "grab pointer"];
|
||||
var FORBIDDEN_GRAB_TYPES = ['Unknown', 'Light', 'PolyLine', 'Zone'];
|
||||
var FORBIDDEN_GRAB_TYPES = ["Unknown", "Light", "PolyLine", "Zone"];
|
||||
|
||||
// states for the state machine
|
||||
var STATE_OFF = 0;
|
||||
|
@ -183,7 +195,6 @@ var STATE_ENTITY_TOUCHING = 7;
|
|||
// "collidesWith" is specified by comma-separated list of group names
|
||||
// the possible group names are: static, dynamic, kinematic, myAvatar, otherAvatar
|
||||
var COLLIDES_WITH_WHILE_GRABBED = "dynamic,otherAvatar";
|
||||
var COLLIDES_WITH_WHILE_MULTI_GRABBED = "dynamic";
|
||||
|
||||
var HEART_BEAT_INTERVAL = 5 * MSECS_PER_SEC;
|
||||
var HEART_BEAT_TIMEOUT = 15 * MSECS_PER_SEC;
|
||||
|
@ -273,11 +284,9 @@ function projectOntoEntityXYPlane(entityID, worldPos) {
|
|||
y: (1 - normalizedPos.y) * props.dimensions.y }; // flip y-axis
|
||||
}
|
||||
|
||||
function handLaserIntersectEntity(entityID, hand) {
|
||||
var standardControllerValue = (hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
|
||||
var pose = Controller.getPoseValue(standardControllerValue);
|
||||
var worldHandPosition = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, pose.translation), MyAvatar.position);
|
||||
var worldHandRotation = Quat.multiply(MyAvatar.orientation, pose.rotation);
|
||||
function handLaserIntersectEntity(entityID, start) {
|
||||
var worldHandPosition = start.position;
|
||||
var worldHandRotation = start.orientation;
|
||||
|
||||
var props = entityPropertiesCache.getProps(entityID);
|
||||
|
||||
|
@ -355,7 +364,7 @@ function entityIsGrabbedByOther(entityID) {
|
|||
for (var actionIndex = 0; actionIndex < actionIDs.length; actionIndex++) {
|
||||
var actionID = actionIDs[actionIndex];
|
||||
var actionArguments = Entities.getActionArguments(entityID, actionID);
|
||||
var tag = actionArguments["tag"];
|
||||
var tag = actionArguments.tag;
|
||||
if (tag == getTag()) {
|
||||
// we see a grab-*uuid* shaped tag, but it's our tag, so that's okay.
|
||||
continue;
|
||||
|
@ -680,9 +689,7 @@ EquipHotspotBuddy.prototype.update = function(deltaTime, timestamp) {
|
|||
|
||||
if (overlayInfoSet.timestamp != timestamp && overlayInfoSet.currentSize <= 0.05) {
|
||||
// this is an old overlay, that has finished fading out, delete it!
|
||||
overlayInfoSet.overlays.forEach(function(overlay) {
|
||||
Overlays.deleteOverlay(overlay);
|
||||
});
|
||||
overlayInfoSet.overlays.forEach(Overlays.deleteOverlay);
|
||||
delete this.map[keys[i]];
|
||||
} else {
|
||||
// update overlay position, rotation to follow the object it's attached to.
|
||||
|
@ -714,16 +721,36 @@ var equipHotspotBuddy = new EquipHotspotBuddy();
|
|||
|
||||
function MyController(hand) {
|
||||
this.hand = hand;
|
||||
if (this.hand === RIGHT_HAND) {
|
||||
this.getHandPosition = MyAvatar.getRightPalmPosition;
|
||||
// this.getHandRotation = MyAvatar.getRightPalmRotation;
|
||||
} else {
|
||||
this.getHandPosition = MyAvatar.getLeftPalmPosition;
|
||||
// this.getHandRotation = MyAvatar.getLeftPalmRotation;
|
||||
}
|
||||
this.getHandRotation = function() {
|
||||
var controllerHandInput = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
|
||||
return Quat.multiply(MyAvatar.orientation, Controller.getPoseValue(controllerHandInput).rotation);
|
||||
this.autoUnequipCounter = 0;
|
||||
|
||||
// handPosition is where the avatar's hand appears to be, in-world.
|
||||
this.getHandPosition = function () {
|
||||
if (this.hand === RIGHT_HAND) {
|
||||
return MyAvatar.getRightPalmPosition();
|
||||
} else {
|
||||
return MyAvatar.getLeftPalmPosition();
|
||||
}
|
||||
};
|
||||
this.getHandRotation = function () {
|
||||
if (this.hand === RIGHT_HAND) {
|
||||
return MyAvatar.getRightPalmRotation();
|
||||
} else {
|
||||
return MyAvatar.getLeftPalmRotation();
|
||||
}
|
||||
};
|
||||
// controllerLocation is where the controller would be, in-world.
|
||||
this.getControllerLocation = function (doOffset) {
|
||||
var standardControllerValue = (hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
|
||||
var pose = Controller.getPoseValue(standardControllerValue);
|
||||
|
||||
var orientation = Quat.multiply(MyAvatar.orientation, pose.rotation);
|
||||
var position = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, pose.translation), MyAvatar.position);
|
||||
// add to the real position so the grab-point is out in front of the hand, a bit
|
||||
if (doOffset) {
|
||||
position = Vec3.sum(position, Vec3.multiplyQbyV(orientation, GRAB_POINT_SPHERE_OFFSET));
|
||||
}
|
||||
|
||||
return {position: position, orientation: orientation};
|
||||
};
|
||||
|
||||
this.actionID = null; // action this script created...
|
||||
|
@ -830,6 +857,39 @@ function MyController(hand) {
|
|||
}
|
||||
};
|
||||
|
||||
this.grabPointSphereOn = function() {
|
||||
if (!SHOW_GRAB_POINT_SPHERE) {
|
||||
return;
|
||||
}
|
||||
if (!MyAvatar.sessionUUID) {
|
||||
return;
|
||||
}
|
||||
if (!this.grabPointSphere) {
|
||||
this.grabPointSphere = Overlays.addOverlay("sphere", {
|
||||
localPosition: GRAB_POINT_SPHERE_OFFSET,
|
||||
localRotation: { x: 0, y: 0, z: 0, w: 1 },
|
||||
dimensions: GRAB_POINT_SPHERE_RADIUS,
|
||||
color: GRAB_POINT_SPHERE_COLOR,
|
||||
alpha: GRAB_POINT_SPHERE_ALPHA,
|
||||
solid: true,
|
||||
visible: true,
|
||||
ignoreRayIntersection: true,
|
||||
drawInFront: false,
|
||||
parentID: MyAvatar.sessionUUID,
|
||||
parentJointIndex: MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
|
||||
"_CONTROLLER_RIGHTHAND" :
|
||||
"_CONTROLLER_LEFTHAND")
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
this.grabPointSphereOff = function() {
|
||||
if (this.grabPointSphere) {
|
||||
Overlays.deleteOverlay(this.grabPointSphere);
|
||||
this.grabPointSphere = null;
|
||||
}
|
||||
};
|
||||
|
||||
this.searchSphereOn = function(location, size, color) {
|
||||
|
||||
var rotation = Quat.lookAt(location, Camera.getPosition(), Vec3.UP);
|
||||
|
@ -905,10 +965,14 @@ function MyController(hand) {
|
|||
var searchSphereLocation = Vec3.sum(distantPickRay.origin,
|
||||
Vec3.multiply(distantPickRay.direction, this.searchSphereDistance));
|
||||
this.searchSphereOn(searchSphereLocation, SEARCH_SPHERE_SIZE * this.searchSphereDistance,
|
||||
(this.triggerSmoothedGrab() || this.secondarySqueezed()) ? COLORS_GRAB_SEARCHING_FULL_SQUEEZE : COLORS_GRAB_SEARCHING_HALF_SQUEEZE);
|
||||
(this.triggerSmoothedGrab() || this.secondarySqueezed()) ?
|
||||
COLORS_GRAB_SEARCHING_FULL_SQUEEZE :
|
||||
COLORS_GRAB_SEARCHING_HALF_SQUEEZE);
|
||||
if (PICK_WITH_HAND_RAY) {
|
||||
this.overlayLineOn(handPosition, searchSphereLocation,
|
||||
(this.triggerSmoothedGrab() || this.secondarySqueezed()) ? COLORS_GRAB_SEARCHING_FULL_SQUEEZE : COLORS_GRAB_SEARCHING_HALF_SQUEEZE);
|
||||
(this.triggerSmoothedGrab() || this.secondarySqueezed()) ?
|
||||
COLORS_GRAB_SEARCHING_FULL_SQUEEZE :
|
||||
COLORS_GRAB_SEARCHING_HALF_SQUEEZE);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -958,7 +1022,8 @@ function MyController(hand) {
|
|||
this.turnOffVisualizations = function() {
|
||||
|
||||
this.overlayLineOff();
|
||||
|
||||
this.grabPointSphereOff();
|
||||
this.lineOff();
|
||||
this.searchSphereOff();
|
||||
restore2DMode();
|
||||
|
||||
|
@ -1029,19 +1094,20 @@ function MyController(hand) {
|
|||
}
|
||||
if (!this.waitForTriggerRelease && this.triggerSmoothedSqueezed()) {
|
||||
this.lastPickTime = 0;
|
||||
var controllerHandInput = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
|
||||
this.startingHandRotation = Controller.getPoseValue(controllerHandInput).rotation;
|
||||
this.startingHandRotation = this.getControllerLocation(true).orientation;
|
||||
if (this.triggerSmoothedSqueezed()) {
|
||||
this.setState(STATE_SEARCHING, "trigger squeeze detected");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
var candidateEntities = Entities.findEntities(this.getHandPosition(), EQUIP_HOTSPOT_RENDER_RADIUS);
|
||||
this.grabPointSphereOn();
|
||||
|
||||
var candidateEntities = Entities.findEntities(this.getControllerLocation(true).position, MAX_EQUIP_HOTSPOT_RADIUS);
|
||||
entityPropertiesCache.addEntities(candidateEntities);
|
||||
var potentialEquipHotspot = this.chooseBestEquipHotspot(candidateEntities);
|
||||
if (!this.waitForTriggerRelease) {
|
||||
this.updateEquipHaptics(potentialEquipHotspot, this.getHandPosition());
|
||||
this.updateEquipHaptics(potentialEquipHotspot, this.getControllerLocation(true).position);
|
||||
}
|
||||
|
||||
var nearEquipHotspots = this.chooseNearEquipHotspots(candidateEntities, EQUIP_HOTSPOT_RENDER_RADIUS);
|
||||
|
@ -1060,23 +1126,27 @@ function MyController(hand) {
|
|||
!potentialEquipHotspot && this.prevPotentialEquipHotspot) {
|
||||
Controller.triggerHapticPulse(HAPTIC_TEXTURE_STRENGTH, HAPTIC_TEXTURE_DURATION, this.hand);
|
||||
this.lastHapticPulseLocation = currentLocation;
|
||||
} else if (potentialEquipHotspot && Vec3.distance(this.lastHapticPulseLocation, currentLocation) > HAPTIC_TEXTURE_DISTANCE) {
|
||||
} else if (potentialEquipHotspot &&
|
||||
Vec3.distance(this.lastHapticPulseLocation, currentLocation) > HAPTIC_TEXTURE_DISTANCE) {
|
||||
Controller.triggerHapticPulse(HAPTIC_TEXTURE_STRENGTH, HAPTIC_TEXTURE_DURATION, this.hand);
|
||||
this.lastHapticPulseLocation = currentLocation;
|
||||
}
|
||||
this.prevPotentialEquipHotspot = potentialEquipHotspot;
|
||||
};
|
||||
|
||||
this.heartBeatIsStale = function(data) {
|
||||
var now = Date.now();
|
||||
return data.heartBeat === undefined || now - data.heartBeat > HEART_BEAT_TIMEOUT;
|
||||
};
|
||||
|
||||
// Performs ray pick test from the hand controller into the world
|
||||
// @param {number} which hand to use, RIGHT_HAND or LEFT_HAND
|
||||
// @returns {object} returns object with two keys entityID and distance
|
||||
//
|
||||
this.calcRayPickInfo = function(hand) {
|
||||
|
||||
var standardControllerValue = (hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
|
||||
var pose = Controller.getPoseValue(standardControllerValue);
|
||||
var worldHandPosition = Vec3.sum(Vec3.multiplyQbyV(MyAvatar.orientation, pose.translation), MyAvatar.position);
|
||||
var worldHandRotation = Quat.multiply(MyAvatar.orientation, pose.rotation);
|
||||
var controllerLocation = this.getControllerLocation(true);
|
||||
var worldHandPosition = controllerLocation.position;
|
||||
var worldHandRotation = controllerLocation.orientation;
|
||||
|
||||
var pickRay = {
|
||||
origin: PICK_WITH_HAND_RAY ? worldHandPosition : Camera.position,
|
||||
|
@ -1196,7 +1266,7 @@ function MyController(hand) {
|
|||
var okToEquipFromOtherHand = ((this.getOtherHandController().state == STATE_NEAR_GRABBING ||
|
||||
this.getOtherHandController().state == STATE_DISTANCE_HOLDING) &&
|
||||
this.getOtherHandController().grabbedEntity == hotspot.entityID);
|
||||
if (refCount > 0 && !okToEquipFromOtherHand) {
|
||||
if (refCount > 0 && !this.heartBeatIsStale(grabProps) && !okToEquipFromOtherHand) {
|
||||
if (debug) {
|
||||
print("equip is skipping '" + props.name + "': grabbed by someone else");
|
||||
}
|
||||
|
@ -1213,20 +1283,21 @@ function MyController(hand) {
|
|||
var physical = propsArePhysical(props);
|
||||
var grabbable = false;
|
||||
var debug = (WANT_DEBUG_SEARCH_NAME && props.name === WANT_DEBUG_SEARCH_NAME);
|
||||
var refCount = ("refCount" in grabProps) ? grabProps.refCount : 0;
|
||||
|
||||
if (physical) {
|
||||
// physical things default to grabbable
|
||||
grabbable = true;
|
||||
} else {
|
||||
// non-physical things default to non-grabbable unless they are already grabbed
|
||||
if ("refCount" in grabProps && grabProps.refCount > 0) {
|
||||
if (refCount > 0) {
|
||||
grabbable = true;
|
||||
} else {
|
||||
grabbable = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (grabbableProps.hasOwnProperty("grabbable")) {
|
||||
if (grabbableProps.hasOwnProperty("grabbable") && refCount === 0) {
|
||||
grabbable = grabbableProps.grabbable;
|
||||
}
|
||||
|
||||
|
@ -1321,7 +1392,7 @@ function MyController(hand) {
|
|||
return _this.collectEquipHotspots(entityID);
|
||||
})).filter(function(hotspot) {
|
||||
return (_this.hotspotIsEquippable(hotspot) &&
|
||||
Vec3.distance(hotspot.worldPosition, _this.getHandPosition()) < hotspot.radius + distance);
|
||||
Vec3.distance(hotspot.worldPosition, _this.getControllerLocation(true).position) < hotspot.radius + distance);
|
||||
});
|
||||
return equippableHotspots;
|
||||
};
|
||||
|
@ -1332,8 +1403,8 @@ function MyController(hand) {
|
|||
if (equippableHotspots.length > 0) {
|
||||
// sort by distance
|
||||
equippableHotspots.sort(function(a, b) {
|
||||
var aDistance = Vec3.distance(a.worldPosition, this.getHandPosition());
|
||||
var bDistance = Vec3.distance(b.worldPosition, this.getHandPosition());
|
||||
var aDistance = Vec3.distance(a.worldPosition, this.getControllerLocation(true).position);
|
||||
var bDistance = Vec3.distance(b.worldPosition, this.getControllerLocation(true).position);
|
||||
return aDistance - bDistance;
|
||||
});
|
||||
return equippableHotspots[0];
|
||||
|
@ -1359,6 +1430,8 @@ function MyController(hand) {
|
|||
this.isInitialGrab = false;
|
||||
this.shouldResetParentOnRelease = false;
|
||||
|
||||
this.grabPointSphereOn();
|
||||
|
||||
this.checkForStrayChildren();
|
||||
|
||||
if (this.triggerSmoothedReleased()) {
|
||||
|
@ -1366,7 +1439,7 @@ function MyController(hand) {
|
|||
return;
|
||||
}
|
||||
|
||||
var handPosition = this.getHandPosition();
|
||||
var handPosition = this.getControllerLocation(true).position;
|
||||
|
||||
var rayPickInfo = this.calcRayPickInfo(this.hand);
|
||||
|
||||
|
@ -1374,10 +1447,10 @@ function MyController(hand) {
|
|||
entityPropertiesCache.addEntity(rayPickInfo.entityID);
|
||||
}
|
||||
|
||||
var candidateEntities = Entities.findEntities(handPosition, NEAR_GRAB_RADIUS);
|
||||
entityPropertiesCache.addEntities(candidateEntities);
|
||||
var candidateHotSpotEntities = Entities.findEntities(handPosition, MAX_EQUIP_HOTSPOT_RADIUS);
|
||||
entityPropertiesCache.addEntities(candidateHotSpotEntities);
|
||||
|
||||
var potentialEquipHotspot = this.chooseBestEquipHotspot(candidateEntities);
|
||||
var potentialEquipHotspot = this.chooseBestEquipHotspot(candidateHotSpotEntities);
|
||||
if (potentialEquipHotspot) {
|
||||
if (this.triggerSmoothedGrab()) {
|
||||
this.grabbedHotspot = potentialEquipHotspot;
|
||||
|
@ -1387,6 +1460,7 @@ function MyController(hand) {
|
|||
}
|
||||
}
|
||||
|
||||
var candidateEntities = Entities.findEntities(handPosition, NEAR_GRAB_RADIUS);
|
||||
var grabbableEntities = candidateEntities.filter(function(entity) {
|
||||
return _this.entityIsNearGrabbable(entity, handPosition, NEAR_GRAB_MAX_DISTANCE);
|
||||
});
|
||||
|
@ -1546,18 +1620,13 @@ function MyController(hand) {
|
|||
};
|
||||
|
||||
this.distanceHoldingEnter = function() {
|
||||
Messages.sendLocalMessage('Hifi-Teleport-Disabler','both');
|
||||
Messages.sendLocalMessage('Hifi-Teleport-Disabler', 'both');
|
||||
this.clearEquipHaptics();
|
||||
this.grabPointSphereOff();
|
||||
|
||||
// controller pose is in avatar frame
|
||||
var device = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
|
||||
var avatarControllerPose = Controller.getPoseValue(device);
|
||||
var worldControllerPosition = this.getControllerLocation(true).position;
|
||||
|
||||
// transform it into world frame
|
||||
var worldControllerPosition = Vec3.sum(MyAvatar.position,
|
||||
Vec3.multiplyQbyV(MyAvatar.orientation, avatarControllerPose.translation));
|
||||
|
||||
// also transform the position into room space
|
||||
// transform the position into room space
|
||||
var worldToSensorMat = Mat4.inverse(MyAvatar.getSensorToWorldMatrix());
|
||||
var roomControllerPosition = Mat4.transformPoint(worldToSensorMat, worldControllerPosition);
|
||||
|
||||
|
@ -1620,14 +1689,10 @@ function MyController(hand) {
|
|||
|
||||
this.heartBeat(this.grabbedEntity);
|
||||
|
||||
// controller pose is in avatar frame
|
||||
var device = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
|
||||
var avatarControllerPose = Controller.getPoseValue(device);
|
||||
|
||||
// transform it into world frame
|
||||
var worldControllerPosition = Vec3.sum(MyAvatar.position,
|
||||
Vec3.multiplyQbyV(MyAvatar.orientation, avatarControllerPose.translation));
|
||||
var worldControllerRotation = Quat.multiply(MyAvatar.orientation, avatarControllerPose.rotation);
|
||||
var controllerLocation = this.getControllerLocation(true);
|
||||
var worldControllerPosition = controllerLocation.position;
|
||||
var worldControllerRotation = controllerLocation.orientation;
|
||||
|
||||
// also transform the position into room space
|
||||
var worldToSensorMat = Mat4.inverse(MyAvatar.getSensorToWorldMatrix());
|
||||
|
@ -1698,8 +1763,6 @@ function MyController(hand) {
|
|||
}
|
||||
}
|
||||
|
||||
var handPosition = this.getHandPosition();
|
||||
|
||||
// visualizations
|
||||
|
||||
var rayPickInfo = this.calcRayPickInfo(this.hand);
|
||||
|
@ -1764,10 +1827,7 @@ function MyController(hand) {
|
|||
};
|
||||
|
||||
this.dropGestureProcess = function(deltaTime) {
|
||||
var standardControllerValue = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
|
||||
var pose = Controller.getPoseValue(standardControllerValue);
|
||||
var worldHandRotation = Quat.multiply(MyAvatar.orientation, pose.rotation);
|
||||
|
||||
var worldHandRotation = this.getControllerLocation(true).orientation;
|
||||
var localHandUpAxis = this.hand === RIGHT_HAND ? {
|
||||
x: 1,
|
||||
y: 0,
|
||||
|
@ -1806,14 +1866,14 @@ function MyController(hand) {
|
|||
this.nearGrabbingEnter = function() {
|
||||
if (this.hand === 0) {
|
||||
Messages.sendLocalMessage('Hifi-Teleport-Disabler', 'left');
|
||||
|
||||
}
|
||||
if (this.hand === 1) {
|
||||
Messages.sendLocalMessage('Hifi-Teleport-Disabler', 'right');
|
||||
|
||||
}
|
||||
this.grabPointSphereOff();
|
||||
this.lineOff();
|
||||
this.overlayLineOff();
|
||||
this.searchSphereOff();
|
||||
|
||||
this.dropGestureReset();
|
||||
this.clearEquipHaptics();
|
||||
|
@ -1835,12 +1895,23 @@ function MyController(hand) {
|
|||
var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES);
|
||||
this.activateEntity(this.grabbedEntity, grabbedProperties, false);
|
||||
|
||||
// var handRotation = this.getHandRotation();
|
||||
var handRotation = (this.hand === RIGHT_HAND) ? MyAvatar.getRightPalmRotation() : MyAvatar.getLeftPalmRotation();
|
||||
var handPosition = this.getHandPosition();
|
||||
|
||||
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, this.grabbedEntity, DEFAULT_GRABBABLE_DATA);
|
||||
this.ignoreIK = grabbableData.ignoreIK ? grabbableData.ignoreIK : false;
|
||||
if (FORCE_IGNORE_IK) {
|
||||
this.ignoreIK = true;
|
||||
} else {
|
||||
this.ignoreIK = grabbableData.ignoreIK ? grabbableData.ignoreIK : false;
|
||||
}
|
||||
|
||||
var handRotation;
|
||||
var handPosition;
|
||||
if (this.ignoreIK) {
|
||||
var controllerLocation = this.getControllerLocation(false);
|
||||
handRotation = controllerLocation.orientation;
|
||||
handPosition = controllerLocation.position;
|
||||
} else {
|
||||
handRotation = this.getHandRotation();
|
||||
handPosition = this.getHandPosition();
|
||||
}
|
||||
|
||||
var hasPresetPosition = false;
|
||||
if (this.state == STATE_HOLD && this.grabbedHotspot) {
|
||||
|
@ -1879,12 +1950,21 @@ function MyController(hand) {
|
|||
}
|
||||
Messages.sendMessage('Hifi-Object-Manipulation', JSON.stringify({
|
||||
action: 'grab',
|
||||
grabbedEntity: this.grabbedEntity
|
||||
grabbedEntity: this.grabbedEntity,
|
||||
joint: this.hand === RIGHT_HAND ? "RightHand" : "LeftHand"
|
||||
}));
|
||||
} else {
|
||||
// grab entity via parenting
|
||||
this.actionID = null;
|
||||
var handJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ? "RightHand" : "LeftHand");
|
||||
var handJointIndex;
|
||||
if (this.ignoreIK) {
|
||||
handJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
|
||||
"_CONTROLLER_RIGHTHAND" :
|
||||
"_CONTROLLER_LEFTHAND");
|
||||
} else {
|
||||
handJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ? "RightHand" : "LeftHand");
|
||||
}
|
||||
|
||||
var reparentProps = {
|
||||
parentID: MyAvatar.sessionUUID,
|
||||
parentJointIndex: handJointIndex,
|
||||
|
@ -1892,14 +1972,15 @@ function MyController(hand) {
|
|||
angularVelocity: {x: 0, y: 0, z: 0}
|
||||
};
|
||||
if (hasPresetPosition) {
|
||||
reparentProps["localPosition"] = this.offsetPosition;
|
||||
reparentProps["localRotation"] = this.offsetRotation;
|
||||
reparentProps.localPosition = this.offsetPosition;
|
||||
reparentProps.localRotation = this.offsetRotation;
|
||||
}
|
||||
Entities.editEntity(this.grabbedEntity, reparentProps);
|
||||
|
||||
Messages.sendMessage('Hifi-Object-Manipulation', JSON.stringify({
|
||||
action: 'equip',
|
||||
grabbedEntity: this.grabbedEntity
|
||||
grabbedEntity: this.grabbedEntity,
|
||||
joint: this.hand === RIGHT_HAND ? "RightHand" : "LeftHand"
|
||||
}));
|
||||
}
|
||||
|
||||
|
@ -1937,6 +2018,8 @@ function MyController(hand) {
|
|||
|
||||
this.nearGrabbing = function(deltaTime, timestamp) {
|
||||
|
||||
this.grabPointSphereOff();
|
||||
|
||||
if (this.state == STATE_NEAR_GRABBING && !this.triggerClicked) {
|
||||
this.callEntityMethodOnGrabbed("releaseGrab");
|
||||
this.setState(STATE_OFF, "trigger released");
|
||||
|
@ -1967,9 +2050,10 @@ function MyController(hand) {
|
|||
|
||||
// store the offset attach points into preferences.
|
||||
if (USE_ATTACH_POINT_SETTINGS && this.grabbedHotspot && this.grabbedEntity) {
|
||||
var props = Entities.getEntityProperties(this.grabbedEntity, ["localPosition", "localRotation"]);
|
||||
if (props && props.localPosition && props.localRotation) {
|
||||
storeAttachPointForHotspotInSettings(this.grabbedHotspot, this.hand, props.localPosition, props.localRotation);
|
||||
var prefprops = Entities.getEntityProperties(this.grabbedEntity, ["localPosition", "localRotation"]);
|
||||
if (prefprops && prefprops.localPosition && prefprops.localRotation) {
|
||||
storeAttachPointForHotspotInSettings(this.grabbedHotspot, this.hand,
|
||||
prefprops.localPosition, prefprops.localRotation);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1988,22 +2072,34 @@ function MyController(hand) {
|
|||
"position", "rotation", "dimensions",
|
||||
"registrationPoint"]);
|
||||
if (!props.position) {
|
||||
// server may have reset, taking our equipped entity with it. move back to "off" stte
|
||||
// server may have reset, taking our equipped entity with it. move back to "off" state
|
||||
this.callEntityMethodOnGrabbed("releaseGrab");
|
||||
this.setState(STATE_OFF, "entity has no position property");
|
||||
return;
|
||||
}
|
||||
|
||||
var now = Date.now();
|
||||
if (now - this.lastUnequipCheckTime > MSECS_PER_SEC * CHECK_TOO_FAR_UNEQUIP_TIME) {
|
||||
if (this.state == STATE_HOLD && now - this.lastUnequipCheckTime > MSECS_PER_SEC * CHECK_TOO_FAR_UNEQUIP_TIME) {
|
||||
this.lastUnequipCheckTime = now;
|
||||
|
||||
if (props.parentID == MyAvatar.sessionUUID) {
|
||||
var handPosition = this.getHandPosition();
|
||||
var handPosition;
|
||||
if (this.ignoreIK) {
|
||||
handPosition = this.getControllerLocation(false).position;
|
||||
} else {
|
||||
handPosition = this.getHandPosition();
|
||||
}
|
||||
|
||||
var TEAR_AWAY_DISTANCE = 0.1;
|
||||
var dist = distanceBetweenPointAndEntityBoundingBox(handPosition, props);
|
||||
if (dist > TEAR_AWAY_DISTANCE) {
|
||||
this.autoUnequipCounter += 1;
|
||||
} else {
|
||||
this.autoUnequipCounter = 0;
|
||||
}
|
||||
|
||||
if (this.autoUnequipCounter > 1) {
|
||||
// for whatever reason, the held/equipped entity has been pulled away. ungrab or unequip.
|
||||
print("handControllerGrab -- autoreleasing held or equipped item because it is far from hand." +
|
||||
props.parentID + ", dist = " + dist);
|
||||
|
||||
|
@ -2076,16 +2172,15 @@ function MyController(hand) {
|
|||
};
|
||||
|
||||
this.nearTriggerEnter = function() {
|
||||
|
||||
this.clearEquipHaptics();
|
||||
|
||||
this.grabPointSphereOff();
|
||||
Controller.triggerShortHapticPulse(1.0, this.hand);
|
||||
this.callEntityMethodOnGrabbed("startNearTrigger");
|
||||
};
|
||||
|
||||
this.farTriggerEnter = function() {
|
||||
this.clearEquipHaptics();
|
||||
|
||||
this.grabPointSphereOff();
|
||||
this.callEntityMethodOnGrabbed("startFarTrigger");
|
||||
};
|
||||
|
||||
|
@ -2105,10 +2200,9 @@ function MyController(hand) {
|
|||
return;
|
||||
}
|
||||
|
||||
var handPosition = this.getHandPosition();
|
||||
var pickRay = {
|
||||
origin: handPosition,
|
||||
direction: Quat.getUp(this.getHandRotation())
|
||||
origin: this.getControllerLocation().position,
|
||||
direction: Quat.getUp(this.getControllerLocation().orientation)
|
||||
};
|
||||
|
||||
var now = Date.now();
|
||||
|
@ -2137,7 +2231,7 @@ function MyController(hand) {
|
|||
|
||||
this.entityTouchingEnter = function() {
|
||||
// test for intersection between controller laser and web entity plane.
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.hand);
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.getControllerLocation(true));
|
||||
if (intersectInfo) {
|
||||
var pointerEvent = {
|
||||
type: "Press",
|
||||
|
@ -2162,7 +2256,7 @@ function MyController(hand) {
|
|||
|
||||
this.entityTouchingExit = function() {
|
||||
// test for intersection between controller laser and web entity plane.
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.hand);
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.getControllerLocation(true));
|
||||
if (intersectInfo) {
|
||||
var pointerEvent;
|
||||
if (this.deadspotExpired) {
|
||||
|
@ -2200,7 +2294,7 @@ function MyController(hand) {
|
|||
}
|
||||
|
||||
// test for intersection between controller laser and web entity plane.
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.hand);
|
||||
var intersectInfo = handLaserIntersectEntity(this.grabbedEntity, this.getControllerLocation(true));
|
||||
if (intersectInfo) {
|
||||
|
||||
if (Entities.keyboardFocusEntity != this.grabbedEntity) {
|
||||
|
@ -2237,7 +2331,7 @@ function MyController(hand) {
|
|||
};
|
||||
|
||||
this.release = function() {
|
||||
Messages.sendLocalMessage('Hifi-Teleport-Disabler','none');
|
||||
Messages.sendLocalMessage('Hifi-Teleport-Disabler', 'none');
|
||||
this.turnOffVisualizations();
|
||||
|
||||
var noVelocity = false;
|
||||
|
@ -2249,16 +2343,7 @@ function MyController(hand) {
|
|||
// If this looks like the release after adjusting something still held in the other hand, print the position
|
||||
// and rotation of the held thing to help content creators set the userData.
|
||||
var grabData = getEntityCustomData(GRAB_USER_DATA_KEY, this.grabbedEntity, {});
|
||||
if (grabData.refCount > 1) {
|
||||
var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, ["localPosition", "localRotation"]);
|
||||
if (grabbedProperties && grabbedProperties.localPosition && grabbedProperties.localRotation) {
|
||||
print((this.hand === RIGHT_HAND ? '"LeftHand"' : '"RightHand"') + ":" +
|
||||
'[{"x":' + grabbedProperties.localPosition.x + ', "y":' + grabbedProperties.localPosition.y +
|
||||
', "z":' + grabbedProperties.localPosition.z + '}, {"x":' + grabbedProperties.localRotation.x +
|
||||
', "y":' + grabbedProperties.localRotation.y + ', "z":' + grabbedProperties.localRotation.z +
|
||||
', "w":' + grabbedProperties.localRotation.w + '}]');
|
||||
}
|
||||
}
|
||||
this.printNewOffsets = (grabData.refCount > 1);
|
||||
|
||||
if (this.actionID !== null) {
|
||||
Entities.deleteAction(this.grabbedEntity, this.actionID);
|
||||
|
@ -2272,17 +2357,17 @@ function MyController(hand) {
|
|||
noVelocity = true;
|
||||
}
|
||||
}
|
||||
|
||||
this.deactivateEntity(this.grabbedEntity, noVelocity);
|
||||
|
||||
Messages.sendMessage('Hifi-Object-Manipulation', JSON.stringify({
|
||||
action: 'release',
|
||||
grabbedEntity: this.grabbedEntity,
|
||||
joint: this.hand === RIGHT_HAND ? "RightHand" : "LeftHand"
|
||||
}));
|
||||
}
|
||||
|
||||
this.deactivateEntity(this.grabbedEntity, noVelocity);
|
||||
this.actionID = null;
|
||||
|
||||
Messages.sendMessage('Hifi-Object-Manipulation', JSON.stringify({
|
||||
action: 'release',
|
||||
grabbedEntity: this.grabbedEntity,
|
||||
joint: this.hand === RIGHT_HAND ? "RightHand" : "LeftHand"
|
||||
}));
|
||||
|
||||
this.grabbedEntity = null;
|
||||
this.grabbedHotspot = null;
|
||||
|
||||
|
@ -2293,13 +2378,14 @@ function MyController(hand) {
|
|||
|
||||
this.cleanup = function() {
|
||||
this.release();
|
||||
this.grabPointSphereOff();
|
||||
};
|
||||
|
||||
this.heartBeat = function(entityID) {
|
||||
var now = Date.now();
|
||||
if (now - this.lastHeartBeat > HEART_BEAT_INTERVAL) {
|
||||
var data = getEntityCustomData(GRAB_USER_DATA_KEY, entityID, {});
|
||||
data["heartBeat"] = now;
|
||||
data.heartBeat = now;
|
||||
setEntityCustomData(GRAB_USER_DATA_KEY, entityID, data);
|
||||
this.lastHeartBeat = now;
|
||||
}
|
||||
|
@ -2308,12 +2394,14 @@ function MyController(hand) {
|
|||
this.resetAbandonedGrab = function(entityID) {
|
||||
print("cleaning up abandoned grab on " + entityID);
|
||||
var data = getEntityCustomData(GRAB_USER_DATA_KEY, entityID, {});
|
||||
data["refCount"] = 1;
|
||||
data.refCount = 1;
|
||||
setEntityCustomData(GRAB_USER_DATA_KEY, entityID, data);
|
||||
this.deactivateEntity(entityID, false);
|
||||
};
|
||||
|
||||
this.activateEntity = function(entityID, grabbedProperties, wasLoaded) {
|
||||
this.autoUnequipCounter = 0;
|
||||
|
||||
if (this.entityActivated) {
|
||||
return;
|
||||
}
|
||||
|
@ -2327,29 +2415,29 @@ function MyController(hand) {
|
|||
// get re-instated after all the grabs have been released) be correct.
|
||||
Script.clearTimeout(delayedDeactivateTimeout);
|
||||
delayedDeactivateTimeout = null;
|
||||
grabbedProperties["collidesWith"] = delayedDeactivateFunc();
|
||||
grabbedProperties.collidesWith = delayedDeactivateFunc();
|
||||
}
|
||||
|
||||
var data = getEntityCustomData(GRAB_USER_DATA_KEY, entityID, {});
|
||||
var now = Date.now();
|
||||
|
||||
if (wasLoaded) {
|
||||
data["refCount"] = 1;
|
||||
data.refCount = 1;
|
||||
} else {
|
||||
data["refCount"] = data["refCount"] ? data["refCount"] + 1 : 1;
|
||||
data.refCount = data.refCount ? data.refCount + 1 : 1;
|
||||
|
||||
// zero gravity and set ignoreForCollisions in a way that lets us put them back, after all grabs are done
|
||||
if (data["refCount"] == 1) {
|
||||
data["heartBeat"] = now;
|
||||
if (data.refCount == 1) {
|
||||
data.heartBeat = now;
|
||||
this.lastHeartBeat = now;
|
||||
|
||||
this.isInitialGrab = true;
|
||||
data["gravity"] = grabbedProperties.gravity;
|
||||
data["collidesWith"] = grabbedProperties.collidesWith;
|
||||
data["collisionless"] = grabbedProperties.collisionless;
|
||||
data["dynamic"] = grabbedProperties.dynamic;
|
||||
data["parentID"] = wasLoaded ? NULL_UUID : grabbedProperties.parentID;
|
||||
data["parentJointIndex"] = grabbedProperties.parentJointIndex;
|
||||
data.gravity = grabbedProperties.gravity;
|
||||
data.collidesWith = grabbedProperties.collidesWith;
|
||||
data.collisionless = grabbedProperties.collisionless;
|
||||
data.dynamic = grabbedProperties.dynamic;
|
||||
data.parentID = wasLoaded ? NULL_UUID : grabbedProperties.parentID;
|
||||
data.parentJointIndex = grabbedProperties.parentJointIndex;
|
||||
|
||||
var whileHeldProperties = {
|
||||
gravity: {
|
||||
|
@ -2363,9 +2451,8 @@ function MyController(hand) {
|
|||
"collidesWith": COLLIDES_WITH_WHILE_GRABBED
|
||||
};
|
||||
Entities.editEntity(entityID, whileHeldProperties);
|
||||
} else if (data["refCount"] > 1) {
|
||||
if (data["heartBeat"] === undefined ||
|
||||
now - data["heartBeat"] > HEART_BEAT_TIMEOUT) {
|
||||
} else if (data.refCount > 1) {
|
||||
if (this.heartBeatIsStale(data)) {
|
||||
// this entity has userData suggesting it is grabbed, but nobody is updating the hearbeat.
|
||||
// deactivate it before grabbing.
|
||||
this.resetAbandonedGrab(entityID);
|
||||
|
@ -2380,7 +2467,8 @@ function MyController(hand) {
|
|||
// bootstrap themselves with the held object. This happens because the meaning of "otherAvatar" in
|
||||
// the collision mask hinges on who the physics simulation owner is.
|
||||
Entities.editEntity(entityID, {
|
||||
"collidesWith": COLLIDES_WITH_WHILE_MULTI_GRABBED
|
||||
// "collidesWith": removeAvatarsFromCollidesWith(grabbedProperties.collidesWith)
|
||||
collisionless: true
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -2393,6 +2481,10 @@ function MyController(hand) {
|
|||
// unhook them.
|
||||
var handJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ? "RightHand" : "LeftHand");
|
||||
var children = Entities.getChildrenIDsOfJoint(MyAvatar.sessionUUID, handJointIndex);
|
||||
var controllerJointIndex = MyAvatar.getJointIndex(this.hand === RIGHT_HAND ?
|
||||
"_CONTROLLER_RIGHTHAND" :
|
||||
"_CONTROLLER_LEFTHAND");
|
||||
children.concat(Entities.getChildrenIDsOfJoint(MyAvatar.sessionUUID, controllerJointIndex));
|
||||
children.forEach(function(childID) {
|
||||
print("disconnecting stray child of hand: (" + _this.hand + ") " + childID);
|
||||
Entities.editEntity(childID, {
|
||||
|
@ -2411,9 +2503,9 @@ function MyController(hand) {
|
|||
collidesWith: collidesWith
|
||||
});
|
||||
var data = getEntityCustomData(GRAB_USER_DATA_KEY, entityID, {});
|
||||
if (data && data["refCount"]) {
|
||||
data["refCount"] = data["refCount"] - 1;
|
||||
if (data["refCount"] < 1) {
|
||||
if (data && data.refCount) {
|
||||
data.refCount = data.refCount - 1;
|
||||
if (data.refCount < 1) {
|
||||
data = null;
|
||||
}
|
||||
} else {
|
||||
|
@ -2433,24 +2525,24 @@ function MyController(hand) {
|
|||
|
||||
var data = getEntityCustomData(GRAB_USER_DATA_KEY, entityID, {});
|
||||
var doDelayedDeactivate = false;
|
||||
if (data && data["refCount"]) {
|
||||
data["refCount"] = data["refCount"] - 1;
|
||||
if (data["refCount"] < 1) {
|
||||
if (data && data.refCount) {
|
||||
data.refCount = data.refCount - 1;
|
||||
if (data.refCount < 1) {
|
||||
deactiveProps = {
|
||||
gravity: data["gravity"],
|
||||
gravity: data.gravity,
|
||||
// don't set collidesWith myAvatar back right away, because thrown things tend to bounce off the
|
||||
// avatar's capsule.
|
||||
collidesWith: removeMyAvatarFromCollidesWith(data["collidesWith"]),
|
||||
collisionless: data["collisionless"],
|
||||
dynamic: data["dynamic"],
|
||||
parentID: data["parentID"],
|
||||
parentJointIndex: data["parentJointIndex"]
|
||||
collidesWith: removeMyAvatarFromCollidesWith(data.collidesWith),
|
||||
collisionless: data.collisionless,
|
||||
dynamic: data.dynamic,
|
||||
parentID: data.parentID,
|
||||
parentJointIndex: data.parentJointIndex
|
||||
};
|
||||
|
||||
doDelayedDeactivate = (data["collidesWith"].indexOf("myAvatar") >= 0);
|
||||
doDelayedDeactivate = (data.collidesWith.indexOf("myAvatar") >= 0);
|
||||
|
||||
if (doDelayedDeactivate) {
|
||||
var delayedCollidesWith = data["collidesWith"];
|
||||
var delayedCollidesWith = data.collidesWith;
|
||||
var delayedEntityID = entityID;
|
||||
delayedDeactivateFunc = function() {
|
||||
// set collidesWith back to original value a bit later than the rest
|
||||
|
@ -2470,19 +2562,19 @@ function MyController(hand) {
|
|||
|
||||
if (!noVelocity &&
|
||||
parentID == MyAvatar.sessionUUID &&
|
||||
Vec3.length(data["gravity"]) > 0.0 &&
|
||||
data["dynamic"] &&
|
||||
data["parentID"] == NULL_UUID &&
|
||||
!data["collisionless"]) {
|
||||
deactiveProps["velocity"] = this.currentVelocity;
|
||||
Vec3.length(data.gravity) > 0.0 &&
|
||||
data.dynamic &&
|
||||
data.parentID == NULL_UUID &&
|
||||
!data.collisionless) {
|
||||
deactiveProps.velocity = this.currentVelocity;
|
||||
}
|
||||
if (noVelocity) {
|
||||
deactiveProps["velocity"] = {
|
||||
deactiveProps.velocity = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0
|
||||
};
|
||||
deactiveProps["angularVelocity"] = {
|
||||
deactiveProps.angularVelocity = {
|
||||
x: 0.0,
|
||||
y: 0.0,
|
||||
z: 0.0
|
||||
|
@ -2508,6 +2600,17 @@ function MyController(hand) {
|
|||
}
|
||||
};
|
||||
Entities.editEntity(entityID, deactiveProps);
|
||||
|
||||
if (this.printNewOffsets) {
|
||||
var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, ["localPosition", "localRotation"]);
|
||||
if (grabbedProperties && grabbedProperties.localPosition && grabbedProperties.localRotation) {
|
||||
print((this.hand === RIGHT_HAND ? '"LeftHand"' : '"RightHand"') + ":" +
|
||||
'[{"x":' + grabbedProperties.localPosition.x + ', "y":' + grabbedProperties.localPosition.y +
|
||||
', "z":' + grabbedProperties.localPosition.z + '}, {"x":' + grabbedProperties.localRotation.x +
|
||||
', "y":' + grabbedProperties.localRotation.y + ', "z":' + grabbedProperties.localRotation.z +
|
||||
', "w":' + grabbedProperties.localRotation.w + '}]');
|
||||
}
|
||||
}
|
||||
} else if (noVelocity) {
|
||||
Entities.editEntity(entityID, {
|
||||
velocity: {
|
||||
|
@ -2520,7 +2623,7 @@ function MyController(hand) {
|
|||
y: 0.0,
|
||||
z: 0.0
|
||||
},
|
||||
dynamic: data["dynamic"]
|
||||
dynamic: data.dynamic
|
||||
});
|
||||
}
|
||||
} else {
|
||||
|
@ -2567,9 +2670,13 @@ function update(deltaTime) {
|
|||
|
||||
if (handToDisable !== LEFT_HAND && handToDisable !== 'both') {
|
||||
leftController.update(deltaTime, timestamp);
|
||||
} else {
|
||||
leftController.release();
|
||||
}
|
||||
if (handToDisable !== RIGHT_HAND && handToDisable !== 'both') {
|
||||
rightController.update(deltaTime, timestamp);
|
||||
} else {
|
||||
rightController.release();
|
||||
}
|
||||
equipHotspotBuddy.update(deltaTime, timestamp);
|
||||
entityPropertiesCache.update();
|
||||
|
@ -2620,7 +2727,7 @@ var handleHandMessages = function(channel, message, sender) {
|
|||
selectedController.nearGrabbingEnter();
|
||||
|
||||
} catch (e) {
|
||||
print("WARNING: error parsing Hifi-Hand-Grab message");
|
||||
print("WARNING: handControllerGrab.js -- error parsing Hifi-Hand-Grab message: " + message);
|
||||
}
|
||||
|
||||
} else if (channel === 'Hifi-Hand-RayPick-Blacklist') {
|
||||
|
@ -2640,7 +2747,7 @@ var handleHandMessages = function(channel, message, sender) {
|
|||
}
|
||||
|
||||
} catch (e) {
|
||||
print("WARNING: error parsing Hifi-Hand-RayPick-Blacklist message");
|
||||
print("WARNING: handControllerGrab.js -- error parsing Hifi-Hand-RayPick-Blacklist message: " + message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
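The hunks above move handControllerGrab.js from bracket to dot notation when it reads the grab bookkeeping stored in an entity's userData, and decrement a refCount as each hand lets go. A minimal sketch of that reference-counting pattern, assuming the getEntityCustomData/setEntityCustomData helpers from the script's utils library and an illustrative key name:

// Sketch only: the GRAB_USER_DATA_KEY value is assumed for illustration, and
// setEntityCustomData is assumed to be the companion write helper to the
// getEntityCustomData call shown in the hunks above.
var GRAB_USER_DATA_KEY = "grab";

function releaseGrabRef(entityID) {
    var data = getEntityCustomData(GRAB_USER_DATA_KEY, entityID, {});
    if (data && data.refCount) {
        data.refCount = data.refCount - 1;
        if (data.refCount < 1) {
            data = null;    // last hand released -- clear the record so physics can be restored
        }
        setEntityCustomData(GRAB_USER_DATA_KEY, entityID, data);
    }
}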
@@ -94,7 +94,6 @@ function Teleporter() {

this.initialize = function() {
this.createMappings();
this.disableGrab();
};

this.createMappings = function() {

@@ -218,10 +217,6 @@ function Teleporter() {
this.updateConnected = null;
this.inCoolIn = false;
inTeleportMode = false;

Script.setTimeout(function() {
_this.enableGrab();
}, 200);
};

this.update = function() {

@@ -494,14 +489,6 @@ function Teleporter() {
});
};

this.disableGrab = function() {
Messages.sendLocalMessage('Hifi-Hand-Disabler', this.teleportHand);
};

this.enableGrab = function() {
Messages.sendLocalMessage('Hifi-Hand-Disabler', 'none');
};

this.triggerHaptics = function() {
var hand = this.teleportHand === 'left' ? 0 : 1;
var haptic = Controller.triggerShortHapticPulse(0.2, hand);
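The Teleporter hunks above drop the script's own enableGrab/disableGrab helpers; together with the new 'both' checks in handControllerGrab.js's update() earlier in this diff, a script can park one or both grab controllers through the Hifi-Hand-Disabler local message. A small sketch of that convention (the wrapper function is illustrative; only the message channel and values appear in the diff):

// Illustrative wrapper around the message shown above; 'left', 'right' or 'both'
// disables the matching grab controller(s), 'none' hands control back.
function setHandGrabDisabled(which) {
    Messages.sendLocalMessage('Hifi-Hand-Disabler', which);
}

setHandGrabDisabled('both');   // e.g. while a teleport arc is being aimed
setHandGrabDisabled('none');   // restore normal grabbing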
scripts/system/html/css/marketplaces.css (new file, 98 lines)
@@ -0,0 +1,98 @@
/*
//
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
*/
body {
background: white;
padding: 0 0 0 0;
font-family:Raleway-SemiBold;
}
.marketplaces-container {
display: inline-block;
color: black;
width: 94%;
margin-left: 3%;
height: 100%;
}
.marketplaces-title {
margin-top: 45px;
margin-bottom: 20px;
}
.marketplaces-intro-text {
margin-bottom: 60px;
}
.marketplace-tile {
float:left;
width: 100%;
}
.marketplace-tile-first-column {
text-align: center;
float: left;
width: 33%;
}
.marketplace-tile-second-column {
float: left;
margin-left:4%;
width: 62%;
}
.exploreButton {
font-size: 16px !important;
width: 200px !important;
height: 45px !important;
margin-top: 20px;
margin-bottom: 30px;
}
.tile-divider {
width: 100%;
margin-left: 0%;
display: block;
height: 1px;
border: 0;
border-top: 1px solid lightgrey;
margin: 1em 0;
padding: 0;
margin-bottom: 30px;
}
.marketplace-tile-description {
margin-top: 15px;
margin-bottom: 30px;
}
.marketplace-tile-image {
margin-top:15px;
max-width: 256px;
height: 128px;
margin-bottom:60px;
-webkit-box-shadow: -1px 4px 16px 0px rgba(0, 0, 0, 0.48);
-moz-box-shadow: -1px 4px 16px 0px rgba(0, 0, 0, 0.48);
box-shadow: -1px 4px 16px 0px rgba(0, 0, 0, 0.48);
}
.marketplace-clara-steps {
padding-left: 15px;
}
.marketplace-clara-steps > li {
margin-top: 5px;
}
@media (max-width:768px) {
.marketplace-tile-first-column {
float: left;
width: 100%;
}
.marketplace-tile-second-column {
float: left;
width: 100%;
}
.exploreButton-holder {
width:100%;
text-align:center;
}
.tile-divider {
width: 100%;
margin-left: 0%;
}
.marketplace-tile-image{
margin-bottom:15px;
}
}
scripts/system/html/img/clara-tile.png (new binary file, 7.9 KiB)
scripts/system/html/img/hifi-marketplace-tile.png (new binary file, 6.7 KiB)
scripts/system/html/js/marketplaces.js (new file, 12 lines)
@@ -0,0 +1,12 @@
function loaded() {
bindExploreButtons();
}

function bindExploreButtons() {
$('#exploreClaraMarketplace').on('click', function() {
window.location = "https://clara.io/library?public=true"
})
$('#exploreHifiMarketplace').on('click', function() {
window.location = "http://www.highfidelity.com/marketplace"
})
}
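bindExploreButtons() above wires each Explore button to a hard-coded URL with jQuery. A sketch of the same wiring driven by a small id-to-URL map, so another tile only needs one more entry (the ids and URLs are the ones from the file; the table-driven shape is illustrative, not part of the commit):

// Illustrative variant of bindExploreButtons(): same behaviour as the file above,
// but driven by a lookup so new tiles only need another entry.
var EXPLORE_TARGETS = {
    exploreHifiMarketplace: "http://www.highfidelity.com/marketplace",
    exploreClaraMarketplace: "https://clara.io/library?public=true"
};

function bindExploreButtons() {
    Object.keys(EXPLORE_TARGETS).forEach(function(id) {
        $('#' + id).on('click', function() {
            window.location = EXPLORE_TARGETS[id];
        });
    });
}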
scripts/system/html/marketplaces.html (new file, 65 lines)
@@ -0,0 +1,65 @@
<!--
//  marketplaces.html
//
//  Copyright 2016 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-->
<html>
<head>
<title>Marketplaces</title>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<link rel="stylesheet" type="text/css" href="css/edit-style.css">
<link rel="stylesheet" type="text/css" href="css/marketplaces.css">
<script src="js/jquery-2.1.4.min.js"></script>
<script type="text/javascript" src="qrc:///qtwebchannel/qwebchannel.js"></script>
<script type="text/javascript" src="js/eventBridgeLoader.js"></script>
<script src="js/marketplaces.js"></script>
</head>
<body onload='loaded();'>
<div class="marketplaces-container">
<h2 class="marketplaces-title">
Marketplaces
</h2>
<div class="marketplaces-intro-text">
<p>
You can bring content into High Fidelity from anywhere you want. Here are a few places that support direct import of content right now. If you'd like to suggest a Market to include here, <a href="mailto:contact@highfidelity.io">let us know.</a>
</p>
</div>
<div class="marketplace-tile">
<div class="marketplace-tile-first-column">
<img class="marketplace-tile-image" src="img/hifi-marketplace-tile.png">
</div>
<div class="marketplace-tile-second-column">
<p class="marketplace-tile-description">This is the default High Fidelity marketplace. Viewing and downloading content from here is fully supported in Interface.</p>
<div class="exploreButton-holder">
<input class="blue exploreButton" type="button" value="Explore" id="exploreHifiMarketplace"></input></div>
</div>
<hr class="tile-divider">
</div>
<div class="marketplace-tile">
<div class="marketplace-tile-first-column">
<img class="marketplace-tile-image" src="img/clara-tile.png">
</div>
<div class="marketplace-tile-second-column">
<p class="marketplace-tile-description">Clara.io has thousands of models available for importing into High Fidelity. Follow these steps for the best experience:</p>
<ol class="marketplace-clara-steps">
<li><a href="http://www.clara.io/signup">Create an account here </a>or log in as an existing user.</li>
<li>Choose a model from the list and click Download -> Autodesk FBX.</li>
<li>After the file processes, click Download.</li>
<li>Add the model to your asset server, then find it from the list and choose Add To World.</li>
</ol>
<div class="exploreButton-holder">
<input class="blue exploreButton" type="button" value="Explore" id="exploreClaraMarketplace"></input>
</div>
</div>
<hr class="tile-divider">
</div>
</div>



</div>
</body>
</html>
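The page above pulls in qwebchannel.js and eventBridgeLoader.js but does not use them yet. A hypothetical sketch of how it could report a click back to Interface, assuming eventBridgeLoader.js exposes an openEventBridge(callback) helper the way the other system HTML pages do, and that a companion Interface script listens for the web event:

// Hypothetical use of the bridge scripts included in the <head> above; marketplaces.html
// itself does not call them yet. openEventBridge() and the event payload shape are
// assumptions, not something shown in this commit.
function reportExploreClick(marketplaceName) {
    openEventBridge(function(bridge) {
        bridge.emitWebEvent(JSON.stringify({ type: "exploreClicked", marketplace: marketplaceName }));
    });
}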
@@ -14,15 +14,15 @@
(function() { // BEGIN LOCAL_SCOPE

var toolIconUrl = Script.resolvePath("../assets/images/tools/");
var qml = Script.resolvePath("../../../resources/qml/MarketplaceComboBox.qml")
var qml = Script.resolvePath("../../../resources/qml/Marketplaces.qml")

var MARKETPLACE_URL = "https://metaverse.highfidelity.com/marketplace";

var marketplaceWindow = new OverlayWindow({
title: "Marketplace",
source: qml,
width: 900,
height: 700,
width: 1000,
height: 900,
toolWindow: false,
visible: false,
});
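This hunk only repoints the OverlayWindow at the new Marketplaces.qml and enlarges it; the rest of the script is unchanged. For context, a toggle along these lines is how such a window is typically shown and hidden (the setVisible() call and visible property are assumptions about the OverlayWindow wrapper, not something shown in this hunk):

// Hypothetical toggle handler for the OverlayWindow created above.
function toggleMarketplace() {
    marketplaceWindow.setVisible(!marketplaceWindow.visible);   // assumed API, see lead-in
}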
tests/render-texture-load/CMakeLists.txt (new file, 28 lines)
@@ -0,0 +1,28 @@

set(TARGET_NAME render-texture-load)

if (WIN32)
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /ignore:4049 /ignore:4217")
endif()

# This is not a testcase -- just set it up as a regular hifi project
setup_hifi_project(Quick Gui OpenGL)
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "Tests/manual-tests/")

# link in the shared libraries
link_hifi_libraries(shared octree gl gpu gpu-gl render model model-networking networking render-utils fbx entities entities-renderer animation audio avatars script-engine physics)

package_libraries_for_deployment()

target_zlib()
add_dependency_external_projects(quazip)
find_package(QuaZip REQUIRED)
target_include_directories(${TARGET_NAME} SYSTEM PUBLIC ${QUAZIP_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${QUAZIP_LIBRARIES})

if (WIN32)
add_paths_to_fixup_libs(${QUAZIP_DLL_PATH})
endif ()

target_bullet()
tests/render-texture-load/src/main.cpp (new file, 585 lines)
@@ -0,0 +1,585 @@
//
//  Created by Bradley Austin Davis on 2016/07/01
//  Copyright 2014 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <iostream>
#include <string>
#include <vector>
#include <sstream>

#include <gl/Config.h>
#include <gl/Context.h>

#include <QtCore/QDir>
#include <QtCore/QElapsedTimer>
#include <QtCore/QLoggingCategory>
#include <QtCore/QTimer>
#include <QtCore/QThread>
#include <QtCore/QThreadPool>
#include <QtCore/QObject>
#include <QtCore/QByteArray>
#include <QtCore/QTemporaryDir>
#include <QtCore/QTemporaryFile>
#include <QtNetwork/QNetworkAccessManager>
#include <QtNetwork/QNetworkRequest>
#include <QtNetwork/QNetworkReply>

#include <QtGui/QGuiApplication>
#include <QtGui/QResizeEvent>
#include <QtGui/QWindow>

#include <QtWidgets/QFileDialog>
#include <QtWidgets/QInputDialog>
#include <QtWidgets/QMessageBox>
#include <QtWidgets/QApplication>

#include <quazip5/quazip.h>
#include <quazip5/JlCompress.h>

#include <shared/RateCounter.h>
#include <AssetClient.h>
#include <PathUtils.h>

#include <gpu/gl/GLBackend.h>
#include <gpu/gl/GLFramebuffer.h>
#include <gpu/gl/GLTexture.h>
#include <gpu/StandardShaderLib.h>

#include <AddressManager.h>
#include <NodeList.h>
#include <TextureCache.h>
#include <FramebufferCache.h>
#include <GeometryCache.h>
#include <DeferredLightingEffect.h>
#include <RenderShadowTask.h>
#include <RenderDeferredTask.h>

extern QThread* RENDER_THREAD;

static const QString DATA_SET = "https://hifi-content.s3.amazonaws.com/austin/textures.zip";
static const QTemporaryDir DATA_DIR;

class FileDownloader : public QObject {
Q_OBJECT
public:
using Handler = std::function<void(const QByteArray& data)>;

FileDownloader(QUrl url, const Handler& handler, QObject *parent = 0) : QObject(parent), _handler(handler) {
connect(&_accessManager, SIGNAL(finished(QNetworkReply*)), this, SLOT(fileDownloaded(QNetworkReply*)));
_accessManager.get(QNetworkRequest(url));
}

void waitForDownload() {
while (!_complete) {
QCoreApplication::processEvents();
}
}

private slots:
void fileDownloaded(QNetworkReply* pReply) {
_handler(pReply->readAll());
pReply->deleteLater();
_complete = true;
}

private:
QNetworkAccessManager _accessManager;
Handler _handler;
bool _complete { false };
};

class RenderThread : public GenericThread {
using Parent = GenericThread;
public:
gl::Context _context;
gpu::PipelinePointer _presentPipeline;
gpu::ContextPointer _gpuContext; // initialized during window creation
std::atomic<size_t> _presentCount;
QElapsedTimer _elapsed;
std::atomic<uint16_t> _fps{ 1 };
RateCounter<200> _fpsCounter;
std::mutex _mutex;
std::shared_ptr<gpu::Backend> _backend;
std::vector<uint64_t> _frameTimes;
size_t _frameIndex;
std::mutex _frameLock;
std::queue<gpu::FramePointer> _pendingFrames;
gpu::FramePointer _activeFrame;
QSize _size;
static const size_t FRAME_TIME_BUFFER_SIZE{ 1024 };

void submitFrame(const gpu::FramePointer& frame) {
std::unique_lock<std::mutex> lock(_frameLock);
_pendingFrames.push(frame);
}

void initialize(QWindow* window, gl::Context& initContext) {
setObjectName("RenderThread");
_context.setWindow(window);
_context.create();
_context.makeCurrent();
window->setSurfaceType(QSurface::OpenGLSurface);
_context.makeCurrent(_context.qglContext(), window);
// GPU library init
gpu::Context::init<gpu::gl::GLBackend>();
_gpuContext = std::make_shared<gpu::Context>();
_backend = _gpuContext->getBackend();
_context.makeCurrent();
DependencyManager::get<DeferredLightingEffect>()->init();
_context.makeCurrent();
initContext.create();
_context.doneCurrent();
std::unique_lock<std::mutex> lock(_mutex);
Parent::initialize();
_context.moveToThread(_thread);
}

void setup() override {
RENDER_THREAD = QThread::currentThread();

// Wait until the context has been moved to this thread
{
std::unique_lock<std::mutex> lock(_mutex);
}

_context.makeCurrent();
glewExperimental = true;
glewInit();
glGetError();

//wglSwapIntervalEXT(0);
_frameTimes.resize(FRAME_TIME_BUFFER_SIZE, 0);
{
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
auto ps = gpu::StandardShaderLib::getDrawTexturePS();
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
_presentPipeline = gpu::Pipeline::create(program, state);
}

//_textOverlay = new TextOverlay(glm::uvec2(800, 600));
glViewport(0, 0, 800, 600);
(void)CHECK_GL_ERROR();
_elapsed.start();
}

void shutdown() override {
_activeFrame.reset();
while (!_pendingFrames.empty()) {
_gpuContext->consumeFrameUpdates(_pendingFrames.front());
_pendingFrames.pop();
}
_presentPipeline.reset();
_gpuContext.reset();
}

void renderFrame(gpu::FramePointer& frame) {
++_presentCount;
_context.makeCurrent();
_backend->recycle();
_backend->syncCache();
if (frame && !frame->batches.empty()) {
_gpuContext->executeFrame(frame);

{
auto geometryCache = DependencyManager::get<GeometryCache>();
gpu::Batch presentBatch;
presentBatch.setViewportTransform({ 0, 0, _size.width(), _size.height() });
presentBatch.enableStereo(false);
presentBatch.resetViewTransform();
presentBatch.setFramebuffer(gpu::FramebufferPointer());
presentBatch.setResourceTexture(0, frame->framebuffer->getRenderBuffer(0));
presentBatch.setPipeline(_presentPipeline);
presentBatch.draw(gpu::TRIANGLE_STRIP, 4);
_gpuContext->executeBatch(presentBatch);
}
(void)CHECK_GL_ERROR();
}
_context.makeCurrent();
_context.swapBuffers();
_fpsCounter.increment();
static size_t _frameCount{ 0 };
++_frameCount;
if (_elapsed.elapsed() >= 500) {
_fps = _fpsCounter.rate();
_frameCount = 0;
_elapsed.restart();
}
(void)CHECK_GL_ERROR();
_context.doneCurrent();
}

void report() {
uint64_t total = 0;
for (const auto& t : _frameTimes) {
total += t;
}
auto averageFrameTime = total / FRAME_TIME_BUFFER_SIZE;
qDebug() << "Average frame " << averageFrameTime;

std::list<std::pair<uint64_t, size_t>> sortedHighFrames;
for (size_t i = 0; i < _frameTimes.size(); ++i) {
const auto& t = _frameTimes[i];
if (t > averageFrameTime * 6) {
sortedHighFrames.push_back({ t, i } );
}
}

sortedHighFrames.sort();
for (const auto& p : sortedHighFrames) {
qDebug() << "Long frame " << p.first << " " << p.second;
}
}

bool process() override {
std::queue<gpu::FramePointer> pendingFrames;
{
std::unique_lock<std::mutex> lock(_frameLock);
pendingFrames.swap(_pendingFrames);
}

while (!pendingFrames.empty()) {
_activeFrame = pendingFrames.front();
if (_activeFrame) {
_gpuContext->consumeFrameUpdates(_activeFrame);
}
pendingFrames.pop();
}

if (!_activeFrame) {
QThread::msleep(1);
return true;
}

{
auto start = usecTimestampNow();
renderFrame(_activeFrame);
auto duration = usecTimestampNow() - start;
auto frameBufferIndex = _frameIndex % FRAME_TIME_BUFFER_SIZE;
_frameTimes[frameBufferIndex] = duration;
++_frameIndex;
if (0 == _frameIndex % FRAME_TIME_BUFFER_SIZE) {
report();
}
}
return true;
}
};

QString fileForPath(const QString& name) {
QCryptographicHash hash(QCryptographicHash::Md5);
hash.addData(name.toLocal8Bit().data(), name.length());
QString hashStr = QString(hash.result().toHex());
auto dot = name.lastIndexOf('.');
QString extension = name.right(name.length() - dot);
QString result = DATA_DIR.path() + "/" + hashStr + extension;
return result;
}

// Create a simple OpenGL window that renders text in various ways
class QTestWindow : public QWindow {
public:
//"/-17.2049,-8.08629,-19.4153/0,0.881994,0,-0.47126"
static void setup() {
DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
//DependencyManager::registerInheritance<SpatialParentFinder, ParentFinder>();
DependencyManager::set<AddressManager>();
DependencyManager::set<NodeList>(NodeType::Agent, 0);
DependencyManager::set<DeferredLightingEffect>();
DependencyManager::set<ResourceCacheSharedItems>();
DependencyManager::set<TextureCache>();
DependencyManager::set<FramebufferCache>();
DependencyManager::set<GeometryCache>();
DependencyManager::set<ModelCache>();
DependencyManager::set<PathUtils>();
}

struct TextureLoad {
uint32_t time;
QString file;
QString src;
};

QTestWindow() {

_currentTexture = _textures.end();
{
QStringList stringList;
QFile textFile("h:/textures/loads.txt");
textFile.open(QFile::ReadOnly);
//... (open the file for reading, etc.)
QTextStream textStream(&textFile);
while (true) {
QString line = textStream.readLine();
if (line.isNull())
break;
else
stringList.append(line);
}

for (QString s : stringList) {
auto index = s.indexOf(" ");
QString timeStr = s.left(index);
auto time = timeStr.toUInt();
QString path = s.right(s.length() - index).trimmed();
path = fileForPath(path);
qDebug() << "Path " << path;
if (!QFileInfo(path).exists()) {
continue;
}
_textureLoads.push({ time, path, s });
}
}

installEventFilter(this);
QThreadPool::globalInstance()->setMaxThreadCount(2);
QThread::currentThread()->setPriority(QThread::HighestPriority);
ResourceManager::init();
setFlags(Qt::MSWindowsOwnDC | Qt::Window | Qt::Dialog | Qt::WindowMinMaxButtonsHint | Qt::WindowTitleHint);
_size = QSize(800, 600);
_renderThread._size = _size;
setGeometry(QRect(QPoint(), _size));
create();
show();
QCoreApplication::processEvents();
// Create the initial context
_renderThread.initialize(this, _initContext);
_initContext.makeCurrent();
// FIXME use a wait condition
QThread::msleep(1000);
_renderThread.submitFrame(gpu::FramePointer());
_initContext.makeCurrent();
{
auto vs = gpu::StandardShaderLib::getDrawUnitQuadTexcoordVS();
auto ps = gpu::StandardShaderLib::getDrawTexturePS();
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::makeProgram(*program);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setScissorEnable(true);
_simplePipeline = gpu::Pipeline::create(program, state);
}

QTimer* timer = new QTimer(this);
timer->setInterval(0);
connect(timer, &QTimer::timeout, this, [this] {
draw();
});
timer->start();
_ready = true;
}

virtual ~QTestWindow() {
DependencyManager::destroy<FramebufferCache>();
DependencyManager::destroy<TextureCache>();
DependencyManager::destroy<ModelCache>();
DependencyManager::destroy<GeometryCache>();
ResourceManager::cleanup();
}

protected:

bool eventFilter(QObject *obj, QEvent *event) override {
if (event->type() == QEvent::Close) {
_renderThread.terminate();
}

return QWindow::eventFilter(obj, event);
}

void keyPressEvent(QKeyEvent* event) override {
}

void keyReleaseEvent(QKeyEvent* event) override {
}

void mouseMoveEvent(QMouseEvent* event) override {
}

void resizeEvent(QResizeEvent* ev) override {
resizeWindow(ev->size());
}

private:
std::queue<TextureLoad> _textureLoads;
std::list<gpu::TexturePointer> _textures;
std::list<gpu::TexturePointer>::iterator _currentTexture;

uint16_t _fps;
gpu::PipelinePointer _simplePipeline;

void draw() {
if (!_ready) {
return;
}
if (!isVisible()) {
return;
}
if (_renderCount.load() != 0 && _renderCount.load() >= _renderThread._presentCount.load()) {
QThread::usleep(1);
return;
}
_renderCount = _renderThread._presentCount.load();
update();

QSize windowSize = _size;
auto framebufferCache = DependencyManager::get<FramebufferCache>();
framebufferCache->setFrameBufferSize(windowSize);

// Final framebuffer that will be handled to the display-plugin
render();

if (_fps != _renderThread._fps) {
_fps = _renderThread._fps;
updateText();
}
}

void updateText() {
setTitle(QString("FPS %1").arg(_fps));
}

void update() {
auto now = usecTimestampNow();
static auto last = now;
auto delta = (now - last) / USECS_PER_MSEC;
if (!_textureLoads.empty()) {
const auto& front = _textureLoads.front();
if (delta >= front.time) {
QFileInfo fileInfo(front.file);
if (!fileInfo.exists()) {
qDebug() << "Missing file " << front.file;
} else {
qDebug() << "Loading " << front.src;
_textures.push_back(DependencyManager::get<TextureCache>()->getImageTexture(front.file));
_currentTexture = _textures.begin();
}
_textureLoads.pop();
if (_textureLoads.empty()) {
qDebug() << "Done";
}
}
}
}

void render() {
auto& gpuContext = _renderThread._gpuContext;
gpuContext->beginFrame();
gpu::doInBatch(gpuContext, [&](gpu::Batch& batch) {
batch.resetStages();
});
PROFILE_RANGE(__FUNCTION__);
auto framebuffer = DependencyManager::get<FramebufferCache>()->getFramebuffer();

gpu::doInBatch(gpuContext, [&](gpu::Batch& batch) {
batch.enableStereo(false);
batch.setFramebuffer(framebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(1, 0, 0, 1));
auto vpsize = framebuffer->getSize();
auto vppos = ivec2(0);
batch.setViewportTransform(ivec4(vppos, vpsize));
if (_currentTexture != _textures.end()) {
++_currentTexture;
}
if (_currentTexture == _textures.end()) {
_currentTexture = _textures.begin();
}

if (_currentTexture != _textures.end()) {
batch.setResourceTexture(0, *_currentTexture);
}
batch.setPipeline(_simplePipeline);
batch.draw(gpu::TRIANGLE_STRIP, 4);
});

auto frame = gpuContext->endFrame();
frame->framebuffer = framebuffer;
frame->framebufferRecycler = [](const gpu::FramebufferPointer& framebuffer){
DependencyManager::get<FramebufferCache>()->releaseFramebuffer(framebuffer);
};
_renderThread.submitFrame(frame);
if (!_renderThread.isThreaded()) {
_renderThread.process();
}
}

void resizeWindow(const QSize& size) {
_size = size;
if (!_ready) {
return;
}
_renderThread._size = size;
}

private:
QSize _size;
std::atomic<size_t> _renderCount;
gl::OffscreenContext _initContext;
RenderThread _renderThread;
ViewFrustum _viewFrustum; // current state of view frustum, perspective, orientation, etc.
bool _ready { false };
};

void messageHandler(QtMsgType type, const QMessageLogContext& context, const QString& message) {
if (!message.isEmpty()) {
#ifdef Q_OS_WIN
OutputDebugStringA(message.toLocal8Bit().constData());
OutputDebugStringA("\n");
#endif
std::cout << message.toLocal8Bit().constData() << std::endl;
}
}

const char * LOG_FILTER_RULES = R"V0G0N(
hifi.gpu=true
)V0G0N";

void unzipTestData(const QByteArray& zipData) {
QTemporaryFile zipFile;
if (zipFile.open()) {
zipFile.write(zipData);
zipFile.close();
}
qDebug() << zipFile.fileName();
if (!DATA_DIR.isValid()) {
qFatal("Unable to create temp dir");
}

//auto files = JlCompress::getFileList(zipData);
auto files = JlCompress::extractDir(zipFile.fileName(), DATA_DIR.path());
qDebug() << DATA_DIR.path();

}

int main(int argc, char** argv) {
QApplication app(argc, argv);
QCoreApplication::setApplicationName("RenderPerf");
QCoreApplication::setOrganizationName("High Fidelity");
QCoreApplication::setOrganizationDomain("highfidelity.com");
qInstallMessageHandler(messageHandler);
QLoggingCategory::setFilterRules(LOG_FILTER_RULES);

FileDownloader(DATA_SET, [&](const QByteArray& data) {
qDebug() << "Fetched size " << data.size();
unzipTestData(data);
}).waitForDownload();

QTestWindow::setup();
QTestWindow window;
app.exec();
return 0;
}

#include "main.moc"