Merge branch 'master' of https://github.com/worklist/hifi into JS_Rotation_FIX

Thijs Wenker 2015-10-15 14:50:20 +02:00
commit 8111432088
40 changed files with 3027 additions and 1761 deletions


@ -176,7 +176,7 @@
"type": "checkbox",
"label": "Enabled",
"help": "Assigns an asset-server in your domain to serve files to clients via the ATP protocol (over UDP)",
"default": false,
"default": true,
"advanced": true
}
]


@ -480,7 +480,7 @@ function MyController(hand, triggerAction) {
var offsetPosition = Vec3.multiplyQbyV(Quat.inverse(Quat.multiply(handRotation, offsetRotation)), offset);
this.actionID = NULL_ACTION_ID;
this.actionID = Entities.addAction("kinematic-hold", this.grabbedEntity, {
this.actionID = Entities.addAction("hold", this.grabbedEntity, {
hand: this.hand === RIGHT_HAND ? "right" : "left",
timeScale: NEAR_GRABBING_ACTION_TIMEFRAME,
relativePosition: offsetPosition,

File diff suppressed because it is too large


@ -118,8 +118,16 @@ CameraManager = function() {
that.targetYaw = 0;
that.targetPitch = 0;
that.focalPoint = { x: 0, y: 0, z: 0 };
that.targetFocalPoint = { x: 0, y: 0, z: 0 };
that.focalPoint = {
x: 0,
y: 0,
z: 0
};
that.targetFocalPoint = {
x: 0,
y: 0,
z: 0
};
easing = false;
easingTime = 0;
@ -127,13 +135,18 @@ CameraManager = function() {
that.previousCameraMode = null;
that.lastMousePosition = { x: 0, y: 0 };
that.lastMousePosition = {
x: 0,
y: 0
};
that.enable = function() {
if (Camera.mode == "independent" || that.enabled) return;
for (var i = 0; i < CAPTURED_KEYS.length; i++) {
Controller.captureKeyEvents({ text: CAPTURED_KEYS[i] });
Controller.captureKeyEvents({
text: CAPTURED_KEYS[i]
});
}
that.enabled = true;
@ -143,7 +156,7 @@ CameraManager = function() {
that.zoomDistance = INITIAL_ZOOM_DISTANCE;
that.targetZoomDistance = that.zoomDistance + 3.0;
var focalPoint = Vec3.sum(Camera.getPosition(),
Vec3.multiply(that.zoomDistance, Quat.getFront(Camera.getOrientation())));
Vec3.multiply(that.zoomDistance, Quat.getFront(Camera.getOrientation())));
// Determine the correct yaw and pitch to keep the camera in the same location
var dPos = Vec3.subtract(focalPoint, Camera.getPosition());
@ -169,7 +182,9 @@ CameraManager = function() {
if (!that.enabled) return;
for (var i = 0; i < CAPTURED_KEYS.length; i++) {
Controller.releaseKeyEvents({ text: CAPTURED_KEYS[i] });
Controller.releaseKeyEvents({
text: CAPTURED_KEYS[i]
});
}
that.enabled = false;
@ -335,19 +350,27 @@ CameraManager = function() {
var hasDragged = false;
that.mousePressEvent = function(event) {
if (cameraTool.mousePressEvent(event)) {
return true;
}
if (!that.enabled) return;
if (event.isRightButton || (event.isLeftButton && event.isControl && !event.isShifted)) {
that.mode = MODE_ORBIT;
} else if (event.isMiddleButton || (event.isLeftButton && event.isControl && event.isShifted)) {
that.mode = MODE_PAN;
}
if (that.mode != MODE_INACTIVE) {
if (that.mode !== MODE_INACTIVE) {
hasDragged = false;
return true;
@ -357,10 +380,12 @@ CameraManager = function() {
}
that.mouseReleaseEvent = function(event) {
if (!that.enabled) return;
Window.setCursorVisible(true);
that.mode = MODE_INACTIVE;
Window.setCursorVisible(true);
}
that.keyPressEvent = function(event) {
@ -396,15 +421,31 @@ CameraManager = function() {
return;
}
var yRot = Quat.angleAxis(that.yaw, { x: 0, y: 1, z: 0 });
var xRot = Quat.angleAxis(that.pitch, { x: 1, y: 0, z: 0 });
var yRot = Quat.angleAxis(that.yaw, {
x: 0,
y: 1,
z: 0
});
var xRot = Quat.angleAxis(that.pitch, {
x: 1,
y: 0,
z: 0
});
var q = Quat.multiply(yRot, xRot);
var pos = Vec3.multiply(Quat.getFront(q), that.zoomDistance);
Camera.setPosition(Vec3.sum(that.focalPoint, pos));
yRot = Quat.angleAxis(that.yaw - 180, { x: 0, y: 1, z: 0 });
xRot = Quat.angleAxis(-that.pitch, { x: 1, y: 0, z: 0 });
yRot = Quat.angleAxis(that.yaw - 180, {
x: 0,
y: 1,
z: 0
});
xRot = Quat.angleAxis(-that.pitch, {
x: 1,
y: 0,
z: 0
});
q = Quat.multiply(yRot, xRot);
if (easing) {
@ -483,7 +524,7 @@ CameraManager = function() {
}
});
Controller.keyReleaseEvent.connect(function (event) {
Controller.keyReleaseEvent.connect(function(event) {
if (event.text == "ESC" && that.enabled) {
Camera.mode = lastAvatarCameraMode;
cameraManager.disable(true);
@ -503,9 +544,21 @@ CameraManager = function() {
CameraTool = function(cameraManager) {
var that = {};
var RED = { red: 191, green: 78, blue: 38 };
var GREEN = { red: 26, green: 193, blue: 105 };
var BLUE = { red: 0, green: 131, blue: 204 };
var RED = {
red: 191,
green: 78,
blue: 38
};
var GREEN = {
red: 26,
green: 193,
blue: 105
};
var BLUE = {
red: 0,
green: 131,
blue: 204
};
var BORDER_WIDTH = 1;
@ -513,10 +566,10 @@ CameraTool = function(cameraManager) {
var ORIENTATION_OVERLAY_HALF_SIZE = ORIENTATION_OVERLAY_SIZE / 2;
var ORIENTATION_OVERLAY_CUBE_SIZE = 10.5,
var ORIENTATION_OVERLAY_OFFSET = {
x: 30,
y: 30,
}
var ORIENTATION_OVERLAY_OFFSET = {
x: 30,
y: 30,
}
var UI_WIDTH = 70;
var UI_HEIGHT = 70;
@ -536,7 +589,11 @@ CameraTool = function(cameraManager) {
height: UI_HEIGHT + BORDER_WIDTH * 2,
alpha: 0,
text: "",
backgroundColor: { red: 101, green: 101, blue: 101 },
backgroundColor: {
red: 101,
green: 101,
blue: 101
},
backgroundAlpha: 1.0,
visible: false,
});
@ -548,7 +605,11 @@ CameraTool = function(cameraManager) {
height: UI_HEIGHT,
alpha: 0,
text: "",
backgroundColor: { red: 51, green: 51, blue: 51 },
backgroundColor: {
red: 51,
green: 51,
blue: 51
},
backgroundAlpha: 1.0,
visible: false,
});
@ -556,7 +617,11 @@ CameraTool = function(cameraManager) {
var defaultCubeProps = {
size: ORIENTATION_OVERLAY_CUBE_SIZE,
alpha: 1,
color: { red: 255, green: 0, blue: 0 },
color: {
red: 255,
green: 0,
blue: 0
},
solid: true,
visible: true,
drawOnHUD: true,
@ -564,10 +629,26 @@ CameraTool = function(cameraManager) {
var defaultLineProps = {
lineWidth: 1.5,
alpha: 1,
position: { x: 0, y: 0, z: 0 },
start: { x: 0, y: 0, z: 0 },
end: { x: 0, y: 0, z: 0 },
color: { red: 255, green: 0, blue: 0 },
position: {
x: 0,
y: 0,
z: 0
},
start: {
x: 0,
y: 0,
z: 0
},
end: {
x: 0,
y: 0,
z: 0
},
color: {
red: 255,
green: 0,
blue: 0
},
visible: false,
drawOnHUD: true,
};
@ -582,30 +663,66 @@ CameraTool = function(cameraManager) {
var OOHS = ORIENTATION_OVERLAY_HALF_SIZE;
var cubeX = orientationOverlay.createOverlay("cube", mergeObjects(defaultCubeProps, {
position: { x: -OOHS, y: OOHS, z: OOHS },
position: {
x: -OOHS,
y: OOHS,
z: OOHS
},
color: RED,
}));
var cubeY = orientationOverlay.createOverlay("cube", mergeObjects(defaultCubeProps, {
position: { x: OOHS, y: -OOHS, z: OOHS },
position: {
x: OOHS,
y: -OOHS,
z: OOHS
},
color: GREEN,
}));
var cubeZ = orientationOverlay.createOverlay("cube", mergeObjects(defaultCubeProps, {
position: { x: OOHS, y: OOHS, z: -OOHS },
position: {
x: OOHS,
y: OOHS,
z: -OOHS
},
color: BLUE,
}));
orientationOverlay.createOverlay("line3d", mergeObjects(defaultLineProps, {
start: { x: -OOHS, y: OOHS, z: OOHS },
end: { x: OOHS, y: OOHS, z: OOHS },
start: {
x: -OOHS,
y: OOHS,
z: OOHS
},
end: {
x: OOHS,
y: OOHS,
z: OOHS
},
color: RED,
}));
orientationOverlay.createOverlay("line3d", mergeObjects(defaultLineProps, {
start: { x: OOHS, y: -OOHS, z: OOHS },
end: { x: OOHS, y: OOHS, z: OOHS },
start: {
x: OOHS,
y: -OOHS,
z: OOHS
},
end: {
x: OOHS,
y: OOHS,
z: OOHS
},
color: GREEN,
}));
orientationOverlay.createOverlay("line3d", mergeObjects(defaultLineProps, {
start: { x: OOHS, y: OOHS, z: -OOHS },
end: { x: OOHS, y: OOHS, z: OOHS },
start: {
x: OOHS,
y: OOHS,
z: -OOHS
},
end: {
x: OOHS,
y: OOHS,
z: OOHS
},
color: BLUE,
}));
@ -645,7 +762,10 @@ CameraTool = function(cameraManager) {
}
that.mousePressEvent = function(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
var clickedOverlay = Overlays.getOverlayAtPoint({
x: event.x,
y: event.y
});
if (clickedOverlay == cubeX) {
targetPitch = 0;
@ -666,12 +786,18 @@ CameraTool = function(cameraManager) {
};
that.setVisible = function(visible) {
orientationOverlay.setProperties({ visible: visible });
Overlays.editOverlay(background, { visible: visible });
Overlays.editOverlay(backgroundBorder, { visible: visible });
orientationOverlay.setProperties({
visible: visible
});
Overlays.editOverlay(background, {
visible: visible
});
Overlays.editOverlay(backgroundBorder, {
visible: visible
});
};
that.setVisible(false);
return that;
};
};


@ -16,8 +16,9 @@ var scriptURL = Script.resolvePath('wallTarget.js');
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/target.fbx';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/target_collision_hull.obj';
var MINIMUM_MOVE_LENGTH = 0.05;
var RESET_DISTANCE = 0.5;
var RESET_DISTANCE = 1;
var TARGET_USER_DATA_KEY = 'hifi-ping_pong_target';
var NUMBER_OF_TARGETS = 6;
var TARGETS_PER_ROW = 3;
@ -60,6 +61,8 @@ var targets = [];
var originalPositions = [];
var lastPositions = [];
function addTargets() {
var i;
var row = -1;
@ -77,6 +80,7 @@ function addTargets() {
position.y = startPosition.y - (row * VERTICAL_SPACING);
originalPositions.push(position);
lastPositions.push(position);
var targetProperties = {
name: 'Target',
@ -103,7 +107,11 @@ function testTargetDistanceFromStart() {
var distance = Vec3.subtract(originalPosition, currentPosition);
var length = Vec3.length(distance);
if (length > RESET_DISTANCE) {
var moving = Vec3.length(Vec3.subtract(currentPosition, lastPositions[index]));
lastPositions[index] = currentPosition;
if (length > RESET_DISTANCE && moving < MINIMUM_MOVE_LENGTH) {
Entities.deleteEntity(target);
@ -117,10 +125,16 @@ function testTargetDistanceFromStart() {
compoundShapeURL: COLLISION_HULL_URL,
position: originalPositions[index],
rotation: rotation,
script: scriptURL
script: scriptURL,
userData: JSON.stringify({
grabbableKey: {
grabbable: false
}
})
};
targets[index] = Entities.addEntity(targetProperties);
}
});
}
@ -142,7 +156,7 @@ function deleteTargets() {
}
Entities.deletingEntity.connect(deleteEntity);
var distanceCheckInterval = Script.setInterval(testTargetDistanceFromStart, 1000);
var distanceCheckInterval = Script.setInterval(testTargetDistanceFromStart, 500);
addTargets();


@ -16,24 +16,38 @@
{
"jointName": "RightHand",
"positionVar": "rightHandPosition",
"rotationVar": "rightHandRotation"
"rotationVar": "rightHandRotation",
"typeVar": "rightHandType"
},
{
"jointName": "LeftHand",
"positionVar": "leftHandPosition",
"rotationVar": "leftHandRotation"
"rotationVar": "leftHandRotation",
"typeVar": "leftHandType"
},
{
"jointName": "RightFoot",
"positionVar": "rightFootPosition",
"rotationVar": "rightFootRotation",
"typeVar": "rightFootType"
},
{
"jointName": "LeftFoot",
"positionVar": "leftFootPosition",
"rotationVar": "leftFootRotation",
"typeVar": "leftFootType"
},
{
"jointName": "Neck",
"positionVar": "neckPosition",
"rotationVar": "neckRotation",
"typeVar": "headAndNeckType"
"typeVar": "neckType"
},
{
"jointName": "Head",
"positionVar": "headPosition",
"rotationVar": "headRotation",
"typeVar": "headAndNeckType"
"typeVar": "headType"
}
]
},
@ -51,7 +65,7 @@
"id": "spineLean",
"type": "manipulator",
"data": {
"alpha": 1.0,
"alpha": 0.0,
"joints": [
{ "var": "lean", "jointName": "Spine" }
]


@ -126,6 +126,7 @@
#include "Stars.h"
#include "ui/AddressBarDialog.h"
#include "ui/AvatarInputs.h"
#include "ui/AssetUploadDialogFactory.h"
#include "ui/DataWebDialog.h"
#include "ui/DialogsManager.h"
#include "ui/LoginDialog.h"
@ -159,6 +160,7 @@ static const QString SVO_JSON_EXTENSION = ".svo.json";
static const QString JS_EXTENSION = ".js";
static const QString FST_EXTENSION = ".fst";
static const QString FBX_EXTENSION = ".fbx";
static const QString OBJ_EXTENSION = ".obj";
static const int MIRROR_VIEW_TOP_PADDING = 5;
static const int MIRROR_VIEW_LEFT_PADDING = 10;
@ -179,9 +181,6 @@ static const unsigned int THROTTLED_SIM_FRAMERATE = 15;
static const int TARGET_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / TARGET_SIM_FRAMERATE;
static const int THROTTLED_SIM_FRAME_PERIOD_MS = MSECS_PER_SECOND / THROTTLED_SIM_FRAMERATE;
const QString CHECK_VERSION_URL = "https://highfidelity.com/latestVersion.xml";
const QString SKIP_FILENAME = QStandardPaths::writableLocation(QStandardPaths::DataLocation) + "/hifi.skipversion";
#ifndef __APPLE__
static const QString DESKTOP_LOCATION = QStandardPaths::writableLocation(QStandardPaths::DesktopLocation);
#else
@ -197,8 +196,7 @@ const QHash<QString, Application::AcceptURLMethod> Application::_acceptedExtensi
{ SVO_EXTENSION, &Application::importSVOFromURL },
{ SVO_JSON_EXTENSION, &Application::importSVOFromURL },
{ JS_EXTENSION, &Application::askToLoadScript },
{ FST_EXTENSION, &Application::askToSetAvatarUrl },
{ FBX_EXTENSION, &Application::askToUploadAsset }
{ FST_EXTENSION, &Application::askToSetAvatarUrl }
};
#ifdef Q_OS_WIN
@ -1111,13 +1109,10 @@ void Application::paintGL() {
}
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
if (isHMDMode()) {
glm::quat hmdRotation = extractRotation(myAvatar->getHMDSensorMatrix());
_myCamera.setRotation(myAvatar->getWorldAlignedOrientation() * hmdRotation);
// Ignore MenuOption::CenterPlayerInView in HMD view
glm::vec3 hmdOffset = extractTranslation(myAvatar->getHMDSensorMatrix());
_myCamera.setPosition(myAvatar->getDefaultEyePosition()
+ myAvatar->getOrientation()
* (myAvatar->getScale() * myAvatar->getBoomLength() * glm::vec3(0.0f, 0.0f, 1.0f) + hmdOffset));
auto hmdWorldMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
_myCamera.setRotation(glm::normalize(glm::quat_cast(hmdWorldMat)));
auto worldBoomOffset = myAvatar->getOrientation() * (myAvatar->getScale() * myAvatar->getBoomLength() * glm::vec3(0.0f, 0.0f, 1.0f));
_myCamera.setPosition(extractTranslation(hmdWorldMat) + worldBoomOffset);
} else {
_myCamera.setRotation(myAvatar->getHead()->getOrientation());
if (Menu::getInstance()->isOptionChecked(MenuOption::CenterPlayerInView)) {
@ -1561,7 +1556,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
cursor->setIcon(Cursor::Icon::DEFAULT);
}
} else {
resetSensors();
resetSensors(true);
}
break;
}
@ -1892,16 +1887,6 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
computePickRay(mappedEvent.x(), mappedEvent.y()));
sendEvent(this, &actionEvent);
} else if (event->button() == Qt::RightButton) {
// "right click" on controllers to toggle the overlay
if (deviceID > 0) {
_overlayConductor.setEnabled(!_overlayConductor.getEnabled());
}
} else if (event->button() == Qt::MiddleButton) {
// mouse middle click to toggle the overlay
if (deviceID == 0) {
_overlayConductor.setEnabled(!_overlayConductor.getEnabled());
}
}
}
}
@ -2023,21 +2008,14 @@ void Application::dropEvent(QDropEvent *event) {
const QMimeData* mimeData = event->mimeData();
for (auto& url : mimeData->urls()) {
QString urlString = url.toString();
if (canAcceptURL(urlString) && acceptURL(urlString)) {
if (acceptURL(urlString, true)) {
event->acceptProposedAction();
}
}
}
void Application::dragEnterEvent(QDragEnterEvent* event) {
const QMimeData* mimeData = event->mimeData();
for (auto& url : mimeData->urls()) {
auto urlString = url.toString();
if (canAcceptURL(urlString)) {
event->acceptProposedAction();
break;
}
}
event->acceptProposedAction();
}
bool Application::acceptSnapshot(const QString& urlString) {
@ -2770,6 +2748,8 @@ void Application::update(float deltaTime) {
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
updateDialogs(deltaTime); // update various stats dialogs if present
_avatarUpdate->synchronousProcess();
{
PerformanceTimer perfTimer("physics");
myAvatar->relayDriveKeysToCharacterController();
@ -2831,8 +2811,6 @@ void Application::update(float deltaTime) {
_overlays.update(deltaTime);
}
_avatarUpdate->synchronousProcess();
// Update _viewFrustum with latest camera and view frustum data...
// NOTE: we get this from the view frustum, to make it simpler, since the
// loadViewFrustum() method will get the correct details from the camera
@ -3600,7 +3578,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
renderArgs->_viewport = originalViewport;
}
void Application::resetSensors() {
void Application::resetSensors(bool andReload) {
DependencyManager::get<Faceshift>()->reset();
DependencyManager::get<DdeFaceTracker>()->reset();
DependencyManager::get<EyeTracker>()->reset();
@ -3612,7 +3590,7 @@ void Application::resetSensors() {
QPoint windowCenter = mainWindow->geometry().center();
_glWidget->cursor().setPos(currentScreen, windowCenter);
getMyAvatar()->reset();
getMyAvatar()->reset(andReload);
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "reset", Qt::QueuedConnection);
}
@ -3973,26 +3951,26 @@ bool Application::canAcceptURL(const QString& urlString) {
return false;
}
bool Application::acceptURL(const QString& urlString) {
bool Application::acceptURL(const QString& urlString, bool defaultUpload) {
if (urlString.startsWith(HIFI_URL_SCHEME)) {
// this is a hifi URL - have the AddressManager handle it
QMetaObject::invokeMethod(DependencyManager::get<AddressManager>().data(), "handleLookupString",
Qt::AutoConnection, Q_ARG(const QString&, urlString));
return true;
} else {
QUrl url(urlString);
QHashIterator<QString, AcceptURLMethod> i(_acceptedExtensions);
QString lowerPath = url.path().toLower();
while (i.hasNext()) {
i.next();
if (lowerPath.endsWith(i.key())) {
AcceptURLMethod method = i.value();
(this->*method)(urlString);
return true;
}
}
QUrl url(urlString);
QHashIterator<QString, AcceptURLMethod> i(_acceptedExtensions);
QString lowerPath = url.path().toLower();
while (i.hasNext()) {
i.next();
if (lowerPath.endsWith(i.key())) {
AcceptURLMethod method = i.value();
return (this->*method)(urlString);
}
}
return false;
return defaultUpload && askToUploadAsset(urlString);
}
void Application::setSessionUUID(const QUuid& sessionUUID) {
@ -4076,8 +4054,36 @@ bool Application::askToUploadAsset(const QString& filename) {
QUrl url { filename };
if (auto upload = DependencyManager::get<AssetClient>()->createUpload(url.toLocalFile())) {
QMessageBox messageBox;
messageBox.setWindowTitle("Asset upload");
messageBox.setText("You are about to upload the following file to the asset server:\n" +
url.toDisplayString());
messageBox.setInformativeText("Do you want to continue?");
messageBox.setStandardButtons(QMessageBox::Ok | QMessageBox::Cancel);
messageBox.setDefaultButton(QMessageBox::Ok);
// Option to drop model in world for models
if (filename.endsWith(FBX_EXTENSION) || filename.endsWith(OBJ_EXTENSION)) {
auto checkBox = new QCheckBox(&messageBox);
checkBox->setText("Add to scene");
messageBox.setCheckBox(checkBox);
}
if (messageBox.exec() != QMessageBox::Ok) {
upload->deleteLater();
return false;
}
// connect to the finished signal so we know when the AssetUpload is done
QObject::connect(upload, &AssetUpload::finished, this, &Application::assetUploadFinished);
if (messageBox.checkBox() && (messageBox.checkBox()->checkState() == Qt::Checked)) {
// Custom behavior for models
QObject::connect(upload, &AssetUpload::finished, this, &Application::modelUploadFinished);
} else {
QObject::connect(upload, &AssetUpload::finished,
&AssetUploadDialogFactory::getInstance(),
&AssetUploadDialogFactory::handleUploadFinished);
}
// start the upload now
upload->start();
@ -4089,47 +4095,26 @@ bool Application::askToUploadAsset(const QString& filename) {
return false;
}
void Application::assetUploadFinished(AssetUpload* upload, const QString& hash) {
if (upload->getError() != AssetUpload::NoError) {
// figure out the right error message for the message box
QString additionalError;
void Application::modelUploadFinished(AssetUpload* upload, const QString& hash) {
auto filename = QFileInfo(upload->getFilename()).fileName();
if ((upload->getError() == AssetUpload::NoError) &&
(filename.endsWith(FBX_EXTENSION) || filename.endsWith(OBJ_EXTENSION))) {
switch (upload->getError()) {
case AssetUpload::PermissionDenied:
additionalError = "You do not have permission to upload content to this asset-server.";
break;
case AssetUpload::TooLarge:
additionalError = "The uploaded content was too large and could not be stored in the asset-server.";
break;
case AssetUpload::FileOpenError:
additionalError = "The file could not be opened. Please check your permissions and try again.";
break;
case AssetUpload::NetworkError:
additionalError = "The file could not be opened. Please check your network connectivity.";
break;
default:
// not handled, do not show a message box
return;
}
auto entities = DependencyManager::get<EntityScriptingInterface>();
// display a message box with the error
auto filename = QFileInfo(upload->getFilename()).fileName();
QString errorMessage = QString("Failed to upload %1.\n\n%2").arg(filename, additionalError);
QMessageBox::warning(_window, "Failed Upload", errorMessage);
EntityItemProperties properties;
properties.setType(EntityTypes::Model);
properties.setModelURL(QString("%1:%2.%3").arg(URL_SCHEME_ATP).arg(hash).arg(upload->getExtension()));
properties.setPosition(_myCamera.getPosition() + _myCamera.getOrientation() * Vectors::FRONT * 2.0f);
properties.setName(QUrl(upload->getFilename()).fileName());
entities->addEntity(properties);
upload->deleteLater();
} else {
AssetUploadDialogFactory::getInstance().handleUploadFinished(upload, hash);
}
auto entities = DependencyManager::get<EntityScriptingInterface>();
auto myAvatar = getMyAvatar();
EntityItemProperties properties;
properties.setType(EntityTypes::Model);
properties.setModelURL(QString("%1:%2.%3").arg(ATP_SCHEME).arg(hash).arg(upload->getExtension()));
properties.setPosition(myAvatar->getPosition() + myAvatar->getOrientation() * Vectors::FRONT * 2.0f);
properties.setName(QUrl(upload->getFilename()).fileName());
entities->addEntity(properties);
upload->deleteLater();
}
ScriptEngine* Application::loadScript(const QString& scriptFilename, bool isUserLoaded,


@ -211,7 +211,7 @@ public:
void setScriptsLocation(const QString& scriptsLocation);
bool canAcceptURL(const QString& url);
bool acceptURL(const QString& url);
bool acceptURL(const QString& url, bool defaultUpload = false);
void setMaxOctreePacketsPerSecond(int maxOctreePPS);
int getMaxOctreePacketsPerSecond();
@ -274,7 +274,7 @@ public slots:
void setRawAvatarUpdateThreading();
void setRawAvatarUpdateThreading(bool isThreaded);
void resetSensors();
void resetSensors(bool andReload = false);
void setActiveFaceTracker();
#ifdef HAVE_IVIEWHMD
@ -329,7 +329,7 @@ private slots:
bool askToSetAvatarUrl(const QString& url);
bool askToLoadScript(const QString& scriptFilenameOrURL);
bool askToUploadAsset(const QString& asset);
void assetUploadFinished(AssetUpload* upload, const QString& hash);
void modelUploadFinished(AssetUpload* upload, const QString& hash);
void setSessionUUID(const QUuid& sessionUUID);
void domainChanged(const QString& domainHostname);


@ -24,6 +24,7 @@
#include "Application.h"
#include "AccountManager.h"
#include "assets/ATPAssetMigrator.h"
#include "audio/AudioScope.h"
#include "avatar/AvatarManager.h"
#include "devices/DdeFaceTracker.h"
@ -354,7 +355,7 @@ Menu::Menu() {
MenuWrapper* assetDeveloperMenu = developerMenu->addMenu("Assets");
auto& assetDialogFactory = AssetUploadDialogFactory::getInstance();
assetDialogFactory.setParent(this);
assetDialogFactory.setDialogParent(this);
QAction* assetUpload = addActionToQMenuAndActionHash(assetDeveloperMenu,
MenuOption::UploadAsset,
@ -365,6 +366,13 @@ Menu::Menu() {
// disable the asset upload action by default - it gets enabled only if asset server becomes present
assetUpload->setEnabled(false);
auto& atpMigrator = ATPAssetMigrator::getInstance();
atpMigrator.setDialogParent(this);
QAction* assetMigration = addActionToQMenuAndActionHash(assetDeveloperMenu, MenuOption::AssetMigration,
0, &atpMigrator,
SLOT(loadEntityServerFile()));
MenuWrapper* avatarDebugMenu = developerMenu->addMenu("Avatar");
MenuWrapper* faceTrackingMenu = avatarDebugMenu->addMenu("Face Tracking");
@ -461,6 +469,7 @@ Menu::Menu() {
0, false,
&ConnexionClient::getInstance(),
SLOT(toggleConnexion(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ComfortMode, 0, true);
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false);


@ -135,6 +135,7 @@ namespace MenuOption {
const QString AnimDebugDrawAnimPose = "Debug Draw Animation";
const QString AnimDebugDrawBindPose = "Debug Draw Bind Pose";
const QString Antialiasing = "Antialiasing";
const QString AssetMigration = "ATP Asset Migration";
const QString Atmosphere = "Atmosphere";
const QString Attachments = "Attachments...";
const QString AudioNoiseReduction = "Audio Noise Reduction";
@ -159,6 +160,7 @@ namespace MenuOption {
const QString CenterPlayerInView = "Center Player In View";
const QString Chat = "Chat...";
const QString Collisions = "Collisions";
const QString ComfortMode = "Comfort Mode";
const QString Connexion = "Activate 3D Connexion Devices";
const QString Console = "Console...";
const QString ControlWithSpeech = "Control With Speech";


@ -144,7 +144,7 @@ void PluginContainerProxy::unsetFullscreen(const QScreen* avoid) {
void PluginContainerProxy::requestReset() {
// We could signal qApp to sequence this, but it turns out that requestReset is only used from within the main thread anyway.
qApp->resetSensors();
qApp->resetSensors(true);
}
void PluginContainerProxy::showDisplayPluginsTools() {


@ -0,0 +1,273 @@
//
// ATPAssetMigrator.cpp
// interface/src/assets
//
// Created by Stephen Birarda on 2015-10-12.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ATPAssetMigrator.h"
#include <QtCore/QDebug>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonObject>
#include <QtCore/QLoggingCategory>
#include <QtCore/QTemporaryFile>
#include <QtWidgets/QFileDialog>
#include <QtWidgets/QMessageBox>
#include <Gzip.h>
#include <AssetClient.h>
#include <AssetUpload.h>
#include <ResourceManager.h>
#include "../ui/AssetUploadDialogFactory.h"
Q_DECLARE_LOGGING_CATEGORY(asset_migrator);
Q_LOGGING_CATEGORY(asset_migrator, "hf.asset_migrator");
ATPAssetMigrator& ATPAssetMigrator::getInstance() {
static ATPAssetMigrator instance;
return instance;
}
static const QString ENTITIES_OBJECT_KEY = "Entities";
static const QString MODEL_URL_KEY = "modelURL";
static const QString MESSAGE_BOX_TITLE = "ATP Asset Migration";
void ATPAssetMigrator::loadEntityServerFile() {
auto filename = QFileDialog::getOpenFileName(_dialogParent, "Select an entity-server content file to migrate",
QString(), QString("Entity-Server Content (*.gz)"));
if (!filename.isEmpty()) {
qCDebug(asset_migrator) << "Selected filename for ATP asset migration: " << filename;
static const QString MIGRATION_CONFIRMATION_TEXT {
"The ATP Asset Migration process will scan the selected entity-server file, upload discovered resources to the"\
" current asset-server and then save a new entity-server file with the ATP URLs.\n\nAre you ready to"\
" continue?\n\nMake sure you are connected to the right domain."
};
auto button = QMessageBox::question(_dialogParent, MESSAGE_BOX_TITLE, MIGRATION_CONFIRMATION_TEXT,
QMessageBox::Yes | QMessageBox::No, QMessageBox::Yes);
if (button == QMessageBox::No) {
return;
}
// try to open the file at the given filename
QFile modelsFile { filename };
if (modelsFile.open(QIODevice::ReadOnly)) {
QByteArray compressedJsonData = modelsFile.readAll();
QByteArray jsonData;
if (!gunzip(compressedJsonData, jsonData)) {
QMessageBox::warning(_dialogParent, "Error", "The file at" + filename + "was not in gzip format.");
}
QJsonDocument modelsJSON = QJsonDocument::fromJson(jsonData);
_entitiesArray = modelsJSON.object()["Entities"].toArray();
for (auto jsonValue : _entitiesArray) {
QJsonObject entityObject = jsonValue.toObject();
QString modelURLString = entityObject.value(MODEL_URL_KEY).toString();
if (!modelURLString.isEmpty()) {
QUrl modelURL = QUrl(modelURLString);
if (!_ignoredUrls.contains(modelURL)
&& (modelURL.scheme() == URL_SCHEME_HTTP || modelURL.scheme() == URL_SCHEME_HTTPS
|| modelURL.scheme() == URL_SCHEME_FILE || modelURL.scheme() == URL_SCHEME_FTP)) {
if (_pendingReplacements.contains(modelURL)) {
// we already have a request out for this asset, just store the QJsonValueRef
// so we can do the hash replacement when the request comes back
_pendingReplacements.insert(modelURL, jsonValue);
} else if (_uploadedAssets.contains(modelURL)) {
// we already have a hash for this asset
// so just do the replacement immediately
entityObject[MODEL_URL_KEY] = _uploadedAssets.value(modelURL).toString();
jsonValue = entityObject;
} else if (wantsToMigrateResource(modelURL)) {
auto request = ResourceManager::createResourceRequest(this, modelURL);
if (request) {
qCDebug(asset_migrator) << "Requesting" << modelURL << "for ATP asset migration";
// add this combination of QUrl and QJsonValueRef to our multi hash so we can change the URL
// to an ATP one once ready
_pendingReplacements.insert(modelURL, jsonValue);
connect(request, &ResourceRequest::finished, this, [=]() {
if (request->getResult() == ResourceRequest::Success) {
migrateResource(request);
} else {
QMessageBox::warning(_dialogParent, "Error",
QString("Could not retrieve asset at %1").arg(modelURL.toString()));
}
request->deleteLater();
});
request->send();
} else {
QMessageBox::warning(_dialogParent, "Error",
QString("Could not create request for asset at %1").arg(modelURL.toString()));
}
} else {
_ignoredUrls.insert(modelURL);
}
}
}
}
_doneReading = true;
} else {
QMessageBox::warning(_dialogParent, "Error",
"There was a problem loading that entity-server file for ATP asset migration. Please try again");
}
}
}
void ATPAssetMigrator::migrateResource(ResourceRequest* request) {
// use an asset client to upload the asset
auto assetClient = DependencyManager::get<AssetClient>();
QFileInfo assetInfo { request->getUrl().fileName() };
auto upload = assetClient->createUpload(request->getData(), assetInfo.completeSuffix());
if (upload) {
// add this URL to our hash of AssetUpload to original URL
_originalURLs.insert(upload, request->getUrl());
qCDebug(asset_migrator) << "Starting upload of asset from" << request->getUrl();
// connect to the finished signal so we know when the AssetUpload is done
QObject::connect(upload, &AssetUpload::finished, this, &ATPAssetMigrator::assetUploadFinished);
// start the upload now
upload->start();
} else {
// show a QMessageBox to say that there is no local asset server
QString messageBoxText = QString("Could not upload \n\n%1\n\nbecause you are currently not connected" \
" to a local asset-server.").arg(assetInfo.fileName());
QMessageBox::information(_dialogParent, "Failed to Upload", messageBoxText);
}
}
void ATPAssetMigrator::assetUploadFinished(AssetUpload *upload, const QString& hash) {
if (upload->getError() == AssetUpload::NoError) {
const auto& modelURL = _originalURLs[upload];
// successfully uploaded asset - make any required replacements found in the pending replacements
auto values = _pendingReplacements.values(modelURL);
QString atpURL = getATPUrl(hash, upload->getExtension()).toString();
for (auto value : values) {
// replace the modelURL in this QJsonValueRef with the hash
QJsonObject valueObject = value.toObject();
valueObject[MODEL_URL_KEY] = atpURL;
value = valueObject;
}
// add this URL to our list of uploaded assets
_uploadedAssets.insert(modelURL, atpURL);
// pull the replaced models from _pendingReplacements
_pendingReplacements.remove(modelURL);
// are we out of pending replacements? if so it is time to save the entity-server file
if (_doneReading && _pendingReplacements.empty()) {
saveEntityServerFile();
// reset after the attempted save, success or fail
reset();
}
} else {
AssetUploadDialogFactory::showErrorDialog(upload, _dialogParent);
}
upload->deleteLater();
}
bool ATPAssetMigrator::wantsToMigrateResource(const QUrl& url) {
static bool hasAskedForCompleteMigration { false };
static bool wantsCompleteMigration { false };
if (!hasAskedForCompleteMigration) {
// this is the first resource migration - ask the user if they just want to migrate everything
static const QString COMPLETE_MIGRATION_TEXT { "Do you want to migrate all assets found in this entity-server file?\n\n"\
"Select \"Yes\" to upload all discovered assets to the current asset-server immediately.\n\n"\
"Select \"No\" to be prompted for each discovered asset."
};
auto button = QMessageBox::question(_dialogParent, MESSAGE_BOX_TITLE, COMPLETE_MIGRATION_TEXT,
QMessageBox::Yes | QMessageBox::No, QMessageBox::Yes);
if (button == QMessageBox::Yes) {
wantsCompleteMigration = true;
}
hasAskedForCompleteMigration = true;
}
if (wantsCompleteMigration) {
return true;
} else {
// present a dialog asking the user if they want to migrate this specific resource
auto button = QMessageBox::question(_dialogParent, MESSAGE_BOX_TITLE,
"Would you like to migrate the following resource?\n" + url.toString(),
QMessageBox::Yes | QMessageBox::No, QMessageBox::Yes);
return button == QMessageBox::Yes;
}
}
void ATPAssetMigrator::saveEntityServerFile() {
// show a dialog to ask the user where they want to save the file
QString saveName = QFileDialog::getSaveFileName(_dialogParent, "Save Migrated Entities File");
QFile saveFile { saveName };
if (saveFile.open(QIODevice::WriteOnly)) {
QJsonObject rootObject;
rootObject[ENTITIES_OBJECT_KEY] = _entitiesArray;
QJsonDocument newDocument { rootObject };
QByteArray jsonDataForFile;
if (gzip(newDocument.toJson(), jsonDataForFile, -1)) {
saveFile.write(jsonDataForFile);
saveFile.close();
QMessageBox::information(_dialogParent, "Success",
QString("Your new entities file has been saved at %1").arg(saveName));
} else {
QMessageBox::warning(_dialogParent, "Error", "Could not gzip JSON data for new entities file.");
}
} else {
QMessageBox::warning(_dialogParent, "Error",
QString("Could not open file at %1 to write new entities file to.").arg(saveName));
}
}
void ATPAssetMigrator::reset() {
_entitiesArray = QJsonArray();
_doneReading = false;
_pendingReplacements.clear();
_uploadedAssets.clear();
_originalURLs.clear();
_ignoredUrls.clear();
}


@ -0,0 +1,55 @@
//
// ATPAssetMigrator.h
// interface/src/assets
//
// Created by Stephen Birarda on 2015-10-12.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_ATPAssetMigrator_h
#define hifi_ATPAssetMigrator_h
#include <QtCore/QJsonArray>
#include <QtCore/QObject>
#include <QtCore/QMultiHash>
#include <QtCore/QSet>
class AssetUpload;
class ResourceRequest;
class ATPAssetMigrator : public QObject {
Q_OBJECT
public:
static ATPAssetMigrator& getInstance();
void setDialogParent(QWidget* dialogParent) { _dialogParent = dialogParent; }
public slots:
void loadEntityServerFile();
private slots:
void assetUploadFinished(AssetUpload* upload, const QString& hash);
private:
void migrateResource(ResourceRequest* request);
void saveEntityServerFile();
void reset();
bool wantsToMigrateResource(const QUrl& url);
QWidget* _dialogParent = nullptr;
QJsonArray _entitiesArray;
bool _doneReading { false };
QMultiHash<QUrl, QJsonValueRef> _pendingReplacements;
QHash<QUrl, QUrl> _uploadedAssets;
QHash<AssetUpload*, QUrl> _originalURLs;
QSet<QUrl> _ignoredUrls;
};
#endif // hifi_ATPAssetMigrator_h


@ -140,50 +140,54 @@ QByteArray MyAvatar::toByteArray(bool cullSmallChanges, bool sendAll) {
return AvatarData::toByteArray(cullSmallChanges, sendAll);
}
void MyAvatar::reset() {
void MyAvatar::reset(bool andReload) {
// Gather animation mode...
// This should be simpler when we have only graph animations always on.
bool isRig = _rig->getEnableRig();
// setting rig animation to true, below, will clear the graph animation menu item, so grab it now.
bool isGraph = _rig->getEnableAnimGraph() || Menu::getInstance()->isOptionChecked(MenuOption::EnableAnimGraph);
// ... and get to sane configuration where other activity won't bother us.
qApp->setRawAvatarUpdateThreading(false);
_rig->disableHands = true;
setEnableRigAnimations(true);
if (andReload) {
qApp->setRawAvatarUpdateThreading(false);
_rig->disableHands = true;
setEnableRigAnimations(true);
}
// Reset dynamic state.
_wasPushing = _isPushing = _isBraking = _billboardValid = _goToPending = _straighteningLean = false;
_wasPushing = _isPushing = _isBraking = _billboardValid = _straighteningLean = false;
_skeletonModel.reset();
getHead()->reset();
_targetVelocity = glm::vec3(0.0f);
setThrust(glm::vec3(0.0f));
// Get fresh data, in case we're really slow and out of whack.
_hmdSensorMatrix = qApp->getHMDSensorPose();
_hmdSensorPosition = extractTranslation(_hmdSensorMatrix);
_hmdSensorOrientation = glm::quat_cast(_hmdSensorMatrix);
if (andReload) {
// Get fresh data, in case we're really slow and out of whack.
_hmdSensorMatrix = qApp->getHMDSensorPose();
_hmdSensorPosition = extractTranslation(_hmdSensorMatrix);
_hmdSensorOrientation = glm::quat_cast(_hmdSensorMatrix);
// Reset body position/orientation under the head.
auto newBodySensorMatrix = deriveBodyFromHMDSensor(); // Based on current cached HMD position/rotation..
auto worldBodyMatrix = _sensorToWorldMatrix * newBodySensorMatrix;
glm::vec3 worldBodyPos = extractTranslation(worldBodyMatrix);
glm::quat worldBodyRot = glm::normalize(glm::quat_cast(worldBodyMatrix));
// Reset body position/orientation under the head.
auto newBodySensorMatrix = deriveBodyFromHMDSensor(); // Based on current cached HMD position/rotation..
auto worldBodyMatrix = _sensorToWorldMatrix * newBodySensorMatrix;
glm::vec3 worldBodyPos = extractTranslation(worldBodyMatrix);
glm::quat worldBodyRot = glm::normalize(glm::quat_cast(worldBodyMatrix));
// FIXME: Hack to retain the previous behavior wrt height.
// I'd like to make the body match head height, but that will have to wait for separate PR.
worldBodyPos.y = getPosition().y;
// FIXME: Hack to retain the previous behavior wrt height.
// I'd like to make the body match head height, but that will have to wait for separate PR.
worldBodyPos.y = getPosition().y;
setPosition(worldBodyPos);
setOrientation(worldBodyRot);
// If there is any discrepancy between positioning and the head (as there is in initial deriveBodyFromHMDSensor),
// we can make that right by setting _bodySensorMatrix = newBodySensorMatrix.
// However, doing so will make the head want to point to the previous body orientation, as cached above.
//_bodySensorMatrix = newBodySensorMatrix;
//updateSensorToWorldMatrix(); // Uses updated position/orientation and _bodySensorMatrix changes
setPosition(worldBodyPos);
setOrientation(worldBodyRot);
// If there is any discrepancy between positioning and the head (as there is in initial deriveBodyFromHMDSensor),
// we can make that right by setting _bodySensorMatrix = newBodySensorMatrix.
// However, doing so will make the head want to point to the previous body orientation, as cached above.
//_bodySensorMatrix = newBodySensorMatrix;
//updateSensorToWorldMatrix(); // Uses updated position/orientation and _bodySensorMatrix changes
_skeletonModel.simulate(0.1f); // non-zero
setEnableRigAnimations(false);
_skeletonModel.simulate(0.1f);
_skeletonModel.simulate(0.1f); // non-zero
setEnableRigAnimations(false);
_skeletonModel.simulate(0.1f);
}
if (isRig) {
setEnableRigAnimations(true);
Menu::getInstance()->setIsOptionChecked(MenuOption::EnableRigAnimations, true);
@ -191,8 +195,10 @@ void MyAvatar::reset() {
setEnableAnimGraph(true);
Menu::getInstance()->setIsOptionChecked(MenuOption::EnableAnimGraph, true);
}
_rig->disableHands = false;
qApp->setRawAvatarUpdateThreading();
if (andReload) {
_rig->disableHands = false;
qApp->setRawAvatarUpdateThreading();
}
}
void MyAvatar::update(float deltaTime) {
@ -320,6 +326,7 @@ static bool capsuleCheck(const glm::vec3& pos, float capsuleLen, float capsuleRa
// as it moves through the world.
void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
// calc deltaTime
auto now = usecTimestampNow();
auto deltaUsecs = now - _lastUpdateFromHMDTime;
_lastUpdateFromHMDTime = now;
@ -334,21 +341,61 @@ void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
bool hmdIsAtRest = _hmdAtRestDetector.update(deltaTime, _hmdSensorPosition, _hmdSensorOrientation);
const float STRAIGHTENING_LEAN_DURATION = 0.5f; // seconds
// It can be more accurate/smooth to use velocity rather than position,
// but some modes (e.g., hmd standing) update position without updating velocity.
// So, let's create our own workingVelocity from the worldPosition...
glm::vec3 positionDelta = getPosition() - _lastPosition;
glm::vec3 workingVelocity = positionDelta / deltaTime;
_lastPosition = getPosition();
const float MOVE_ENTER_SPEED_THRESHOLD = 0.2f; // m/sec
const float MOVE_EXIT_SPEED_THRESHOLD = 0.07f; // m/sec
bool isMoving;
if (_lastIsMoving) {
isMoving = glm::length(workingVelocity) >= MOVE_EXIT_SPEED_THRESHOLD;
} else {
isMoving = glm::length(workingVelocity) > MOVE_ENTER_SPEED_THRESHOLD;
}
bool justStartedMoving = (_lastIsMoving != isMoving) && isMoving;
_lastIsMoving = isMoving;
if (shouldBeginStraighteningLean() || hmdIsAtRest || justStartedMoving) {
beginStraighteningLean();
}
processStraighteningLean(deltaTime);
}
void MyAvatar::beginStraighteningLean() {
// begin homing toward derived body position.
if (!_straighteningLean) {
_straighteningLean = true;
_straighteningLeanAlpha = 0.0f;
}
}
bool MyAvatar::shouldBeginStraighteningLean() const {
// define a vertical capsule
const float STRAIGHTENING_LEAN_CAPSULE_RADIUS = 0.2f; // meters
const float STRAIGHTENING_LEAN_CAPSULE_LENGTH = 0.05f; // length of the cylinder part of the capsule in meters.
// detect if the derived body position is outside of a capsule around the _bodySensorMatrix
auto newBodySensorMatrix = deriveBodyFromHMDSensor();
glm::vec3 diff = extractTranslation(newBodySensorMatrix) - extractTranslation(_bodySensorMatrix);
if (!_straighteningLean && (capsuleCheck(diff, STRAIGHTENING_LEAN_CAPSULE_LENGTH, STRAIGHTENING_LEAN_CAPSULE_RADIUS) || hmdIsAtRest)) {
bool isBodyPosOutsideCapsule = capsuleCheck(diff, STRAIGHTENING_LEAN_CAPSULE_LENGTH, STRAIGHTENING_LEAN_CAPSULE_RADIUS);
// begin homing toward derived body position.
_straighteningLean = true;
_straighteningLeanAlpha = 0.0f;
if (isBodyPosOutsideCapsule) {
return true;
} else {
return false;
}
}
} else if (_straighteningLean) {
void MyAvatar::processStraighteningLean(float deltaTime) {
if (_straighteningLean) {
const float STRAIGHTENING_LEAN_DURATION = 0.5f; // seconds
auto newBodySensorMatrix = deriveBodyFromHMDSensor();
auto worldBodyMatrix = _sensorToWorldMatrix * newBodySensorMatrix;
@ -1523,33 +1570,69 @@ bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
void MyAvatar::updateOrientation(float deltaTime) {
// Smoothly rotate body with arrow keys
float targetSpeed = (_driveKeys[ROT_LEFT] - _driveKeys[ROT_RIGHT]) * YAW_SPEED;
if (targetSpeed != 0.0f) {
const float ROTATION_RAMP_TIMESCALE = 0.1f;
float blend = deltaTime / ROTATION_RAMP_TIMESCALE;
if (blend > 1.0f) {
blend = 1.0f;
}
_bodyYawDelta = (1.0f - blend) * _bodyYawDelta + blend * targetSpeed;
} else if (_bodyYawDelta != 0.0f) {
// attenuate body rotation speed
const float ROTATION_DECAY_TIMESCALE = 0.05f;
float attenuation = 1.0f - deltaTime / ROTATION_DECAY_TIMESCALE;
if (attenuation < 0.0f) {
attenuation = 0.0f;
}
_bodyYawDelta *= attenuation;
float targetSpeed = 0.0f;
// FIXME - this comfort mode code is a total hack, remove it when we have new input mapping
bool isComfortMode = Menu::getInstance()->isOptionChecked(MenuOption::ComfortMode);
bool isHMDMode = qApp->getAvatarUpdater()->isHMDMode();
if (!isHMDMode || !isComfortMode) {
targetSpeed = (_driveKeys[ROT_LEFT] - _driveKeys[ROT_RIGHT]) * YAW_SPEED;
if (targetSpeed != 0.0f) {
const float ROTATION_RAMP_TIMESCALE = 0.1f;
float blend = deltaTime / ROTATION_RAMP_TIMESCALE;
if (blend > 1.0f) {
blend = 1.0f;
}
_bodyYawDelta = (1.0f - blend) * _bodyYawDelta + blend * targetSpeed;
} else if (_bodyYawDelta != 0.0f) {
// attenuate body rotation speed
const float ROTATION_DECAY_TIMESCALE = 0.05f;
float attenuation = 1.0f - deltaTime / ROTATION_DECAY_TIMESCALE;
if (attenuation < 0.0f) {
attenuation = 0.0f;
}
_bodyYawDelta *= attenuation;
float MINIMUM_ROTATION_RATE = 2.0f;
if (fabsf(_bodyYawDelta) < MINIMUM_ROTATION_RATE) {
_bodyYawDelta = 0.0f;
}
}
// update body orientation by movement inputs
setOrientation(getOrientation() *
glm::quat(glm::radians(glm::vec3(0.0f, _bodyYawDelta * deltaTime, 0.0f))));
} else {
// Comfort Mode: If you press any of the left/right rotation drive keys or input, you'll
// get an instantaneous 15 degree turn. If you keep holding the key down you'll get another
// snap turn every half second.
_bodyYawDelta = 0.0f;
static quint64 lastPulse = 0;
quint64 now = usecTimestampNow();
quint64 COMFORT_MODE_PULSE_TIMING = USECS_PER_SECOND / 2; // turn once per half second
float driveLeft = _driveKeys[ROT_LEFT];
float driveRight = _driveKeys[ROT_RIGHT];
if ((driveLeft != 0.0f || driveRight != 0.0f) && (now - lastPulse > COMFORT_MODE_PULSE_TIMING)) {
lastPulse = now;
const float SNAP_TURN_DELTA = 15.0f; // degrees
float direction = (driveLeft - driveRight) < 0.0f ? -1.0f : 1.0f;
float turnAmount = direction * SNAP_TURN_DELTA;
// update body orientation by movement inputs
setOrientation(getOrientation() *
glm::quat(glm::radians(glm::vec3(0.0f, turnAmount, 0.0f))));
float MINIMUM_ROTATION_RATE = 2.0f;
if (fabsf(_bodyYawDelta) < MINIMUM_ROTATION_RATE) {
_bodyYawDelta = 0.0f;
}
}
getHead()->setBasePitch(getHead()->getBasePitch() + (_driveKeys[ROT_UP] - _driveKeys[ROT_DOWN]) * PITCH_SPEED * deltaTime);
// update body orientation by movement inputs
setOrientation(getOrientation() *
glm::quat(glm::radians(glm::vec3(0.0f, _bodyYawDelta * deltaTime, 0.0f))));
if (qApp->getAvatarUpdater()->isHMDMode()) {
glm::quat orientation = glm::quat_cast(getSensorToWorldMatrix()) * getHMDSensorOrientation();


@ -58,7 +58,7 @@ public:
AudioListenerMode getAudioListenerModeCamera() const { return FROM_CAMERA; }
AudioListenerMode getAudioListenerModeCustom() const { return CUSTOM; }
void reset();
void reset(bool andReload = false);
void update(float deltaTime);
void preRender(RenderArgs* renderArgs);
@ -271,6 +271,10 @@ private:
const RecorderPointer getRecorder() const { return _recorder; }
const PlayerPointer getPlayer() const { return _player; }
void beginStraighteningLean();
bool shouldBeginStraighteningLean() const;
void processStraighteningLean(float deltaTime);
bool cameraInsideHead() const;
// These are made private for MyAvatar so that you will use the "use" methods instead
@ -366,6 +370,8 @@ private:
quint64 _lastUpdateFromHMDTime = usecTimestampNow();
AtRestDetector _hmdAtRestDetector;
glm::vec3 _lastPosition;
bool _lastIsMoving = false;
};
QScriptValue audioListenModeToScriptValue(QScriptEngine* engine, const AudioListenerMode& audioListenerMode);


@ -11,11 +11,6 @@
#include "AssetUploadDialogFactory.h"
#include <AssetClient.h>
#include <AssetUpload.h>
#include <AssetUtils.h>
#include <NodeList.h>
#include <QtCore/QDebug>
#include <QtWidgets/QDialogButtonBox>
#include <QtWidgets/QFileDialog>
@ -24,12 +19,18 @@
#include <QtWidgets/QLineEdit>
#include <QtWidgets/QVBoxLayout>
#include <AssetClient.h>
#include <AssetUpload.h>
#include <AssetUtils.h>
#include <NodeList.h>
#include <ResourceManager.h>
AssetUploadDialogFactory& AssetUploadDialogFactory::getInstance() {
static AssetUploadDialogFactory staticInstance;
return staticInstance;
}
static const QString PERMISSION_DENIED_ERROR = "You do not have permission to upload content to this asset-server.";
void AssetUploadDialogFactory::showDialog() {
auto nodeList = DependencyManager::get<NodeList>();
@ -59,7 +60,7 @@ void AssetUploadDialogFactory::showDialog() {
}
} else {
// we don't have permission to upload to asset server in this domain - show the permission denied error
showErrorDialog(QString(), PERMISSION_DENIED_ERROR);
showErrorDialog(nullptr, _dialogParent, AssetUpload::PERMISSION_DENIED_ERROR);
}
}
@ -85,7 +86,7 @@ void AssetUploadDialogFactory::handleUploadFinished(AssetUpload* upload, const Q
// setup the line edit to hold the copiable text
QLineEdit* lineEdit = new QLineEdit;
QString atpURL = QString("%1:%2.%3").arg(ATP_SCHEME).arg(hash).arg(upload->getExtension());
QString atpURL = QString("%1:%2.%3").arg(URL_SCHEME_ATP).arg(hash).arg(upload->getExtension());
// set the ATP URL as the text value so it's copiable
lineEdit->insert(atpURL);
@ -117,42 +118,33 @@ void AssetUploadDialogFactory::handleUploadFinished(AssetUpload* upload, const Q
// show the new dialog
hashCopyDialog->show();
} else {
// figure out the right error message for the message box
QString additionalError;
switch (upload->getError()) {
case AssetUpload::PermissionDenied:
additionalError = PERMISSION_DENIED_ERROR;
break;
case AssetUpload::TooLarge:
additionalError = "The uploaded content was too large and could not be stored in the asset-server.";
break;
case AssetUpload::FileOpenError:
additionalError = "The file could not be opened. Please check your permissions and try again.";
break;
case AssetUpload::NetworkError:
additionalError = "The file could not be opened. Please check your network connectivity.";
break;
default:
// not handled, do not show a message box
return;
}
// display a message box with the error
showErrorDialog(QFileInfo(upload->getFilename()).fileName(), additionalError);
showErrorDialog(upload, _dialogParent);
}
upload->deleteLater();
}
void AssetUploadDialogFactory::showErrorDialog(const QString& filename, const QString& additionalError) {
QString errorMessage;
void AssetUploadDialogFactory::showErrorDialog(AssetUpload* upload, QWidget* dialogParent, const QString& overrideMessage) {
QString filename;
if (!filename.isEmpty()) {
errorMessage += QString("Failed to upload %1.\n\n").arg(filename);
if (upload) {
filename = QFileInfo { upload->getFilename() }.fileName();
}
errorMessage += additionalError;
QString errorMessage = overrideMessage;
QMessageBox::warning(_dialogParent, "Failed Upload", errorMessage);
if (errorMessage.isEmpty() && upload) {
errorMessage = upload->getErrorString();
}
QString dialogMessage;
if (upload) {
dialogMessage += QString("Failed to upload %1.\n\n").arg(filename);
}
dialogMessage += errorMessage;
QMessageBox::warning(dialogParent, "Failed Upload", dialogMessage);
}


@ -25,18 +25,18 @@ public:
AssetUploadDialogFactory& operator=(const AssetUploadDialogFactory& rhs) = delete;
static AssetUploadDialogFactory& getInstance();
static void showErrorDialog(AssetUpload* upload, QWidget* dialogParent, const QString& overrideMessage = QString());
void setDialogParent(QWidget* dialogParent) { _dialogParent = dialogParent; }
public slots:
void showDialog();
private slots:
void handleUploadFinished(AssetUpload* upload, const QString& hash);
private:
AssetUploadDialogFactory() = default;
void showErrorDialog(const QString& filename, const QString& additionalError);
QWidget* _dialogParent { nullptr };
};


@ -56,9 +56,9 @@ void AnimInverseKinematics::computeAbsolutePoses(AnimPoseVec& absolutePoses) con
}
void AnimInverseKinematics::setTargetVars(
const QString& jointName,
const QString& positionVar,
const QString& rotationVar,
const QString& jointName,
const QString& positionVar,
const QString& rotationVar,
const QString& typeVar) {
// if there are dups, last one wins.
bool found = false;
@ -95,14 +95,20 @@ void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::
}
} else {
IKTarget target;
AnimPose defaultPose = _skeleton->getAbsolutePose(targetVar.jointIndex, underPoses);
target.pose.trans = animVars.lookup(targetVar.positionVar, defaultPose.trans);
target.pose.rot = animVars.lookup(targetVar.rotationVar, defaultPose.rot);
target.setType(animVars.lookup(targetVar.typeVar, QString("")));
target.index = targetVar.jointIndex;
targets.push_back(target);
if (target.index > _maxTargetIndex) {
_maxTargetIndex = target.index;
target.setType(animVars.lookup(targetVar.typeVar, (int)IKTarget::Type::RotationAndPosition));
if (target.getType() != IKTarget::Type::Unknown) {
AnimPose defaultPose = _skeleton->getAbsolutePose(targetVar.jointIndex, underPoses);
glm::quat rotation = animVars.lookup(targetVar.rotationVar, defaultPose.rot);
glm::vec3 translation = animVars.lookup(targetVar.positionVar, defaultPose.trans);
if (target.getType() == IKTarget::Type::HipsRelativeRotationAndPosition) {
translation += _hipsOffset;
}
target.setPose(rotation, translation);
target.setIndex(targetVar.jointIndex);
targets.push_back(target);
if (targetVar.jointIndex > _maxTargetIndex) {
_maxTargetIndex = targetVar.jointIndex;
}
}
}
}
@ -141,107 +147,116 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
do {
int lowestMovedIndex = _relativePoses.size();
for (auto& target: targets) {
if (target.type == IKTarget::Type::RotationOnly) {
IKTarget::Type targetType = target.getType();
if (targetType == IKTarget::Type::RotationOnly) {
// the final rotation will be enforced after the iterations
continue;
}
AnimPose targetPose = target.pose;
// cache tip absolute transform
int tipIndex = target.index;
int tipIndex = target.getIndex();
int pivotIndex = _skeleton->getParentIndex(tipIndex);
if (pivotIndex == -1) {
continue;
}
int pivotsParentIndex = _skeleton->getParentIndex(pivotIndex);
if (pivotsParentIndex == -1) {
// TODO?: handle case where tip's parent is root?
continue;
}
glm::vec3 tipPosition = absolutePoses[tipIndex].trans;
glm::quat tipRotation = absolutePoses[tipIndex].rot;
// cache tip's parent's absolute rotation so we can recompute the tip's parent-relative
// cache tip's parent's absolute rotation so we can recompute the tip's parent-relative
// as we proceed walking down the joint chain
int pivotIndex = _skeleton->getParentIndex(tipIndex);
glm::quat tipParentRotation;
if (pivotIndex != -1) {
tipParentRotation = absolutePoses[pivotIndex].rot;
}
glm::quat tipParentRotation = absolutePoses[pivotIndex].rot;
// descend toward root, pivoting each joint to get tip closer to target
int ancestorCount = 1;
while (pivotIndex != -1) {
while (pivotsParentIndex != -1) {
// compute the two lines that should be aligned
glm::vec3 jointPosition = absolutePoses[pivotIndex].trans;
glm::vec3 leverArm = tipPosition - jointPosition;
glm::vec3 targetLine = targetPose.trans - jointPosition;
// compute the swing that would get the tip closer
glm::vec3 axis = glm::cross(leverArm, targetLine);
float axisLength = glm::length(axis);
glm::quat deltaRotation;
const float MIN_AXIS_LENGTH = 1.0e-4f;
if (axisLength > MIN_AXIS_LENGTH) {
// compute deltaRotation for alignment (swings tip closer to target)
axis /= axisLength;
float angle = acosf(glm::dot(leverArm, targetLine) / (glm::length(leverArm) * glm::length(targetLine)));
if (targetType == IKTarget::Type::RotationAndPosition ||
targetType == IKTarget::Type::HipsRelativeRotationAndPosition) {
// compute the swing that would get the tip closer
glm::vec3 targetLine = target.getTranslation() - jointPosition;
glm::vec3 axis = glm::cross(leverArm, targetLine);
float axisLength = glm::length(axis);
const float MIN_AXIS_LENGTH = 1.0e-4f;
if (axisLength > MIN_AXIS_LENGTH) {
// compute deltaRotation for alignment (swings tip closer to target)
axis /= axisLength;
float angle = acosf(glm::dot(leverArm, targetLine) / (glm::length(leverArm) * glm::length(targetLine)));
// NOTE: even when axisLength is not zero (e.g. lever-arm and pivot-arm are not quite aligned) it is
// still possible for the angle to be zero so we also check that to avoid unnecessary calculations.
const float MIN_ADJUSTMENT_ANGLE = 1.0e-4f;
if (angle > MIN_ADJUSTMENT_ANGLE) {
// reduce angle by a fraction (reduces IK swing contribution of this joint)
angle /= (float)ancestorCount;
deltaRotation = glm::angleAxis(angle, axis);
}
// NOTE: even when axisLength is not zero (e.g. lever-arm and pivot-arm are not quite aligned) it is
// still possible for the angle to be zero so we also check that to avoid unnecessary calculations.
const float MIN_ADJUSTMENT_ANGLE = 1.0e-4f;
if (angle > MIN_ADJUSTMENT_ANGLE) {
// reduce angle by a fraction (for stability)
const float fraction = 0.5f;
angle *= fraction;
deltaRotation = glm::angleAxis(angle, axis);
// The swing will re-orient the tip but there will tend to be a non-zero delta between the tip's
// new rotation and its target. We compute that delta here and rotate the tipJoint accordingly.
glm::quat tipRelativeRotation = glm::inverse(deltaRotation * tipParentRotation) * targetPose.rot;
// The swing will re-orient the tip but there will tend to be a non-zero delta between the tip's
// new rotation and its target. This is the final parent-relative rotation that the tip joint must
// make to achieve its target rotation.
glm::quat tipRelativeRotation = glm::inverse(deltaRotation * tipParentRotation) * target.getRotation();
// enforce tip's constraint
RotationConstraint* constraint = getConstraint(tipIndex);
if (constraint) {
bool constrained = constraint->apply(tipRelativeRotation);
if (constrained) {
// The tip's final parent-relative rotation violates its constraint
// so we try to twist this pivot to compensate.
glm::quat constrainedTipRotation = deltaRotation * tipParentRotation * tipRelativeRotation;
glm::quat missingRotation = targetPose.rot * glm::inverse(constrainedTipRotation);
glm::quat swingPart;
glm::quat twistPart;
glm::vec3 axis = glm::normalize(deltaRotation * leverArm);
swingTwistDecomposition(missingRotation, axis, swingPart, twistPart);
deltaRotation = twistPart * deltaRotation;
}
// we update the tip rotation here to rotate it as close to its target orientation as possible
// before moving on to next pivot
tipRotation = tipParentRotation * tipRelativeRotation;
}
}
++ancestorCount;
int parentIndex = _skeleton->getParentIndex(pivotIndex);
if (parentIndex == -1) {
// TODO? apply constraints to root?
// TODO? harvest the root's transform as movement of entire skeleton?
} else {
// compute joint's new parent-relative rotation after swing
// Q' = dQ * Q and Q = Qp * q --> q' = Qp^ * dQ * Q
glm::quat newRot = glm::normalize(glm::inverse(
absolutePoses[parentIndex].rot) *
deltaRotation *
absolutePoses[pivotIndex].rot);
// enforce pivot's constraint
RotationConstraint* constraint = getConstraint(pivotIndex);
if (constraint) {
bool constrained = constraint->apply(newRot);
if (constrained) {
// the constraint will modify the movement of the tip so we have to compute the modified
// model-frame deltaRotation
// Q' = Qp^ * dQ * Q --> dQ = Qp * Q' * Q^
deltaRotation = absolutePoses[parentIndex].rot *
newRot *
glm::inverse(absolutePoses[pivotIndex].rot);
// enforce tip's constraint
RotationConstraint* constraint = getConstraint(tipIndex);
if (constraint) {
bool constrained = constraint->apply(tipRelativeRotation);
if (constrained) {
// The tip's final parent-relative rotation would violate its constraint
// so we try to pre-twist this pivot to compensate.
glm::quat constrainedTipRotation = deltaRotation * tipParentRotation * tipRelativeRotation;
glm::quat missingRotation = target.getRotation() * glm::inverse(constrainedTipRotation);
glm::quat swingPart;
glm::quat twistPart;
glm::vec3 axis = glm::normalize(deltaRotation * leverArm);
swingTwistDecomposition(missingRotation, axis, swingPart, twistPart);
float dotSign = copysignf(1.0f, twistPart.w);
deltaRotation = glm::normalize(glm::lerp(glm::quat(), dotSign * twistPart, fraction)) * deltaRotation;
}
}
}
}
// store the rotation change in the accumulator
_accumulators[pivotIndex].add(newRot);
} else if (targetType == IKTarget::Type::HmdHead) {
// An HmdHead target slaves the orientation of the end-effector by distributing rotation
// deltas up the hierarchy. Its target position is enforced later by shifting the hips.
deltaRotation = target.getRotation() * glm::inverse(tipRotation);
float dotSign = copysignf(1.0f, deltaRotation.w);
const float ANGLE_DISTRIBUTION_FACTOR = 0.15f;
deltaRotation = glm::normalize(glm::lerp(glm::quat(), dotSign * deltaRotation, ANGLE_DISTRIBUTION_FACTOR));
}
// compute joint's new parent-relative rotation after swing
// Q' = dQ * Q and Q = Qp * q --> q' = Qp^ * dQ * Q
glm::quat newRot = glm::normalize(glm::inverse(
absolutePoses[pivotsParentIndex].rot) *
deltaRotation *
absolutePoses[pivotIndex].rot);
// enforce pivot's constraint
RotationConstraint* constraint = getConstraint(pivotIndex);
if (constraint) {
bool constrained = constraint->apply(newRot);
if (constrained) {
// the constraint will modify the movement of the tip so we have to compute the modified
// model-frame deltaRotation
// Q' = Qp^ * dQ * Q --> dQ = Qp * Q' * Q^
deltaRotation = absolutePoses[pivotsParentIndex].rot *
newRot *
glm::inverse(absolutePoses[pivotIndex].rot);
}
}
// store the rotation change in the accumulator
_accumulators[pivotIndex].add(newRot);
// this joint has been changed so we check to see if it has the lowest index
if (pivotIndex < lowestMovedIndex) {
lowestMovedIndex = pivotIndex;
@ -252,7 +267,8 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
tipRotation = glm::normalize(deltaRotation * tipRotation);
tipParentRotation = glm::normalize(deltaRotation * tipParentRotation);
pivotIndex = _skeleton->getParentIndex(pivotIndex);
pivotIndex = pivotsParentIndex;
pivotsParentIndex = _skeleton->getParentIndex(pivotIndex);
}
}
++numLoops;
@ -275,26 +291,15 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
}
} while (numLoops < MAX_IK_LOOPS);
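The heart of each CCD pivot update above is a swing rotation that aligns the lever arm (pivot to tip) with the target line (pivot to target), with a guard against a degenerate rotation axis. A minimal, self-contained sketch of that alignment step follows; the function and variable names are illustrative, not from the codebase.
#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Compute the swing that rotates leverArm toward targetLine, as in the CCD loop above.
glm::quat computeSwing(const glm::vec3& leverArm, const glm::vec3& targetLine) {
    const float MIN_AXIS_LENGTH = 1.0e-4f;
    glm::vec3 axis = glm::cross(leverArm, targetLine);
    float axisLength = glm::length(axis);
    if (axisLength < MIN_AXIS_LENGTH) {
        return glm::quat(1.0f, 0.0f, 0.0f, 0.0f); // already (anti-)aligned: no swing
    }
    axis /= axisLength;
    float cosAngle = glm::dot(leverArm, targetLine) / (glm::length(leverArm) * glm::length(targetLine));
    float angle = acosf(glm::clamp(cosAngle, -1.0f, 1.0f)); // clamp guards against round-off
    return glm::angleAxis(angle, axis);
}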
/* KEEP: example code for measuring end-effector error of IK solution
for (uint32_t i = 0; i < targets.size(); ++i) {
auto& target = targets[i];
if (target.type == IKTarget::Type::RotationOnly) {
continue;
}
glm::vec3 tipPosition = absolutePoses[target.index].trans;
std::cout << i << " IK error = " << glm::distance(tipPosition, target.pose.trans) << std::endl;
}
*/
// finally set the relative rotation of each tip to agree with absolute target rotation
for (auto& target: targets) {
int tipIndex = target.index;
int tipIndex = target.getIndex();
int parentIndex = _skeleton->getParentIndex(tipIndex);
if (parentIndex != -1) {
AnimPose targetPose = target.pose;
const glm::quat& targetRotation = target.getRotation();
// compute tip's new parent-relative rotation
// Q = Qp * q --> q' = Qp^ * Q
glm::quat newRelativeRotation = glm::inverse(absolutePoses[parentIndex].rot) * targetPose.rot;
glm::quat newRelativeRotation = glm::inverse(absolutePoses[parentIndex].rot) * targetRotation;
RotationConstraint* constraint = getConstraint(tipIndex);
if (constraint) {
constraint->apply(newRelativeRotation);
@ -303,7 +308,7 @@ void AnimInverseKinematics::solveWithCyclicCoordinateDescent(const std::vector<I
// to help this rotation target get met.
}
_relativePoses[tipIndex].rot = newRelativeRotation;
absolutePoses[tipIndex].rot = targetPose.rot;
absolutePoses[tipIndex].rot = targetRotation;
}
}
}
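The tip fix-up above relies on the identity Q = Qp * q, so the parent-relative rotation is q = Qp^-1 * Q. A tiny sketch of that extraction, with illustrative names:
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Extract a parent-relative rotation from an absolute target rotation.
// Composing back (parentAbsolute * result) reproduces targetAbsolute.
glm::quat extractRelativeRotation(const glm::quat& parentAbsolute, const glm::quat& targetAbsolute) {
    return glm::inverse(parentAbsolute) * targetAbsolute;
}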
@ -323,7 +328,7 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
// relax toward underpose
// HACK: this relaxation needs to be constant per-frame rather than per-realtime
// in order to prevent IK "flutter" for bad FPS. The bad news is that the good parts
// of this relaxation will be FPS dependent (low FPS will make the limbs align slower
// in real-time), however most people will not notice this and this problem is less
// annoying than the flutter.
const float blend = (1.0f / 60.0f) / (0.25f); // effectively: dt / RELAXATION_TIMESCALE
@ -343,7 +348,7 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
// build a list of targets from _targetVarVec
std::vector<IKTarget> targets;
computeTargets(animVars, targets, underPoses);
if (targets.empty()) {
// no IK targets but still need to enforce constraints
std::map<int, RotationConstraint*>::iterator constraintItr = _constraints.begin();
@ -355,7 +360,50 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
++constraintItr;
}
} else {
// shift the hips according to the offset from the previous frame
float offsetLength = glm::length(_hipsOffset);
const float MIN_HIPS_OFFSET_LENGTH = 0.03f;
if (offsetLength > MIN_HIPS_OFFSET_LENGTH) {
// but only if offset is long enough
float scaleFactor = ((offsetLength - MIN_HIPS_OFFSET_LENGTH) / offsetLength);
_relativePoses[0].trans = underPoses[0].trans + scaleFactor * _hipsOffset;
}
solveWithCyclicCoordinateDescent(targets);
// compute the new target hips offset (for next frame)
// by looking for discrepancies between where a targeted endEffector is
// and where it wants to be (after IK solutions are done)
glm::vec3 newHipsOffset = Vectors::ZERO;
for (auto& target: targets) {
int targetIndex = target.getIndex();
if (targetIndex == _headIndex && _headIndex != -1) {
// special handling for headTarget
if (target.getType() == IKTarget::Type::RotationOnly) {
// we want to shift the hips to bring the underpose closer
// to where the head happens to be (overpose)
glm::vec3 under = _skeleton->getAbsolutePose(_headIndex, underPoses).trans;
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans;
const float HEAD_OFFSET_SLAVE_FACTOR = 0.65f;
newHipsOffset += HEAD_OFFSET_SLAVE_FACTOR * (actual - under);
} else if (target.getType() == IKTarget::Type::HmdHead) {
// we want to shift the hips to bring the head to its designated position
glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans;
_hipsOffset += target.getTranslation() - actual;
// and ignore all other targets
newHipsOffset = _hipsOffset;
break;
}
} else if (target.getType() == IKTarget::Type::RotationAndPosition) {
glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans;
glm::vec3 targetPosition = target.getTranslation();
newHipsOffset += targetPosition - actualPosition;
}
}
// smooth transitions by relaxing _hipsOffset toward the new value
const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.15f;
_hipsOffset += (newHipsOffset - _hipsOffset) * (dt / HIPS_OFFSET_SLAVE_TIMESCALE);
}
}
return _relativePoses;
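The hips offset above is not applied instantly; each frame it is relaxed toward the newly measured offset by the fraction dt / HIPS_OFFSET_SLAVE_TIMESCALE, a first-order filter that smooths transitions. A minimal sketch of that relaxation, with illustrative names:
#include <glm/glm.hpp>

// Move current toward target by a fraction dt / timescale each frame.
// For dt much smaller than timescale this approximates exponential decay toward the target.
glm::vec3 relaxToward(const glm::vec3& current, const glm::vec3& target, float dt, float timescale) {
    return current + (target - current) * (dt / timescale);
}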
@ -477,7 +525,7 @@ void AnimInverseKinematics::initConstraints() {
stConstraint->setSwingLimits(minDots);
constraint = static_cast<RotationConstraint*>(stConstraint);
} else if (0 == baseName.compare("UpLegXXX", Qt::CaseInsensitive)) {
} else if (0 == baseName.compare("UpLeg", Qt::CaseInsensitive)) {
SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
stConstraint->setReferenceRotation(_defaultRelativePoses[i].rot);
stConstraint->setTwistLimits(-PI / 4.0f, PI / 4.0f);
@ -581,7 +629,7 @@ void AnimInverseKinematics::initConstraints() {
} else if (0 == baseName.compare("Neck", Qt::CaseInsensitive)) {
SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
stConstraint->setReferenceRotation(_defaultRelativePoses[i].rot);
const float MAX_NECK_TWIST = PI / 2.0f;
const float MAX_NECK_TWIST = PI / 4.0f;
stConstraint->setTwistLimits(-MAX_NECK_TWIST, MAX_NECK_TWIST);
std::vector<float> minDots;
@ -589,6 +637,18 @@ void AnimInverseKinematics::initConstraints() {
minDots.push_back(cosf(MAX_NECK_SWING));
stConstraint->setSwingLimits(minDots);
constraint = static_cast<RotationConstraint*>(stConstraint);
} else if (0 == baseName.compare("Head", Qt::CaseInsensitive)) {
SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
stConstraint->setReferenceRotation(_defaultRelativePoses[i].rot);
const float MAX_HEAD_TWIST = PI / 4.0f;
stConstraint->setTwistLimits(-MAX_HEAD_TWIST, MAX_HEAD_TWIST);
std::vector<float> minDots;
const float MAX_HEAD_SWING = PI / 4.0f;
minDots.push_back(cosf(MAX_HEAD_SWING));
stConstraint->setSwingLimits(minDots);
constraint = static_cast<RotationConstraint*>(stConstraint);
} else if (0 == baseName.compare("ForeArm", Qt::CaseInsensitive)) {
// The elbow joint rotates about the parent-frame's zAxis (-zAxis) for the Right (Left) arm.
@ -621,7 +681,7 @@ void AnimInverseKinematics::initConstraints() {
eConstraint->setAngleLimits(minAngle, maxAngle);
constraint = static_cast<RotationConstraint*>(eConstraint);
} else if (0 == baseName.compare("LegXXX", Qt::CaseInsensitive)) {
} else if (0 == baseName.compare("Leg", Qt::CaseInsensitive)) {
// The knee joint rotates about the parent-frame's -xAxis.
ElbowConstraint* eConstraint = new ElbowConstraint();
glm::quat referenceRotation = _defaultRelativePoses[i].rot;
@ -652,7 +712,7 @@ void AnimInverseKinematics::initConstraints() {
eConstraint->setAngleLimits(minAngle, maxAngle);
constraint = static_cast<RotationConstraint*>(eConstraint);
} else if (0 == baseName.compare("FootXXX", Qt::CaseInsensitive)) {
} else if (0 == baseName.compare("Foot", Qt::CaseInsensitive)) {
SwingTwistConstraint* stConstraint = new SwingTwistConstraint();
stConstraint->setReferenceRotation(_defaultRelativePoses[i].rot);
stConstraint->setTwistLimits(-PI / 4.0f, PI / 4.0f);
@ -697,7 +757,9 @@ void AnimInverseKinematics::setSkeletonInternal(AnimSkeleton::ConstPointer skele
if (skeleton) {
initConstraints();
_headIndex = _skeleton->nameToJointIndex("Head");
} else {
clearConstraints();
_headIndex = -1;
}
}
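The constraint blocks added or renamed above (UpLeg, Leg, Foot, Neck, Head) all follow the same recipe: set the joint's reference rotation, symmetric twist limits, and a swing cone expressed as the cosine of the maximum swing angle. A hedged sketch of that configuration pattern, assuming the SwingTwistConstraint API used in this diff (the helper name is illustrative):
#include <cmath>
#include <vector>
#include <glm/gtc/quaternion.hpp>
#include "SwingTwistConstraint.h" // from this library

// Build a symmetric twist + single-cone swing constraint, as done per joint above.
SwingTwistConstraint* makeConeConstraint(const glm::quat& referenceRotation, float maxTwist, float maxSwing) {
    SwingTwistConstraint* constraint = new SwingTwistConstraint();
    constraint->setReferenceRotation(referenceRotation);
    constraint->setTwistLimits(-maxTwist, maxTwist);
    std::vector<float> minDots;
    minDots.push_back(cosf(maxSwing)); // swing limits are stored as cos(angle)
    constraint->setSwingLimits(minDots);
    return constraint;
}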

View file

@ -16,6 +16,7 @@
#include <vector>
#include "AnimNode.h"
#include "IKTarget.h"
#include "RotationAccumulator.h"
@ -37,18 +38,6 @@ public:
virtual const AnimPoseVec& overlay(const AnimVariantMap& animVars, float dt, Triggers& triggersOut, const AnimPoseVec& underPoses) override;
protected:
struct IKTarget {
enum class Type {
RotationAndPosition,
RotationOnly
};
AnimPose pose;
int index;
Type type = Type::RotationAndPosition;
void setType(const QString& typeVar) { type = ((typeVar == "RotationOnly") ? Type::RotationOnly : Type::RotationAndPosition); }
};
void computeTargets(const AnimVariantMap& animVars, std::vector<IKTarget>& targets, const AnimPoseVec& underPoses);
void solveWithCyclicCoordinateDescent(const std::vector<IKTarget>& targets);
virtual void setSkeletonInternal(AnimSkeleton::ConstPointer skeleton) override;
@ -60,6 +49,10 @@ protected:
void clearConstraints();
void initConstraints();
// no copies
AnimInverseKinematics(const AnimInverseKinematics&) = delete;
AnimInverseKinematics& operator=(const AnimInverseKinematics&) = delete;
struct IKTargetVar {
IKTargetVar(const QString& jointNameIn,
const QString& positionVarIn,
@ -85,9 +78,9 @@ protected:
AnimPoseVec _defaultRelativePoses; // poses of the relaxed state
AnimPoseVec _relativePoses; // current relative poses
// no copies
AnimInverseKinematics(const AnimInverseKinematics&) = delete;
AnimInverseKinematics& operator=(const AnimInverseKinematics&) = delete;
// experimental data for moving hips during IK
int _headIndex = -1;
glm::vec3 _hipsOffset = Vectors::ZERO;
// _maxTargetIndex is tracked to help optimize the recalculation of absolute poses
// during the cyclic coordinate descent algorithm

View file

@ -93,7 +93,7 @@ void AnimStateMachine::switchState(const AnimVariantMap& animVars, State::Pointe
const float dt = 0.0f;
Triggers triggers;
_nextPoses = nextStateNode->evaluate(animVars, dt, triggers);
#if WANT_DEBUGa
#if WANT_DEBUG
qCDebug(animation) << "AnimStateMachine::switchState:" << _currentState->getID() << "->" << desiredState->getID() << "duration =" << duration << "targetFrame =" << desiredState->_interpTarget;
#endif
_currentState = desiredState;

View file

@ -0,0 +1,34 @@
//
// IKTarget.cpp
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "IKTarget.h"
void IKTarget::setPose(const glm::quat& rotation, const glm::vec3& translation) {
_pose.rot = rotation;
_pose.trans = translation;
}
void IKTarget::setType(int type) {
switch (type) {
case (int)Type::RotationAndPosition:
_type = Type::RotationAndPosition;
break;
case (int)Type::RotationOnly:
_type = Type::RotationOnly;
break;
case (int)Type::HmdHead:
_type = Type::HmdHead;
break;
case (int)Type::HipsRelativeRotationAndPosition:
_type = Type::HipsRelativeRotationAndPosition;
break;
default:
_type = Type::Unknown;
}
}
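setType() takes an int because target types travel through the anim-var system (see "headType", "neckType", and the hand types set later in this diff) and maps it back to the enum with an explicit switch, so an out-of-range value degrades to Type::Unknown instead of producing an invalid enum by cast. A short hypothetical usage, assuming #include "IKTarget.h":
IKTarget target;
target.setType((int)IKTarget::Type::HmdHead); // round-trips to Type::HmdHead
target.setType(12345);                        // out-of-range value falls back to Type::Unknown, disabling the target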

View file

@ -0,0 +1,43 @@
//
// IKTarget.h
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_IKTarget_h
#define hifi_IKTarget_h
#include "AnimSkeleton.h"
class IKTarget {
public:
enum class Type {
RotationAndPosition,
RotationOnly,
HmdHead,
HipsRelativeRotationAndPosition,
Unknown,
};
IKTarget() {}
const glm::vec3& getTranslation() const { return _pose.trans; }
const glm::quat& getRotation() const { return _pose.rot; }
int getIndex() const { return _index; }
Type getType() const { return _type; }
void setPose(const glm::quat& rotation, const glm::vec3& translation);
void setIndex(int index) { _index = index; }
void setType(int);
private:
AnimPose _pose;
int _index = -1;
Type _type = Type::RotationAndPosition;
};
#endif // hifi_IKTarget_h

View file

@ -14,13 +14,14 @@
#include <glm/gtx/vector_angle.hpp>
#include <queue>
#include "NumericalConstants.h"
#include <NumericalConstants.h>
#include <DebugDraw.h>
#include "AnimationHandle.h"
#include "AnimationLogging.h"
#include "AnimSkeleton.h"
#include "DebugDraw.h"
#include "IKTarget.h"
#include "Rig.h"
void Rig::HeadParameters::dump() const {
qCDebug(animation, "HeadParameters =");
@ -436,16 +437,6 @@ void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPos
static float t = 0.0f;
_animVars.set("sine", static_cast<float>(0.5 * sin(t) + 0.5));
// default anim vars to notMoving and notTurning
_animVars.set("isMovingForward", false);
_animVars.set("isMovingBackward", false);
_animVars.set("isMovingLeft", false);
_animVars.set("isMovingRight", false);
_animVars.set("isNotMoving", true);
_animVars.set("isTurningLeft", false);
_animVars.set("isTurningRight", false);
_animVars.set("isNotTurning", true);
const float ANIM_WALK_SPEED = 1.4f; // m/s
_animVars.set("walkTimeScale", glm::clamp(0.5f, 2.0f, glm::length(localVel) / ANIM_WALK_SPEED));
@ -469,47 +460,102 @@ void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPos
}
if (glm::length(localVel) > moveThresh) {
if (fabsf(forwardSpeed) > 0.5f * fabsf(lateralSpeed)) {
if (forwardSpeed > 0.0f) {
// forward
_animVars.set("isMovingForward", true);
_animVars.set("isNotMoving", false);
} else {
// backward
_animVars.set("isMovingBackward", true);
_animVars.set("isNotMoving", false);
}
} else {
if (lateralSpeed > 0.0f) {
// right
_animVars.set("isMovingRight", true);
_animVars.set("isNotMoving", false);
} else {
// left
_animVars.set("isMovingLeft", true);
_animVars.set("isNotMoving", false);
}
if (_desiredState != RigRole::Move) {
_desiredStateAge = 0.0f;
}
_state = RigRole::Move;
_desiredState = RigRole::Move;
} else {
if (fabsf(turningSpeed) > turnThresh) {
if (turningSpeed > 0.0f) {
// turning right
_animVars.set("isTurningRight", true);
_animVars.set("isNotTurning", false);
} else {
// turning left
_animVars.set("isTurningLeft", true);
_animVars.set("isNotTurning", false);
if (_desiredState != RigRole::Turn) {
_desiredStateAge = 0.0f;
}
_state = RigRole::Turn;
} else {
// idle
_state = RigRole::Idle;
_desiredState = RigRole::Turn;
} else { // idle
if (_desiredState != RigRole::Idle) {
_desiredStateAge = 0.0f;
}
_desiredState = RigRole::Idle;
}
}
const float STATE_CHANGE_HYSTERESIS_TIMER = 0.1f;
if ((_desiredStateAge >= STATE_CHANGE_HYSTERESIS_TIMER) && _desiredState != _state) {
_state = _desiredState;
_desiredStateAge = 0.0f;
}
_desiredStateAge += deltaTime;
if (_state == RigRole::Move) {
if (glm::length(localVel) > MOVE_ENTER_SPEED_THRESHOLD) {
if (fabsf(forwardSpeed) > 0.5f * fabsf(lateralSpeed)) {
if (forwardSpeed > 0.0f) {
// forward
_animVars.set("isMovingForward", true);
_animVars.set("isMovingBackward", false);
_animVars.set("isMovingRight", false);
_animVars.set("isMovingLeft", false);
_animVars.set("isNotMoving", false);
} else {
// backward
_animVars.set("isMovingBackward", true);
_animVars.set("isMovingForward", false);
_animVars.set("isMovingRight", false);
_animVars.set("isMovingLeft", false);
_animVars.set("isNotMoving", false);
}
} else {
if (lateralSpeed > 0.0f) {
// right
_animVars.set("isMovingRight", true);
_animVars.set("isMovingLeft", false);
_animVars.set("isMovingForward", false);
_animVars.set("isMovingBackward", false);
_animVars.set("isNotMoving", false);
} else {
// left
_animVars.set("isMovingLeft", true);
_animVars.set("isMovingRight", false);
_animVars.set("isMovingForward", false);
_animVars.set("isMovingBackward", false);
_animVars.set("isNotMoving", false);
}
}
_animVars.set("isTurningLeft", false);
_animVars.set("isTurningRight", false);
_animVars.set("isNotTurning", true);
}
} else if (_state == RigRole::Turn) {
if (turningSpeed > 0.0f) {
// turning right
_animVars.set("isTurningRight", true);
_animVars.set("isTurningLeft", false);
_animVars.set("isNotTurning", false);
} else {
// turning left
_animVars.set("isTurningLeft", true);
_animVars.set("isTurningRight", false);
_animVars.set("isNotTurning", false);
}
_animVars.set("isMovingForward", false);
_animVars.set("isMovingBackward", false);
_animVars.set("isMovingRight", false);
_animVars.set("isMovingLeft", false);
_animVars.set("isNotMoving", true);
} else {
// default anim vars to notMoving and notTurning
_animVars.set("isMovingForward", false);
_animVars.set("isMovingBackward", false);
_animVars.set("isMovingLeft", false);
_animVars.set("isMovingRight", false);
_animVars.set("isNotMoving", true);
_animVars.set("isTurningLeft", false);
_animVars.set("isTurningRight", false);
_animVars.set("isNotTurning", true);
}
t += deltaTime;
}
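The move/turn/idle decision above now passes through a small hysteresis: the desired state must persist for STATE_CHANGE_HYSTERESIS_TIMER seconds before _state switches, which suppresses rapid flip-flopping when the speed sits near a threshold. A sketch of that pattern in isolation, with illustrative names:
// Candidate state must hold for minHoldTime before it becomes the active state.
struct HysteresisState {
    int active = 0;
    int desired = 0;
    float desiredAge = 0.0f;
};

void updateWithHysteresis(HysteresisState& s, int candidate, float dt, float minHoldTime) {
    if (candidate != s.desired) {
        s.desired = candidate;
        s.desiredAge = 0.0f; // restart the timer whenever the candidate changes
    }
    if (s.desiredAge >= minHoldTime && s.desired != s.active) {
        s.active = s.desired;
        s.desiredAge = 0.0f;
    }
    s.desiredAge += dt;
}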
@ -1057,9 +1103,11 @@ void Rig::updateNeckJoint(int index, const HeadParameters& params) {
_animVars.set("headPosition", headPos);
_animVars.set("headRotation", headRot);
_animVars.set("headAndNeckType", QString("RotationAndPosition"));
_animVars.set("headType", (int)IKTarget::Type::HmdHead);
_animVars.set("neckPosition", neckPos);
_animVars.set("neckRotation", neckRot);
//_animVars.set("neckType", (int)IKTarget::Type::RotationOnly);
_animVars.set("neckType", (int)IKTarget::Type::Unknown); // 'Unknown' disables the target
} else {
@ -1070,9 +1118,11 @@ void Rig::updateNeckJoint(int index, const HeadParameters& params) {
_animVars.unset("headPosition");
_animVars.set("headRotation", realLocalHeadOrientation);
_animVars.set("headAndNeckType", QString("RotationOnly"));
_animVars.set("headAndNeckType", (int)IKTarget::Type::RotationOnly);
_animVars.set("headType", (int)IKTarget::Type::RotationOnly);
_animVars.unset("neckPosition");
_animVars.unset("neckRotation");
_animVars.set("neckType", (int)IKTarget::Type::RotationOnly);
}
} else if (!_enableAnimGraph) {
@ -1130,16 +1180,20 @@ void Rig::updateFromHandParameters(const HandParameters& params, float dt) {
if (params.isLeftEnabled) {
_animVars.set("leftHandPosition", rootBindPose.trans + rootBindPose.rot * yFlipHACK * params.leftPosition);
_animVars.set("leftHandRotation", rootBindPose.rot * yFlipHACK * params.leftOrientation);
_animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);
} else {
_animVars.unset("leftHandPosition");
_animVars.unset("leftHandRotation");
_animVars.set("leftHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
}
if (params.isRightEnabled) {
_animVars.set("rightHandPosition", rootBindPose.trans + rootBindPose.rot * yFlipHACK * params.rightPosition);
_animVars.set("rightHandRotation", rootBindPose.rot * yFlipHACK * params.rightOrientation);
_animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);
} else {
_animVars.unset("rightHandPosition");
_animVars.unset("rightHandRotation");
_animVars.set("rightHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
}
// set leftHand grab vars

View file

@ -236,6 +236,8 @@ public:
Move
};
RigRole _state = RigRole::Idle;
RigRole _desiredState = RigRole::Idle;
float _desiredStateAge = 0.0f;
float _leftHandOverlayAlpha = 0.0f;
float _rightHandOverlayAlpha = 0.0f;
};

View file

@ -11,6 +11,8 @@
#include "OculusHelpers.h"
#include <plugins/PluginContainer.h>
#if (OVR_MAJOR_VERSION >= 6)
// A base class for FBO wrappers that need to use the Oculus C
@ -135,6 +137,19 @@ const QString & OculusDisplayPlugin::getName() const {
return NAME;
}
static const QString MONO_PREVIEW = "Mono Preview";
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
void OculusDisplayPlugin::activate() {
CONTAINER->addMenuItem(MENU_PATH(), MONO_PREVIEW,
[this](bool clicked) {
_monoPreview = clicked;
}, true, true);
CONTAINER->removeMenu(FRAMERATE);
OculusBaseDisplayPlugin::activate();
}
void OculusDisplayPlugin::customizeContext() {
WindowOpenGLDisplayPlugin::customizeContext();
#if (OVR_MAJOR_VERSION >= 6)
@ -149,7 +164,7 @@ void OculusDisplayPlugin::customizeContext() {
#endif
enableVsync(false);
// Only enable mirroring if we know vsync is disabled
_enableMirror = !isVsyncEnabled();
_enablePreview = !isVsyncEnabled();
}
void OculusDisplayPlugin::deactivate() {
@ -169,10 +184,15 @@ void OculusDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSi
// controlling vsync
wglSwapIntervalEXT(0);
// screen mirroring
if (_enableMirror) {
// screen preview mirroring
if (_enablePreview) {
auto windowSize = toGlm(_window->size());
Context::Viewport(windowSize.x, windowSize.y);
if (_monoPreview) {
Context::Viewport(windowSize.x * 2, windowSize.y);
Context::Scissor(0, windowSize.y, windowSize.x, windowSize.y);
} else {
Context::Viewport(windowSize.x, windowSize.y);
}
glBindTexture(GL_TEXTURE_2D, finalTexture);
GLenum err = glGetError();
Q_ASSERT(0 == err);
@ -216,7 +236,7 @@ void OculusDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSi
otherwise the swap-buffer delay will interfere with the framerate of the headset
*/
void OculusDisplayPlugin::finishFrame() {
if (_enableMirror) {
if (_enablePreview) {
swapBuffers();
}
doneCurrent();

View file

@ -14,6 +14,7 @@ using SwapFboPtr = QSharedPointer<SwapFramebufferWrapper>;
class OculusDisplayPlugin : public OculusBaseDisplayPlugin {
public:
virtual void activate() override;
virtual void deactivate() override;
virtual const QString & getName() const override;
@ -25,7 +26,8 @@ protected:
private:
static const QString NAME;
bool _enableMirror{ false };
bool _enablePreview { false };
bool _monoPreview { true };
#if (OVR_MAJOR_VERSION >= 6)
SwapFboPtr _sceneFbo;

View file

@ -61,6 +61,8 @@ glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProje
return eyeProjection;
}
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
std::vector<QAction*> _screenActions;
void StereoDisplayPlugin::activate() {
auto screens = qApp->screens();
@ -76,6 +78,9 @@ void StereoDisplayPlugin::activate() {
[this](bool clicked) { updateScreen(); }, true, checked, "Screens");
_screenActions[i] = action;
}
CONTAINER->removeMenu(FRAMERATE);
CONTAINER->setFullscreen(qApp->primaryScreen());
WindowOpenGLDisplayPlugin::activate();
}

View file

@ -17,8 +17,6 @@
const float CONTROLLER_THRESHOLD = 0.3f;
const float MAX_AXIS = 32768.0f;
StandardController::~StandardController() {
}

View file

@ -65,43 +65,65 @@ void AssetClient::init() {
}
}
bool haveAssetServer() {
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (!assetServer) {
qCWarning(asset_client) << "Could not complete AssetClient operation "
<< "since you are not currently connected to an asset-server.";
return false;
}
return true;
}
AssetRequest* AssetClient::createRequest(const QString& hash, const QString& extension) {
if (hash.length() != SHA256_HASH_HEX_LENGTH) {
qCWarning(asset_client) << "Invalid hash size";
return nullptr;
}
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (!assetServer) {
qCWarning(asset_client).nospace() << "Could not request " << hash << "." << extension
<< " since you are not currently connected to an asset-server.";
if (haveAssetServer()) {
auto request = new AssetRequest(hash, extension);
// Move to the AssetClient thread in case we are not currently on that thread (which will usually be the case)
request->moveToThread(thread());
request->setParent(this);
return request;
} else {
return nullptr;
}
auto request = new AssetRequest(hash, extension);
// Move to the AssetClient thread in case we are not currently on that thread (which will usually be the case)
request->moveToThread(thread());
return request;
}
AssetUpload* AssetClient::createUpload(const QString& filename) {
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer assetServer = nodeList->soloNodeOfType(NodeType::AssetServer);
if (!assetServer) {
qCWarning(asset_client) << "Could not upload" << filename << "since you are not currently connected to an asset-server.";
if (haveAssetServer()) {
auto upload = new AssetUpload(filename);
upload->moveToThread(thread());
upload->setParent(this);
return upload;
} else {
return nullptr;
}
auto upload = new AssetUpload(this, filename);
}
upload->moveToThread(thread());
return upload;
AssetUpload* AssetClient::createUpload(const QByteArray& data, const QString& extension) {
if (haveAssetServer()) {
auto upload = new AssetUpload(data, extension);
upload->moveToThread(thread());
upload->setParent(this);
return upload;
} else {
return nullptr;
}
}
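Both factory methods now bail out early (returning nullptr) when no asset-server is connected, and parent the new object to the client after moving it to the client's thread. A hedged usage sketch of the upload path based on the API shown here (createUpload, AssetUpload::finished, getError, getErrorString); the file path and connection style are illustrative:
auto assetClient = DependencyManager::get<AssetClient>();
if (auto* upload = assetClient->createUpload("C:/models/chair.fbx")) {
    QObject::connect(upload, &AssetUpload::finished, [](AssetUpload* completedUpload, const QString& hash) {
        if (completedUpload->getError() == AssetUpload::NoError) {
            qDebug() << "uploaded as" << hash;
        } else {
            qDebug() << completedUpload->getErrorString();
        }
        completedUpload->deleteLater();
    });
    upload->start();
} // a nullptr return means no asset-server is currently connected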
bool AssetClient::getAsset(const QString& hash, const QString& extension, DataOffset start, DataOffset end,

View file

@ -45,6 +45,7 @@ public:
Q_INVOKABLE AssetRequest* createRequest(const QString& hash, const QString& extension);
Q_INVOKABLE AssetUpload* createUpload(const QString& filename);
Q_INVOKABLE AssetUpload* createUpload(const QByteArray& data, const QString& extension);
private slots:
void handleAssetGetInfoReply(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);

View file

@ -14,10 +14,8 @@
#include <algorithm>
#include <QtCore/QThread>
#include <QtNetwork/QAbstractNetworkCache>
#include "AssetClient.h"
#include "NetworkAccessManager.h"
#include "NetworkLogging.h"
#include "NodeList.h"
#include "ResourceCache.h"
@ -41,14 +39,14 @@ void AssetRequest::start() {
}
// Try to load from cache
if (loadFromCache()) {
_data = loadFromCache(getUrl());
if (!_data.isNull()) {
_info.hash = _hash;
_info.size = _data.size();
_error = NoError;
_state = Finished;
emit finished(this);
qCDebug(asset_client) << getUrl().toDisplayString() << "loaded from disk cache.";
return;
}
@ -112,9 +110,7 @@ void AssetRequest::start() {
_totalReceived += data.size();
emit progress(_totalReceived, _info.size);
if (saveToCache(data)) {
qCDebug(asset_client) << getUrl().toDisplayString() << "saved to disk cache";
}
saveToCache(getUrl(), data);
} else {
// hash doesn't match - we have an error
_error = HashVerificationFailed;
@ -131,51 +127,3 @@ void AssetRequest::start() {
});
});
}
QUrl AssetRequest::getUrl() const {
if (!_extension.isEmpty()) {
return QUrl(QString("%1:%2.%3").arg(URL_SCHEME_ATP, _hash, _extension));
} else {
return QUrl(QString("%1:%2").arg(URL_SCHEME_ATP, _hash));
}
}
bool AssetRequest::loadFromCache() {
if (auto cache = NetworkAccessManager::getInstance().cache()) {
auto url = getUrl();
if (auto ioDevice = cache->data(url)) {
_data = ioDevice->readAll();
return true;
} else {
qCDebug(asset_client) << url.toDisplayString() << "not in disk cache";
}
} else {
qCWarning(asset_client) << "No disk cache to load assets from.";
}
return false;
}
bool AssetRequest::saveToCache(const QByteArray& file) const {
if (auto cache = NetworkAccessManager::getInstance().cache()) {
auto url = getUrl();
if (!cache->metaData(url).isValid()) {
QNetworkCacheMetaData metaData;
metaData.setUrl(url);
metaData.setSaveToDisk(true);
metaData.setLastModified(QDateTime::currentDateTime());
metaData.setExpirationDate(QDateTime()); // Never expires
if (auto ioDevice = cache->prepare(metaData)) {
ioDevice->write(file);
cache->insert(ioDevice);
return true;
}
qCWarning(asset_client) << "Could not save" << url.toDisplayString() << "to disk cache.";
}
} else {
qCWarning(asset_client) << "No disk cache to save assets to.";
}
return false;
}

View file

@ -46,16 +46,13 @@ public:
const QByteArray& getData() const { return _data; }
const State& getState() const { return _state; }
const Error& getError() const { return _error; }
QUrl getUrl() const;
QUrl getUrl() const { return ::getATPUrl(_hash, _extension); }
signals:
void finished(AssetRequest* thisRequest);
void progress(qint64 totalReceived, qint64 total);
private:
bool loadFromCache();
bool saveToCache(const QByteArray& file) const;
State _state = NotStarted;
Error _error = NoError;
AssetInfo _info;

View file

@ -24,7 +24,7 @@ void AssetResourceRequest::doSend() {
// Make request to atp
auto assetClient = DependencyManager::get<AssetClient>();
auto parts = _url.path().split(".", QString::SkipEmptyParts);
auto hash = parts[0];
auto hash = parts.length() > 0 ? parts[0] : "";
auto extension = parts.length() > 1 ? parts[1] : "";
if (hash.length() != SHA256_HASH_HEX_LENGTH) {

View file

@ -17,59 +17,94 @@
#include "AssetClient.h"
#include "NetworkLogging.h"
AssetUpload::AssetUpload(QObject* object, const QString& filename) :
const QString AssetUpload::PERMISSION_DENIED_ERROR = "You do not have permission to upload content to this asset-server.";
AssetUpload::AssetUpload(const QByteArray& data, const QString& extension) :
_data(data),
_extension(extension)
{
}
AssetUpload::AssetUpload(const QString& filename) :
_filename(filename)
{
}
QString AssetUpload::getErrorString() const {
// figure out the right error message for error
switch (_error) {
case AssetUpload::PermissionDenied:
return PERMISSION_DENIED_ERROR;
case AssetUpload::TooLarge:
return "The uploaded content was too large and could not be stored in the asset-server.";
case AssetUpload::FileOpenError:
return "The file could not be opened. Please check your permissions and try again.";
case AssetUpload::NetworkError:
return "The file could not be opened. Please check your network connectivity.";
default:
// not handled, do not show a message box
return QString();
}
}
void AssetUpload::start() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "start", Qt::AutoConnection);
QMetaObject::invokeMethod(this, "start");
return;
}
// try to open the file at the given filename
QFile file { _filename };
if (file.open(QIODevice::ReadOnly)) {
if (_data.isEmpty() && !_filename.isEmpty()) {
// try to open the file at the given filename
QFile file { _filename };
// file opened, read the data and grab the extension
_extension = QFileInfo(_filename).suffix();
auto data = file.readAll();
// ask the AssetClient to upload the asset and emit the proper signals from the passed callback
auto assetClient = DependencyManager::get<AssetClient>();
qCDebug(asset_client) << "Attempting to upload" << _filename << "to asset-server.";
assetClient->uploadAsset(data, _extension, [this](bool responseReceived, AssetServerError error, const QString& hash){
if (!responseReceived) {
_error = NetworkError;
} else {
switch (error) {
case AssetServerError::NoError:
_error = NoError;
break;
case AssetServerError::AssetTooLarge:
_error = TooLarge;
break;
case AssetServerError::PermissionDenied:
_error = PermissionDenied;
break;
default:
_error = FileOpenError;
break;
}
}
emit finished(this, hash);
});
} else {
// we couldn't open the file - set the error result
_error = FileOpenError;
// emit that we are done
emit finished(this, QString());
if (file.open(QIODevice::ReadOnly)) {
// file opened, read the data and grab the extension
_extension = QFileInfo(_filename).suffix();
_data = file.readAll();
} else {
// we couldn't open the file - set the error result
_error = FileOpenError;
// emit that we are done
emit finished(this, QString());
}
}
// ask the AssetClient to upload the asset and emit the proper signals from the passed callback
auto assetClient = DependencyManager::get<AssetClient>();
if (!_filename.isEmpty()) {
qCDebug(asset_client) << "Attempting to upload" << _filename << "to asset-server.";
}
assetClient->uploadAsset(_data, _extension, [this](bool responseReceived, AssetServerError error, const QString& hash){
if (!responseReceived) {
_error = NetworkError;
} else {
switch (error) {
case AssetServerError::NoError:
_error = NoError;
break;
case AssetServerError::AssetTooLarge:
_error = TooLarge;
break;
case AssetServerError::PermissionDenied:
_error = PermissionDenied;
break;
default:
_error = FileOpenError;
break;
}
}
if (_error == NoError && hash == hashData(_data).toHex()) {
saveToCache(getATPUrl(hash, _extension), _data);
}
emit finished(this, hash);
});
}

View file

@ -35,13 +35,17 @@ public:
FileOpenError
};
AssetUpload(QObject* parent, const QString& filename);
static const QString PERMISSION_DENIED_ERROR;
AssetUpload(const QString& filename);
AssetUpload(const QByteArray& data, const QString& extension);
Q_INVOKABLE void start();
const QString& getFilename() const { return _filename; }
const QString& getExtension() const { return _extension; }
const Error& getError() const { return _error; }
QString getErrorString() const;
signals:
void finished(AssetUpload* upload, const QString& hash);
@ -49,6 +53,7 @@ signals:
private:
QString _filename;
QByteArray _data;
QString _extension;
Error _error;
};

View file

@ -0,0 +1,69 @@
//
// AssetUtils.h
// libraries/networking/src
//
// Created by Clément Brisset on 10/12/2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AssetUtils.h"
#include <QtCore/QCryptographicHash>
#include <QtNetwork/QAbstractNetworkCache>
#include "NetworkAccessManager.h"
#include "NetworkLogging.h"
#include "ResourceManager.h"
QUrl getATPUrl(const QString& hash, const QString& extension) {
if (!extension.isEmpty()) {
return QUrl(QString("%1:%2.%3").arg(URL_SCHEME_ATP, hash, extension));
} else {
return QUrl(QString("%1:%2").arg(URL_SCHEME_ATP, hash));
}
}
QByteArray hashData(const QByteArray& data) {
return QCryptographicHash::hash(data, QCryptographicHash::Sha256);
}
QByteArray loadFromCache(const QUrl& url) {
if (auto cache = NetworkAccessManager::getInstance().cache()) {
if (auto ioDevice = cache->data(url)) {
qCDebug(asset_client) << url.toDisplayString() << "loaded from disk cache.";
return ioDevice->readAll();
} else {
qCDebug(asset_client) << url.toDisplayString() << "not in disk cache";
}
} else {
qCWarning(asset_client) << "No disk cache to load assets from.";
}
return QByteArray();
}
bool saveToCache(const QUrl& url, const QByteArray& file) {
if (auto cache = NetworkAccessManager::getInstance().cache()) {
if (!cache->metaData(url).isValid()) {
QNetworkCacheMetaData metaData;
metaData.setUrl(url);
metaData.setSaveToDisk(true);
metaData.setLastModified(QDateTime::currentDateTime());
metaData.setExpirationDate(QDateTime()); // Never expires
if (auto ioDevice = cache->prepare(metaData)) {
ioDevice->write(file);
cache->insert(ioDevice);
qCDebug(asset_client) << url.toDisplayString() << "saved to disk cache";
return true;
}
qCWarning(asset_client) << "Could not save" << url.toDisplayString() << "to disk cache.";
}
} else {
qCWarning(asset_client) << "No disk cache to save assets to.";
}
return false;
}
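The cache helpers factored out of AssetRequest are keyed by the ATP URL built from the hash and optional extension. A short sketch of the round trip through the functions defined above; the content and extension are illustrative:
QByteArray content = "some asset bytes";
QString hash = QString(hashData(content).toHex()); // SHA-256 hex, as the asset-server expects
QUrl url = getATPUrl(hash, "txt");                 // atp:<hash>.txt
if (saveToCache(url, content)) {
    QByteArray cached = loadFromCache(url);        // same bytes back, if a disk cache is configured
}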

View file

@ -12,10 +12,11 @@
#ifndef hifi_AssetUtils_h
#define hifi_AssetUtils_h
#include <QtCore/QCryptographicHash>
#include <cstdint>
#include <QtCore/QByteArray>
#include <QtCore/QUrl>
using MessageID = uint32_t;
using DataOffset = int64_t;
@ -31,8 +32,11 @@ enum AssetServerError : uint8_t {
PermissionDenied
};
const QString ATP_SCHEME = "atp";
QUrl getATPUrl(const QString& hash, const QString& extension = QString());
inline QByteArray hashData(const QByteArray& data) { return QCryptographicHash::hash(data, QCryptographicHash::Sha256); }
QByteArray hashData(const QByteArray& data);
QByteArray loadFromCache(const QUrl& url);
bool saveToCache(const QUrl& url, const QByteArray& file);
#endif

View file

@ -319,10 +319,10 @@ void Resource::attemptRequest() {
void Resource::finishedLoading(bool success) {
if (success) {
qDebug() << "Finished loading:" << _url;
qDebug().noquote() << "Finished loading:" << _url.toDisplayString();
_loaded = true;
} else {
qDebug() << "Failed to load:" << _url;
qDebug().noquote() << "Failed to load:" << _url.toDisplayString();
_failedToLoad = true;
}
_loadPriorities.clear();
@ -339,13 +339,13 @@ void Resource::makeRequest() {
_request = ResourceManager::createResourceRequest(this, _activeUrl);
if (!_request) {
qDebug() << "Failed to get request for " << _url;
qDebug().noquote() << "Failed to get request for" << _url.toDisplayString();
ResourceCache::requestCompleted(this);
finishedLoading(false);
return;
}
qDebug() << "Starting request for: " << _url;
qDebug().noquote() << "Starting request for:" << _url.toDisplayString();
connect(_request, &ResourceRequest::progress, this, &Resource::handleDownloadProgress);
connect(_request, &ResourceRequest::finished, this, &Resource::handleReplyFinished);
@ -368,7 +368,8 @@ void Resource::handleReplyFinished() {
auto result = _request->getResult();
if (result == ResourceRequest::Success) {
_data = _request->getData();
qDebug() << "Request finished for " << _url << ", " << _activeUrl;
auto extraInfo = _url == _activeUrl ? "" : QString(", %1").arg(_activeUrl.toDisplayString());
qDebug().noquote() << QString("Request finished for %1%2").arg(_url.toDisplayString(), extraInfo);
finishedLoading(true);
emit loaded(_data);

View file

@ -52,7 +52,6 @@
};
MasterReset = function() {
var resetKey = "resetMe";
var GRABBABLE_DATA_KEY = "grabbableKey";
@ -320,6 +319,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true
},
grabbableKey: {
invertSolidWhileHeld: true
}
})
});
@ -379,7 +381,8 @@
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/target.fbx';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/target_collision_hull.obj';
var RESET_DISTANCE = 1;
var MINIMUM_MOVE_LENGTH = 0.05;
var RESET_DISTANCE = 0.5;
var TARGET_USER_DATA_KEY = 'hifi-ping_pong_target';
var NUMBER_OF_TARGETS = 6;
var TARGETS_PER_ROW = 3;
@ -393,7 +396,6 @@
var VERTICAL_SPACING = TARGET_DIMENSIONS.y + 0.5;
var HORIZONTAL_SPACING = TARGET_DIMENSIONS.z + 0.5;
var startPosition = {
x: 548.68,
y: 497.30,
@ -407,11 +409,6 @@
type: 'Box',
position: startPosition,
dimensions: TARGET_DIMENSIONS,
color: {
red: 0,
green: 255,
blue: 0
},
rotation: rotation,
visible: false,
collisionsWillMove: false,
@ -419,6 +416,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true
},
grabbableKey: {
grabbable: false
}
})
});
@ -427,6 +427,8 @@
var originalPositions = [];
var lastPositions = [];
function addTargets() {
var i;
var row = -1;
@ -443,6 +445,7 @@
position.y = startPosition.y - (row * VERTICAL_SPACING);
originalPositions.push(position);
lastPositions.push(position);
var targetProperties = {
name: 'Target',
@ -458,6 +461,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true
},
grabbableKey: {
grabbable: false
}
})
};
@ -474,7 +480,11 @@
var distance = Vec3.subtract(originalPosition, currentPosition);
var length = Vec3.length(distance);
if (length > RESET_DISTANCE) {
var moving = Vec3.length(Vec3.subtract(currentPosition, lastPositions[index]));
lastPositions[index] = currentPosition;
if (length > RESET_DISTANCE && moving < MINIMUM_MOVE_LENGTH) {
Entities.deleteEntity(target);
@ -492,11 +502,14 @@
userData: JSON.stringify({
resetMe: {
resetMe: true
},
grabbableKey: {
grabbable: false
}
})
};
var target = Entities.addEntity(targetProperties);
targets[index] = target;
targets[index] = Entities.addEntity(targetProperties);
}
});
@ -554,6 +567,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true
},
grabbableKey: {
grabbable: false
}
})
});
@ -589,7 +605,11 @@
userData: JSON.stringify({
resetMe: {
resetMe: true
},
grabbableKey: {
invertSolidWhileHeld: true
}
})
});
@ -658,6 +678,7 @@
green: 146,
blue: 24
},
isSpotlight: false,
userData: JSON.stringify({
resetMe: {
resetMe: true,
@ -685,6 +706,7 @@
green: 146,
blue: 24
},
isSpotlight: false,
userData: JSON.stringify({
resetMe: {
resetMe: true,
@ -734,7 +756,6 @@
var sconceLight3 = Entities.addEntity({
type: "Light",
position: {
@ -755,6 +776,7 @@
green: 146,
blue: 24
},
isSpotlight: false,
userData: JSON.stringify({
resetMe: {
resetMe: true,
@ -783,6 +805,7 @@
green: 146,
blue: 24
},
isSpotlight: false,
userData: JSON.stringify({
resetMe: {
resetMe: true,
@ -810,6 +833,7 @@
green: 146,
blue: 24
},
isSpotlight: false,
userData: JSON.stringify({
resetMe: {
resetMe: true,
@ -853,6 +877,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true,
},
grabbableKey: {
invertSolidWhileHeld: true
}
})
};
@ -888,7 +915,7 @@
y: 1.13,
z: 0.2
},
rotation: rotation2,
rotation: rotation,
collisionsWillMove: true,
gravity: {
x: 0,
@ -942,6 +969,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true,
},
grabbableKey: {
invertSolidWhileHeld: true
}
})
});
@ -1016,6 +1046,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true,
},
grabbableKey: {
invertSolidWhileHeld: true
}
})
});
@ -1055,6 +1088,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true,
},
grabbableKey: {
invertSolidWhileHeld: true
}
})
});
@ -1092,6 +1128,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true,
},
grabbableKey: {
invertSolidWhileHeld: true
}
})
});
@ -1128,6 +1167,9 @@
userData: JSON.stringify({
resetMe: {
resetMe: true,
},
grabbableKey: {
invertSolidWhileHeld: true
}
})
});
@ -1256,7 +1298,7 @@
y: 0.05,
z: 0.25
}
},];
}, ];
var modelURL, entity;
for (i = 0; i < blockTypes.length; i++) {
@ -1305,7 +1347,6 @@
Script.scriptEnding.connect(cleanup);
}
};
// entity scripts always need to return a newly constructed object of our type
return new ResetSwitch();
});
});

File diff suppressed because it is too large Load diff