Merge branch 'master' of https://github.com/highfidelity/hifi into orange

samcake 2016-03-07 19:07:13 -08:00
commit 3ed5a2afe2
30 changed files with 761 additions and 296 deletions

View file

@ -139,13 +139,13 @@ AssignmentClientApp::AssignmentClientApp(int argc, char* argv[]) :
httpStatusPort = parser.value(httpStatusPortOption).toUShort();
}
QDir logDirectory { "." };
QString logDirectory;
if (parser.isSet(logDirectoryOption)) {
logDirectory = parser.value(logDirectoryOption);
} else {
logDirectory = QStandardPaths::writableLocation(QStandardPaths::DataLocation);
}
Assignment::Type requestAssignmentType = Assignment::AllTypes;
if (argumentVariantMap.contains(ASSIGNMENT_TYPE_OVERRIDE_OPTION)) {
requestAssignmentType = (Assignment::Type) argumentVariantMap.value(ASSIGNMENT_TYPE_OVERRIDE_OPTION).toInt();

View file

@ -33,8 +33,7 @@ AssignmentClientMonitor::AssignmentClientMonitor(const unsigned int numAssignmen
const unsigned int maxAssignmentClientForks,
Assignment::Type requestAssignmentType, QString assignmentPool,
quint16 listenPort, QUuid walletUUID, QString assignmentServerHostname,
quint16 assignmentServerPort, quint16 httpStatusServerPort, QDir logDirectory) :
_logDirectory(logDirectory),
quint16 assignmentServerPort, quint16 httpStatusServerPort, QString logDirectory) :
_httpManager(QHostAddress::LocalHost, httpStatusServerPort, "", this),
_numAssignmentClientForks(numAssignmentClientForks),
_minAssignmentClientForks(minAssignmentClientForks),
@ -48,6 +47,11 @@ AssignmentClientMonitor::AssignmentClientMonitor(const unsigned int numAssignmen
{
qDebug() << "_requestAssignmentType =" << _requestAssignmentType;
if (!logDirectory.isEmpty()) {
_wantsChildFileLogging = true;
_logDirectory = QDir(logDirectory);
}
// start the Logging class with the parent's target name
LogHandler::getInstance().setTargetName(ASSIGNMENT_CLIENT_MONITOR_TARGET_NAME);
@ -159,6 +163,10 @@ void AssignmentClientMonitor::spawnChildClient() {
_childArguments.append("--" + ASSIGNMENT_CLIENT_MONITOR_PORT_OPTION);
_childArguments.append(QString::number(DependencyManager::get<NodeList>()->getLocalSockAddr().getPort()));
QString nowString, stdoutFilenameTemp, stderrFilenameTemp, stdoutPathTemp, stderrPathTemp;
if (_wantsChildFileLogging) {
// Setup log files
const QString DATETIME_FORMAT = "yyyyMMdd.hh.mm.ss.zzz";
@ -167,26 +175,30 @@ void AssignmentClientMonitor::spawnChildClient() {
_logDirectory.mkpath(_logDirectory.absolutePath());
}
auto nowString = QDateTime::currentDateTime().toString(DATETIME_FORMAT);
auto stdoutFilenameTemp = QString("ac-%1-stdout.txt").arg(nowString);
auto stderrFilenameTemp = QString("ac-%1-stderr.txt").arg(nowString);
QString stdoutPathTemp = _logDirectory.absoluteFilePath(stdoutFilenameTemp);
QString stderrPathTemp = _logDirectory.absoluteFilePath(stderrFilenameTemp);
nowString = QDateTime::currentDateTime().toString(DATETIME_FORMAT);
stdoutFilenameTemp = QString("ac-%1-stdout.txt").arg(nowString);
stderrFilenameTemp = QString("ac-%1-stderr.txt").arg(nowString);
stdoutPathTemp = _logDirectory.absoluteFilePath(stdoutFilenameTemp);
stderrPathTemp = _logDirectory.absoluteFilePath(stderrFilenameTemp);
// reset our output and error files
assignmentClient->setStandardOutputFile(stdoutPathTemp);
assignmentClient->setStandardErrorFile(stderrPathTemp);
}
// make sure that the output from the child process appears in our output
assignmentClient->setProcessChannelMode(QProcess::ForwardedChannels);
assignmentClient->start(QCoreApplication::applicationFilePath(), _childArguments);
QString stdoutPath, stderrPath;
if (_wantsChildFileLogging) {
// Update log path to use PID in filename
auto stdoutFilename = QString("ac-%1_%2-stdout.txt").arg(nowString).arg(assignmentClient->processId());
auto stderrFilename = QString("ac-%1_%2-stderr.txt").arg(nowString).arg(assignmentClient->processId());
QString stdoutPath = _logDirectory.absoluteFilePath(stdoutFilename);
QString stderrPath = _logDirectory.absoluteFilePath(stderrFilename);
stdoutPath = _logDirectory.absoluteFilePath(stdoutFilename);
stderrPath = _logDirectory.absoluteFilePath(stderrFilename);
qDebug() << "Renaming " << stdoutPathTemp << " to " << stdoutPath;
if (!_logDirectory.rename(stdoutFilenameTemp, stdoutFilename)) {
@ -204,6 +216,7 @@ void AssignmentClientMonitor::spawnChildClient() {
qDebug() << "Child stdout being written to: " << stdoutFilename;
qDebug() << "Child stderr being written to: " << stderrFilename;
}
if (assignmentClient->processId() > 0) {
auto pid = assignmentClient->processId();
@ -212,6 +225,7 @@ void AssignmentClientMonitor::spawnChildClient() {
this, [this, pid]() { childProcessFinished(pid); });
qDebug() << "Spawned a child client with PID" << assignmentClient->processId();
_childProcesses.insert(assignmentClient->processId(), { assignmentClient, stdoutPath, stderrPath });
}
}

View file

@ -38,7 +38,7 @@ public:
AssignmentClientMonitor(const unsigned int numAssignmentClientForks, const unsigned int minAssignmentClientForks,
const unsigned int maxAssignmentClientForks, Assignment::Type requestAssignmentType,
QString assignmentPool, quint16 listenPort, QUuid walletUUID, QString assignmentServerHostname,
quint16 assignmentServerPort, quint16 httpStatusServerPort, QDir logDirectory);
quint16 assignmentServerPort, quint16 httpStatusServerPort, QString logDirectory);
~AssignmentClientMonitor();
void stopChildProcesses();
@ -73,6 +73,8 @@ private:
quint16 _assignmentServerPort;
QMap<qint64, ACProcess> _childProcesses;
bool _wantsChildFileLogging { false };
};
#endif // hifi_AssignmentClientMonitor_h

View file

@ -1049,9 +1049,14 @@ function MyController(hand) {
}
this.distanceHolding = function() {
var handControllerPosition = (this.hand === RIGHT_HAND) ? MyAvatar.rightHandPosition : MyAvatar.leftHandPosition;
var controllerHandInput = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
var handRotation = Quat.multiply(MyAvatar.orientation, Controller.getPoseValue(controllerHandInput).rotation);
// controller pose is in avatar frame
var avatarControllerPose = Controller.getPoseValue((this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand);
// transform it into world frame
var controllerPosition = Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, avatarControllerPose.translation));
var controllerRotation = Quat.multiply(MyAvatar.orientation, avatarControllerPose.rotation);
var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES);
var now = Date.now();
@ -1059,12 +1064,10 @@ function MyController(hand) {
this.currentObjectPosition = grabbedProperties.position;
this.currentObjectRotation = grabbedProperties.rotation;
this.currentObjectTime = now;
this.handRelativePreviousPosition = Vec3.subtract(handControllerPosition, MyAvatar.position);
this.handPreviousRotation = handRotation;
this.currentCameraOrientation = Camera.orientation;
// compute a constant based on the initial conditions which we use below to exaggerate hand motion onto the held object
this.radiusScalar = Math.log(Vec3.distance(this.currentObjectPosition, handControllerPosition) + 1.0);
this.radiusScalar = Math.log(Vec3.distance(this.currentObjectPosition, controllerPosition) + 1.0);
if (this.radiusScalar < 1.0) {
this.radiusScalar = 1.0;
}
@ -1094,10 +1097,10 @@ function MyController(hand) {
this.callEntityMethodOnGrabbed("startDistanceGrab");
}
this.currentAvatarPosition = MyAvatar.position;
this.currentAvatarOrientation = MyAvatar.orientation;
this.turnOffVisualizations();
this.previousControllerPosition = controllerPosition;
this.previousControllerRotation = controllerRotation;
};
this.continueDistanceHolding = function() {
@ -1109,10 +1112,13 @@ function MyController(hand) {
this.heartBeat(this.grabbedEntity);
var handPosition = this.getHandPosition();
var handControllerPosition = (this.hand === RIGHT_HAND) ? MyAvatar.rightHandPosition : MyAvatar.leftHandPosition;
var controllerHandInput = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
var handRotation = Quat.multiply(MyAvatar.orientation, Controller.getPoseValue(controllerHandInput).rotation);
// controller pose is in avatar frame
var avatarControllerPose = Controller.getPoseValue((this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand);
// transform it into world frame
var controllerPosition = Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, avatarControllerPose.translation));
var controllerRotation = Quat.multiply(MyAvatar.orientation, avatarControllerPose.rotation);
var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES);
var grabbableData = getEntityCustomData(GRABBABLE_DATA_KEY, this.grabbedEntity, DEFAULT_GRABBABLE_DATA);
@ -1125,66 +1131,27 @@ function MyController(hand) {
return;
}
var now = Date.now();
this.currentObjectTime = now;
// the action was set up on a previous call. update the targets.
var radius = Vec3.distance(this.currentObjectPosition, handControllerPosition) *
var radius = Vec3.distance(this.currentObjectPosition, controllerPosition) *
this.radiusScalar * DISTANCE_HOLDING_RADIUS_FACTOR;
if (radius < 1.0) {
radius = 1.0;
}
// how far did avatar move this timestep?
var currentPosition = MyAvatar.position;
var avatarDeltaPosition = Vec3.subtract(currentPosition, this.currentAvatarPosition);
this.currentAvatarPosition = currentPosition;
// scale delta controller hand movement by radius.
var handMoved = Vec3.multiply(Vec3.subtract(controllerPosition, this.previousControllerPosition), radius);
// How far did the avatar turn this timestep?
// Note: The following code is too long because we need a Quat.quatBetween() function
// that returns the minimum quaternion between two quaternions.
var currentOrientation = MyAvatar.orientation;
if (Quat.dot(currentOrientation, this.currentAvatarOrientation) < 0.0) {
var negativeCurrentOrientation = {
x: -currentOrientation.x,
y: -currentOrientation.y,
z: -currentOrientation.z,
w: -currentOrientation.w
};
var avatarDeltaOrientation = Quat.multiply(negativeCurrentOrientation, Quat.inverse(this.currentAvatarOrientation));
} else {
var avatarDeltaOrientation = Quat.multiply(currentOrientation, Quat.inverse(this.currentAvatarOrientation));
}
var handToAvatar = Vec3.subtract(handControllerPosition, this.currentAvatarPosition);
var objectToAvatar = Vec3.subtract(this.currentObjectPosition, this.currentAvatarPosition);
var handMovementFromTurning = Vec3.subtract(Quat.multiply(avatarDeltaOrientation, handToAvatar), handToAvatar);
var objectMovementFromTurning = Vec3.subtract(Quat.multiply(avatarDeltaOrientation, objectToAvatar), objectToAvatar);
this.currentAvatarOrientation = currentOrientation;
// how far did hand move this timestep?
var handMoved = Vec3.subtract(handToAvatar, this.handRelativePreviousPosition);
this.handRelativePreviousPosition = handToAvatar;
// magnify the hand movement but not the change from avatar movement & rotation
handMoved = Vec3.subtract(handMoved, handMovementFromTurning);
var superHandMoved = Vec3.multiply(handMoved, radius);
// Move the object by the magnified amount and then by amount from avatar movement & rotation
var newObjectPosition = Vec3.sum(this.currentObjectPosition, superHandMoved);
newObjectPosition = Vec3.sum(newObjectPosition, avatarDeltaPosition);
newObjectPosition = Vec3.sum(newObjectPosition, objectMovementFromTurning);
var deltaPosition = Vec3.subtract(newObjectPosition, this.currentObjectPosition); // meters
var now = Date.now();
var deltaTime = (now - this.currentObjectTime) / MSEC_PER_SEC; // convert to seconds
this.currentObjectPosition = newObjectPosition;
this.currentObjectTime = now;
// this doubles hand rotation
var handChange = Quat.multiply(Quat.slerp(this.handPreviousRotation,
handRotation,
// double delta controller rotation
var handChange = Quat.multiply(Quat.slerp(this.previousControllerRotation,
controllerRotation,
DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR),
Quat.inverse(this.handPreviousRotation));
this.handPreviousRotation = handRotation;
Quat.inverse(this.previousControllerRotation));
// update the currentObject position and rotation.
this.currentObjectPosition = Vec3.sum(this.currentObjectPosition, handMoved);
this.currentObjectRotation = Quat.multiply(handChange, this.currentObjectRotation);
this.callEntityMethodOnGrabbed("continueDistantGrab");
@ -1195,6 +1162,7 @@ function MyController(hand) {
var handControllerData = getEntityCustomData('handControllerKey', this.grabbedEntity, defaultMoveWithHeadData);
var objectToAvatar = Vec3.subtract(this.currentObjectPosition, MyAvatar.position);
if (handControllerData.disableMoveWithHead !== true) {
// mix in head motion
if (MOVE_WITH_HEAD) {
@ -1234,6 +1202,7 @@ function MyController(hand) {
}
}
var handPosition = this.getHandPosition();
//visualizations
if (USE_ENTITY_LINES_FOR_MOVING === true) {
@ -1265,6 +1234,9 @@ function MyController(hand) {
} else {
print("continueDistanceHolding -- updateAction failed");
}
this.previousControllerPosition = controllerPosition;
this.previousControllerRotation = controllerRotation;
};
this.setupHoldAction = function() {
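The rework above drops the old avatar-relative bookkeeping (handRelativePreviousPosition, currentAvatarPosition/Orientation and the turning compensation) in favor of reading the controller pose in avatar frame and converting it to world frame once per update; deltas are then taken against previousControllerPosition and previousControllerRotation. A minimal sketch of that conversion, using only the Controller, Vec3, Quat and MyAvatar calls that appear in the script (the helper name itself is hypothetical):

function controllerPoseInWorldFrame(hand) {
    var standardHand = (hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
    var avatarPose = Controller.getPoseValue(standardHand); // pose in avatar frame
    return {
        // rotate the avatar-frame translation into world orientation, then offset by the avatar position
        position: Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, avatarPose.translation)),
        rotation: Quat.multiply(MyAvatar.orientation, avatarPose.rotation)
    };
}

Because the world-frame pose already folds in avatar movement and turning, the removed block that compensated for avatar deltas by hand is no longer needed.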

View file

@ -104,8 +104,9 @@
green: 255,
blue: 255
},
intensity: 2,
exponent: 0.3,
intensity: 1,
falloffRadius: 0.9,
exponent: 0.5,
cutoff: 20,
lifetime: LIFETIME,
position: lightTransform.p,
@ -128,6 +129,8 @@
blue: 255
},
exponent: 0,
intensity: 1.0,
falloffRadius: 0.3,
lifetime: LIFETIME,
cutoff: 90, // in degrees
position: glowLightTransform.p,

View file

@ -13,8 +13,8 @@ Script.include("../../libraries/utils.js");
var scriptURL = Script.resolvePath('pingPongGun.js');
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun.fbx'
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_convex.obj';
var MODEL_URL = 'http://hifi-content.s3.amazonaws.com/alan/dev/Pingpong-Gun-New.fbx'
var COLLISION_HULL_URL = 'http://hifi-content.s3.amazonaws.com/alan/dev/Pingpong-Gun-New.obj';
var COLLISION_SOUND_URL = 'http://hifi-public.s3.amazonaws.com/sounds/Collisions-otherorganic/plastic_impact.L.wav';
var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
@ -40,20 +40,30 @@ var pingPongGun = Entities.addEntity({
grabbableKey: {
invertSolidWhileHeld: true
},
wearable:{joints:{RightHand:[{x:0.1177130937576294,
wearable: {
joints: {
RightHand: [{
x: 0.1177130937576294,
y: 0.12922893464565277,
z:0.08307232707738876},
{x:0.4934672713279724,
z: 0.08307232707738876
}, {
x: 0.4934672713279724,
y: 0.3605862259864807,
z: 0.6394805908203125,
w:-0.4664038419723511}],
LeftHand:[{x:0.09151676297187805,
w: -0.4664038419723511
}],
LeftHand: [{
x: 0.09151676297187805,
y: 0.13639454543590546,
z:0.09354984760284424},
{x:-0.19628101587295532,
z: 0.09354984760284424
}, {
x: -0.19628101587295532,
y: 0.6418180465698242,
z: 0.2830369472503662,
w:0.6851521730422974}]}}
w: 0.6851521730422974
}]
}
}
})
});

View file

@ -0,0 +1,29 @@
//
// fireworksLaunchButtonEntityScript.js
//
// Created by Eric Levin on 3/7/2016
// Copyright 2016 High Fidelity, Inc.
//
// This is the chapter 1 entity script of the fireworks tutorial (https://docs.highfidelity.com/docs/fireworks-scripting-tutorial)
//
// Distributed under the Apache License, Version 2.0.
(function() {
Script.include("../../libraries/utils.js");
var _this;
Fireworks = function() {
_this = this;
};
Fireworks.prototype = {
preload: function(entityID) {
_this.entityID = entityID;
}
};
// entity scripts always need to return a newly constructed object of our type
return new Fireworks();
});

View file

@ -0,0 +1,37 @@
//
// fireworksLaunchButtonSpawner.js
//
// Created by Eric Levin on 3/7/2016
// Copyright 2016 High Fidelity, Inc.
//
// This is the chapter 1 interface script of the fireworks tutorial (https://docs.highfidelity.com/docs/fireworks-scripting-tutorial)
//
// Distributed under the Apache License, Version 2.0.
var orientation = Camera.getOrientation();
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));
var SCRIPT_URL = Script.resolvePath("fireworksLaunchButtonEntityScript.js");
var MODEL_URL = "https://s3-us-west-1.amazonaws.com/hifi-content/eric/models/Launch-Button.fbx";
var launchButton = Entities.addEntity({
type: "Model",
name: "hifi-launch-button",
modelURL: MODEL_URL,
position: center,
dimensions: {
x: 0.98,
y: 1.16,
z: 0.98
},
script: SCRIPT_URL,
})
function cleanup() {
Entities.deleteEntity(launchButton);
}
Script.scriptEnding.connect(cleanup);

View file

@ -0,0 +1,92 @@
//
// fireworksLaunchButtonEntityScript.js
//
// Created by Eric Levin on 3/7/2016
// Copyright 2016 High Fidelity, Inc.
//
// This is the chapter 2 entity script of the fireworks tutorial (https://docs.highfidelity.com/docs/fireworks-scripting-tutorial)
//
// Distributed under the Apache License, Version 2.0.
(function() {
Script.include("../../libraries/utils.js");
var _this;
Fireworks = function() {
_this = this;
_this.launchSound = SoundCache.getSound("https://s3-us-west-1.amazonaws.com/hifi-content/eric/Sounds/missle+launch.wav");
};
Fireworks.prototype = {
startNearTrigger: function() {
_this.shootFirework(_this.position);
},
startFarTrigger: function() {
_this.shootFirework(_this.position);
},
clickReleaseOnEntity: function() {
_this.shootFirework(_this.position);
},
shootFirework: function(launchPosition) {
Audio.playSound(_this.launchSound, {
position: launchPosition,
volume: 0.5
});
var smoke = Entities.addEntity({
type: "ParticleEffect",
position: _this.position,
velocity: {x: 0, y: 3, z: 0},
lifespan: 10,
lifetime: 20,
isEmitting: true,
name: "Smoke Trail",
maxParticles: 3000,
emitRate: 80,
emitSpeed: 0,
speedSpread: 0,
polarStart: 0,
polarFinish: 0,
azimuthStart: -3.14,
azimuthFinish: 3.14,
emitAcceleration: {
x: 0,
y: 0.01,
z: 0
},
accelerationSpread: {
x: 0.01,
y: 0,
z: 0.01
},
radiusSpread: 0.03,
particleRadius: 0.3,
radiusStart: 0.06,
radiusFinish: 0.9,
alpha: 0.1,
alphaSpread: 0,
alphaStart: 0.7,
alphaFinish: 0,
textures: "https://hifi-public.s3.amazonaws.com/alan/Particles/Particle-Sprite-Smoke-1.png",
emitterShouldTrail: true,
});
},
preload: function(entityID) {
_this.entityID = entityID;
_this.position = Entities.getEntityProperties(_this.entityID, "position").position;
}
};
// entity scripts always need to return a newly constructed object of our type
return new Fireworks();
});

View file

@ -0,0 +1,37 @@
//
// fireworksLaunchButtonSpawner.js
//
// Created by Eric Levin on 3/7/2016
// Copyright 2016 High Fidelity, Inc.
//
// This is the chapter 2 interface script of the fireworks tutorial (https://docs.highfidelity.com/docs/fireworks-scripting-tutorial)
//
// Distributed under the Apache License, Version 2.0.
var orientation = Camera.getOrientation();
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));
var SCRIPT_URL = Script.resolvePath("fireworksLaunchButtonEntityScript.js");
var MODEL_URL = "https://s3-us-west-1.amazonaws.com/hifi-content/eric/models/Launch-Button.fbx";
var launchButton = Entities.addEntity({
type: "Model",
name: "hifi-launch-button",
modelURL: MODEL_URL,
position: center,
dimensions: {
x: 0.98,
y: 1.16,
z: 0.98
},
script: SCRIPT_URL,
})
function cleanup() {
Entities.deleteEntity(launchButton);
}
Script.scriptEnding.connect(cleanup);

View file

@ -0,0 +1,164 @@
//
// fireworksLaunchButtonEntityScript.js
//
// Created by Eric Levin on 3/7/2016
// Copyright 2016 High Fidelity, Inc.
//
// This is the chapter 3 entity script of the fireworks tutorial (https://docs.highfidelity.com/docs/fireworks-scripting-tutorial)
//
// Distributed under the Apache License, Version 2.0.
(function() {
Script.include("../../libraries/utils.js");
var _this;
Fireworks = function() {
_this = this;
_this.launchSound = SoundCache.getSound("https://s3-us-west-1.amazonaws.com/hifi-content/eric/Sounds/missle+launch.wav");
_this.explosionSound = SoundCache.getSound("https://s3-us-west-1.amazonaws.com/hifi-content/eric/Sounds/fireworksExplosion.wav");
_this.TIME_TO_EXPLODE = 3000;
};
Fireworks.prototype = {
startNearTrigger: function() {
_this.shootFirework(_this.position);
},
startFarTrigger: function() {
_this.shootFirework(_this.position);
},
clickReleaseOnEntity: function() {
_this.shootFirework(_this.position);
},
shootFirework: function(launchPosition) {
Audio.playSound(_this.launchSound, {
position: launchPosition,
volume: 0.5
});
var smoke = Entities.addEntity({
type: "ParticleEffect",
position: _this.position,
velocity: {x: 0, y: 3, z: 0},
linearDamping: 0,
lifespan: 10,
lifetime: 20,
isEmitting: true,
name: "Smoke Trail",
maxParticles: 3000,
emitRate: 80,
emitSpeed: 0,
speedSpread: 0,
polarStart: 0,
polarFinish: 0,
azimuthStart: -3.14,
azimuthFinish: 3.14,
emitAcceleration: {
x: 0,
y: 0.01,
z: 0
},
accelerationSpread: {
x: 0.01,
y: 0,
z: 0.01
},
radiusSpread: 0.03,
particleRadius: 0.3,
radiusStart: 0.06,
radiusFinish: 0.9,
alpha: 0.1,
alphaSpread: 0,
alphaStart: 0.7,
alphaFinish: 0,
textures: "https://hifi-public.s3.amazonaws.com/alan/Particles/Particle-Sprite-Smoke-1.png",
emitterShouldTrail: true,
});
Script.setTimeout(function() {
var explodePosition = Entities.getEntityProperties(smoke, "position").position;
_this.explodeFirework(explodePosition);
}, _this.TIME_TO_EXPLODE);
},
explodeFirework: function(explodePosition) {
Audio.playSound(_this.explosionSound, {
position: explodePosition
});
var firework = Entities.addEntity({
name: "fireworks emitter",
position: explodePosition,
type: "ParticleEffect",
colorStart: hslToRgb({
h: Math.random(),
s: 0.5,
l: 0.7
}),
color: hslToRgb({
h: Math.random(),
s: 0.5,
l: 0.5
}),
colorFinish: hslToRgb({
h: Math.random(),
s: 0.5,
l: 0.7
}),
maxParticles: 10000,
lifetime: 20,
lifespan: randFloat(1.5, 3),
emitRate: randInt(500, 5000),
emitSpeed: randFloat(0.5, 2),
speedSpread: 0.2,
emitOrientation: Quat.fromPitchYawRollDegrees(randInt(0, 360), randInt(0, 360), randInt(0, 360)),
polarStart: 1,
polarFinish: randFloat(1.2, 3),
azimuthStart: -Math.PI,
azimuthFinish: Math.PI,
emitAcceleration: {
x: 0,
y: randFloat(-1, -0.2),
z: 0
},
accelerationSpread: {
x: Math.random(),
y: 0,
z: Math.random()
},
particleRadius: randFloat(0.001, 0.1),
radiusSpread: Math.random() * 0.1,
radiusStart: randFloat(0.001, 0.1),
radiusFinish: randFloat(0.001, 0.1),
alpha: randFloat(0.8, 1.0),
alphaSpread: randFloat(0.1, 0.2),
alphaStart: randFloat(0.7, 1.0),
alphaFinish: randFloat(0.7, 1.0),
textures: "http://ericrius1.github.io/PlatosCave/assets/star.png",
});
Script.setTimeout(function() {
Entities.editEntity(firework, {
isEmitting: false
});
}, randInt(500, 1000));
},
preload: function(entityID) {
_this.entityID = entityID;
_this.position = Entities.getEntityProperties(_this.entityID, "position").position;
}
};
// entity scripts always need to return a newly constructed object of our type
return new Fireworks();
});

View file

@ -0,0 +1,37 @@
//
// fireworksLaunchButtonSpawner.js
//
// Created by Eric Levin on 3/7/2016
// Copyright 2016 High Fidelity, Inc.
//
// This is the chapter 3 interface script of the fireworks tutorial (https://docs.highfidelity.com/docs/fireworks-scripting-tutorial)
//
// Distributed under the Apache License, Version 2.0.
var orientation = Camera.getOrientation();
orientation = Quat.safeEulerAngles(orientation);
orientation.x = 0;
orientation = Quat.fromVec3Degrees(orientation);
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(orientation)));
var SCRIPT_URL = Script.resolvePath("fireworksLaunchButtonEntityScript.js");
var MODEL_URL = "https://s3-us-west-1.amazonaws.com/hifi-content/eric/models/Launch-Button.fbx";
var launchButton = Entities.addEntity({
type: "Model",
name: "hifi-launch-button",
modelURL: MODEL_URL,
position: center,
dimensions: {
x: 0.98,
y: 1.16,
z: 0.98
},
script: SCRIPT_URL,
})
function cleanup() {
Entities.deleteEntity(launchButton);
}
Script.scriptEnding.connect(cleanup);

View file

@ -58,21 +58,12 @@ TreeView {
text: styleData.isExpanded ? hifi.glyphs.caratDn : hifi.glyphs.caratR
size: hifi.fontSizes.carat
color: colorScheme == hifi.colorSchemes.light
? (styleData.selected
? hifi.colors.black
: (iconArea.pressed ? hifi.colors.white : hifi.colors.baseGrayHighlight))
: (styleData.selected
? hifi.colors.black
: (iconArea.pressed ? hifi.colors.white : hifi.colors.lightGrayText))
? (styleData.selected ? hifi.colors.black : hifi.colors.baseGrayHighlight)
: (styleData.selected ? hifi.colors.black : hifi.colors.lightGrayText)
anchors {
left: parent ? parent.left : undefined
leftMargin: hifi.dimensions.tablePadding / 2
}
MouseArea {
id: iconArea
anchors.fill: parent
propagateComposedEvents: true
}
}
handle: Item {

View file

@ -2946,7 +2946,7 @@ void Application::updateMyAvatarLookAtPosition() {
if (isHMD) {
glm::mat4 headPose = _avatarUpdate->getHeadPose();
glm::quat headRotation = glm::quat_cast(headPose);
lookAtSpot = _myCamera.getPosition() +
lookAtSpot = myAvatar->getPosition() +
myAvatar->getOrientation() * (headRotation * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
} else {
lookAtSpot = myAvatar->getHead()->getEyePosition() +

View file

@ -31,7 +31,14 @@ void AvatarUpdate::synchronousProcess() {
// Keep our own updated value, so that our asynchronous code can consult it.
_isHMDMode = qApp->isHMDMode();
auto frameCount = qApp->getFrameCount();
_headPose = qApp->getActiveDisplayPlugin()->getHeadPose(frameCount);
QSharedPointer<AvatarManager> manager = DependencyManager::get<AvatarManager>();
MyAvatar* myAvatar = manager->getMyAvatar();
assert(myAvatar);
// transform the head pose from the displayPlugin into avatar coordinates.
glm::mat4 invAvatarMat = glm::inverse(createMatFromQuatAndPos(myAvatar->getOrientation(), myAvatar->getPosition()));
_headPose = invAvatarMat * (myAvatar->getSensorToWorldMatrix() * qApp->getActiveDisplayPlugin()->getHeadPose(frameCount));
if (!isThreaded()) {
process();
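The head pose from the display plugin is in sensor space; the new code carries it through the sensor-to-world matrix and then strips off the avatar's own transform, so _headPose ends up in avatar coordinates. A rough scripting-side sketch of that final world-to-avatar step, the inverse of the transform used in handControllerGrab.js above (the helper name is hypothetical; the engine itself does this with glm matrices):

function worldPoseToAvatarFrame(worldPosition, worldRotation) {
    var invAvatarRotation = Quat.inverse(MyAvatar.orientation);
    return {
        // undo the avatar translation, then undo its rotation
        translation: Vec3.multiplyQbyV(invAvatarRotation, Vec3.subtract(worldPosition, MyAvatar.position)),
        rotation: Quat.multiply(invAvatarRotation, worldRotation)
    };
}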

View file

@ -19,6 +19,7 @@
#include <QtCore/QTimer>
#include <scripting/HMDScriptingInterface.h>
#include <AccountManager.h>
#include <AddressManager.h>
#include <AudioClient.h>
@ -849,7 +850,7 @@ void MyAvatar::updateLookAtTargetAvatar() {
avatar->setIsLookAtTarget(false);
if (!avatar->isMyAvatar() && avatar->isInitialized() &&
(distanceTo < GREATEST_LOOKING_AT_DISTANCE * getUniformScale())) {
float angleTo = glm::angle(lookForward, glm::normalize(avatar->getHead()->getEyePosition() - cameraPosition));
float angleTo = glm::angle(lookForward, glm::normalize(avatar->getHead()->getEyePosition() - getHead()->getEyePosition()));
if (angleTo < (smallestAngleTo * (isCurrentTarget ? KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR : 1.0f))) {
_lookAtTargetAvatar = avatarPointer;
_targetAvatarPosition = avatarPointer->getPosition();
@ -864,9 +865,13 @@ void MyAvatar::updateLookAtTargetAvatar() {
// Let's get everything to world space:
glm::vec3 avatarLeftEye = getHead()->getLeftEyePosition();
glm::vec3 avatarRightEye = getHead()->getRightEyePosition();
// When not in HMD, these might both answer identity (i.e., the bridge of the nose). That's ok.
// By my inspection of the code and live testing, getEyeOffset and getEyePose are the same. (Application hands identity as offset matrix.)
// This might be more work than needed for any given use, but as we explore different formulations, we go mad if we don't work in world space.
// First find out where (in world space) the person is looking relative to that bridge-of-the-avatar point.
// (We will be adding that offset to the camera position, after making some other adjustments.)
glm::vec3 gazeOffset = lookAtPosition - getHead()->getEyePosition();
// scale gazeOffset by IPD, if wearing an HMD.
if (qApp->isHMDMode()) {
glm::mat4 leftEye = qApp->getEyeOffset(Eye::Left);
glm::mat4 rightEye = qApp->getEyeOffset(Eye::Right);
glm::vec3 leftEyeHeadLocal = glm::vec3(leftEye[3]);
@ -875,25 +880,14 @@ void MyAvatar::updateLookAtTargetAvatar() {
glm::vec3 humanLeftEye = humanSystem->getPosition() + (humanSystem->getOrientation() * leftEyeHeadLocal);
glm::vec3 humanRightEye = humanSystem->getPosition() + (humanSystem->getOrientation() * rightEyeHeadLocal);
// First find out where (in world space) the person is looking relative to that bridge-of-the-avatar point.
// (We will be adding that offset to the camera position, after making some other adjustments.)
glm::vec3 gazeOffset = lookAtPosition - getHead()->getEyePosition();
auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
float ipdScale = hmdInterface->getIPDScale();
// Scale by proportional differences between avatar and human.
float humanEyeSeparationInModelSpace = glm::length(humanLeftEye - humanRightEye);
float humanEyeSeparationInModelSpace = glm::length(humanLeftEye - humanRightEye) * ipdScale;
float avatarEyeSeparation = glm::length(avatarLeftEye - avatarRightEye);
gazeOffset = gazeOffset * humanEyeSeparationInModelSpace / avatarEyeSeparation;
// If the camera is also not oriented with the head, adjust by getting the offset in head-space...
/* Not needed (i.e., code is a no-op), but I'm leaving the example code here in case something like this is needed someday.
glm::quat avatarHeadOrientation = getHead()->getOrientation();
glm::vec3 gazeOffsetLocalToHead = glm::inverse(avatarHeadOrientation) * gazeOffset;
// ... and treat that as though it were in camera space, bringing it back to world space.
// But camera is fudged to make the picture feel like the avatar's orientation.
glm::quat humanOrientation = humanSystem->getOrientation(); // or just avatar getOrienation() ?
gazeOffset = humanOrientation * gazeOffsetLocalToHead;
glm::vec3 corrected = humanSystem->getPosition() + gazeOffset;
*/
}
// And now we can finally add that offset to the camera.
glm::vec3 corrected = qApp->getViewFrustum()->getPosition() + gazeOffset;
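The gaze offset is measured between the human's (HMD) eyes and has to be re-expressed in the avatar's eye spacing, now also honoring the user's IPD scale. A worked example with made-up numbers:

// Illustrative numbers only, not taken from the code.
var ipdScale = 1.0;
var humanEyeSeparation = 0.064 * ipdScale;   // metres, between the HMD eye positions
var avatarEyeSeparation = 0.128;             // metres, between the avatar model's eyes
var gazeOffsetScale = humanEyeSeparation / avatarEyeSeparation;   // 0.5: gaze offsets are halved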
@ -1777,25 +1771,6 @@ glm::quat MyAvatar::getWorldBodyOrientation() const {
return glm::quat_cast(_sensorToWorldMatrix * _bodySensorMatrix);
}
#if 0
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor space
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
if (_rig) {
// orientation
const glm::quat hmdOrientation = getHMDSensorOrientation();
const glm::quat yaw = cancelOutRollAndPitch(hmdOrientation);
// position
// we flip about yAxis when going from "root" to "avatar" frame
// and we must also apply "yaw" to get into HMD frame
glm::quat rotY180 = glm::angleAxis((float)M_PI, glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 eyesInAvatarFrame = rotY180 * yaw * _rig->getEyesInRootFrame();
glm::vec3 bodyPos = getHMDSensorPosition() - eyesInAvatarFrame;
return createMatFromQuatAndPos(yaw, bodyPos);
}
return glm::mat4();
}
#else
// old school meat hook style
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
@ -1836,7 +1811,6 @@ glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
return createMatFromQuatAndPos(hmdOrientationYawOnly, bodyPos);
}
#endif
glm::vec3 MyAvatar::getPositionForAudio() {
switch (_audioListenerMode) {
@ -1944,13 +1918,26 @@ bool MyAvatar::FollowHelper::shouldActivateRotation(const MyAvatar& myAvatar, co
bool MyAvatar::FollowHelper::shouldActivateHorizontal(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const {
const float CYLINDER_RADIUS = 0.3f;
// -z axis of currentBodyMatrix in world space.
glm::vec3 forward = glm::normalize(glm::vec3(-currentBodyMatrix[0][2], -currentBodyMatrix[1][2], -currentBodyMatrix[2][2]));
// x axis of currentBodyMatrix in world space.
glm::vec3 right = glm::normalize(glm::vec3(currentBodyMatrix[0][0], currentBodyMatrix[1][0], currentBodyMatrix[2][0]));
glm::vec3 offset = extractTranslation(desiredBodyMatrix) - extractTranslation(currentBodyMatrix);
glm::vec3 radialOffset(offset.x, 0.0f, offset.z);
float radialDistance = glm::length(radialOffset);
return radialDistance > CYLINDER_RADIUS;
float forwardLeanAmount = glm::dot(forward, offset);
float lateralLeanAmount = glm::dot(right, offset);
const float MAX_LATERAL_LEAN = 0.3f;
const float MAX_FORWARD_LEAN = 0.15f;
const float MAX_BACKWARD_LEAN = 0.1f;
if (forwardLeanAmount > 0 && forwardLeanAmount > MAX_FORWARD_LEAN) {
return true;
} else if (forwardLeanAmount < 0 && forwardLeanAmount < -MAX_BACKWARD_LEAN) {
return true;
}
return fabs(lateralLeanAmount) > MAX_LATERAL_LEAN;
}
bool MyAvatar::FollowHelper::shouldActivateVertical(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const {
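shouldActivateHorizontal no longer tests a single radial cylinder; the desired-versus-current body offset is projected onto the body's forward and right axes and compared against separate forward, backward, and lateral limits. The same test as a small sketch with plain arithmetic (the axis extraction from the body matrix is omitted; names are illustrative):

var MAX_LATERAL_LEAN = 0.3;     // metres
var MAX_FORWARD_LEAN = 0.15;
var MAX_BACKWARD_LEAN = 0.1;

function dot(a, b) { return a.x * b.x + a.y * b.y + a.z * b.z; }

function shouldFollowHorizontally(offset, bodyForward, bodyRight) {
    var forwardLean = dot(bodyForward, offset);   // positive: leaning forward, negative: leaning backward
    var lateralLean = dot(bodyRight, offset);
    if (forwardLean > MAX_FORWARD_LEAN) { return true; }
    if (forwardLean < -MAX_BACKWARD_LEAN) { return true; }
    return Math.abs(lateralLean) > MAX_LATERAL_LEAN;
}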

View file

@ -173,8 +173,6 @@ void Rig::initJointStates(const FBXGeometry& geometry, const glm::mat4& modelOff
_animSkeleton = std::make_shared<AnimSkeleton>(geometry);
computeEyesInRootFrame(_animSkeleton->getRelativeDefaultPoses());
_internalPoseSet._relativePoses.clear();
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
@ -201,8 +199,6 @@ void Rig::reset(const FBXGeometry& geometry) {
_geometryOffset = AnimPose(geometry.offset);
_animSkeleton = std::make_shared<AnimSkeleton>(geometry);
computeEyesInRootFrame(_animSkeleton->getRelativeDefaultPoses());
_internalPoseSet._relativePoses.clear();
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
@ -237,10 +233,20 @@ int Rig::getJointStateCount() const {
return (int)_internalPoseSet._relativePoses.size();
}
static const uint32_t MAX_JOINT_NAME_WARNING_COUNT = 100;
int Rig::indexOfJoint(const QString& jointName) const {
if (_animSkeleton) {
return _animSkeleton->nameToJointIndex(jointName);
int result = _animSkeleton->nameToJointIndex(jointName);
// This is a content error, so we should issue a warning.
if (result < 0 && _jointNameWarningCount < MAX_JOINT_NAME_WARNING_COUNT) {
qCWarning(animation) << "Rig: Missing joint" << jointName << "in avatar model";
_jointNameWarningCount++;
}
return result;
} else {
// This is normal and can happen when the avatar model has not been downloaded/loaded yet.
return -1;
}
}
@ -444,26 +450,6 @@ void Rig::calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds,
*alphaOut = alpha;
}
void Rig::computeEyesInRootFrame(const AnimPoseVec& poses) {
// TODO: use cached eye/hips indices for these calculations
int numPoses = (int)poses.size();
int hipsIndex = _animSkeleton->nameToJointIndex(QString("Hips"));
int headIndex = _animSkeleton->nameToJointIndex(QString("Head"));
if (hipsIndex > 0 && headIndex > 0) {
int rightEyeIndex = _animSkeleton->nameToJointIndex(QString("RightEye"));
int leftEyeIndex = _animSkeleton->nameToJointIndex(QString("LeftEye"));
if (numPoses > rightEyeIndex && numPoses > leftEyeIndex && rightEyeIndex > 0 && leftEyeIndex > 0) {
glm::vec3 rightEye = _animSkeleton->getAbsolutePose(rightEyeIndex, poses).trans;
glm::vec3 leftEye = _animSkeleton->getAbsolutePose(leftEyeIndex, poses).trans;
glm::vec3 hips = _animSkeleton->getAbsolutePose(hipsIndex, poses).trans;
_eyesInRootFrame = 0.5f * (rightEye + leftEye) - hips;
} else {
glm::vec3 hips = _animSkeleton->getAbsolutePose(hipsIndex, poses).trans;
_eyesInRootFrame = 0.5f * (DEFAULT_RIGHT_EYE_POS + DEFAULT_LEFT_EYE_POS) - hips;
}
}
}
void Rig::setEnableInverseKinematics(bool enable) {
_enableInverseKinematics = enable;
}
@ -893,8 +879,6 @@ void Rig::updateAnimations(float deltaTime, glm::mat4 rootTransform) {
for (auto& trigger : triggersOut) {
_animVars.setTrigger(trigger);
}
computeEyesInRootFrame(_internalPoseSet._relativePoses);
}
applyOverridePoses();
@ -1067,14 +1051,21 @@ void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm
glm::mat4 rigToWorld = createMatFromQuatAndPos(modelRotation, modelTranslation);
glm::mat4 worldToRig = glm::inverse(rigToWorld);
glm::vec3 zAxis = glm::normalize(_internalPoseSet._absolutePoses[index].trans - transformPoint(worldToRig, lookAtSpot));
glm::quat q = rotationBetween(IDENTITY_FRONT, zAxis);
glm::quat desiredQuat = rotationBetween(IDENTITY_FRONT, zAxis);
glm::quat headQuat;
int headIndex = indexOfJoint("Head");
if (headIndex >= 0) {
headQuat = _internalPoseSet._absolutePoses[headIndex].rot;
}
glm::quat deltaQuat = desiredQuat * glm::inverse(headQuat);
// limit rotation
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
q = glm::angleAxis(glm::clamp(glm::angle(q), -MAX_ANGLE, MAX_ANGLE), glm::axis(q));
deltaQuat = glm::angleAxis(glm::clamp(glm::angle(deltaQuat), -MAX_ANGLE, MAX_ANGLE), glm::axis(deltaQuat));
// directly set absolutePose rotation
_internalPoseSet._absolutePoses[index].rot = q;
_internalPoseSet._absolutePoses[index].rot = deltaQuat * headQuat;
}
}
@ -1086,7 +1077,11 @@ void Rig::updateFromHandParameters(const HandParameters& params, float dt) {
const float MIN_LENGTH = 1.0e-4f;
// project the hips onto the xz plane.
auto hipsTrans = _internalPoseSet._absolutePoses[_animSkeleton->nameToJointIndex("Hips")].trans;
int hipsIndex = indexOfJoint("Hips");
glm::vec3 hipsTrans;
if (hipsIndex >= 0) {
hipsTrans = _internalPoseSet._absolutePoses[hipsIndex].trans;
}
const glm::vec2 bodyCircleCenter(hipsTrans.x, hipsTrans.z);
if (params.isLeftEnabled) {
@ -1271,7 +1266,11 @@ void Rig::computeAvatarBoundingCapsule(
AnimPose geometryToRig = _modelOffset * _geometryOffset;
AnimPose hips = geometryToRig * _animSkeleton->getAbsoluteBindPose(_animSkeleton->nameToJointIndex("Hips"));
AnimPose hips(glm::vec3(1), glm::quat(), glm::vec3());
int hipsIndex = indexOfJoint("Hips");
if (hipsIndex >= 0) {
hips = geometryToRig * _animSkeleton->getAbsoluteBindPose(hipsIndex);
}
AnimVariantMap animVars;
glm::quat handRotation = glm::angleAxis(PI, Vectors::UNIT_X);
animVars.set("leftHandPosition", hips.trans);
@ -1281,8 +1280,8 @@ void Rig::computeAvatarBoundingCapsule(
animVars.set("rightHandRotation", handRotation);
animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);
int rightFootIndex = _animSkeleton->nameToJointIndex("RightFoot");
int leftFootIndex = _animSkeleton->nameToJointIndex("LeftFoot");
int rightFootIndex = indexOfJoint("RightFoot");
int leftFootIndex = indexOfJoint("LeftFoot");
if (rightFootIndex != -1 && leftFootIndex != -1) {
glm::vec3 foot = Vectors::ZERO;
glm::quat footRotation = glm::angleAxis(0.5f * PI, Vectors::UNIT_X);
@ -1314,7 +1313,7 @@ void Rig::computeAvatarBoundingCapsule(
// HACK: to keep the radius of the bounding capsule tight against the torso, we only consider joints
// from the head to the hips when computing the rest of the bounding capsule.
int index = _animSkeleton->nameToJointIndex(QString("Head"));
int index = indexOfJoint("Head");
while (index != -1) {
const FBXJointShapeInfo& shapeInfo = geometry.joints.at(index).shapeInfo;
AnimPose pose = finalPoses[index];
@ -1337,3 +1336,5 @@ void Rig::computeAvatarBoundingCapsule(
glm::vec3 rigCenter = (geometryToRig * (0.5f * (totalExtents.maximum + totalExtents.minimum)));
localOffsetOut = rigCenter - (geometryToRig * rootPosition);
}

View file

@ -231,8 +231,6 @@ public:
void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
void calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds, float* alphaOut) const;
void computeEyesInRootFrame(const AnimPoseVec& poses);
AnimPose _modelOffset; // model to rig space
AnimPose _geometryOffset; // geometry to model space (includes unit offset & fst offsets)
@ -305,6 +303,8 @@ public:
bool _lastEnableInverseKinematics { true };
bool _enableInverseKinematics { true };
mutable uint32_t _jointNameWarningCount { 0 };
private:
QMap<int, StateHandler> _stateHandlers;
int _nextStateHandlerId { 0 };

View file

@ -336,6 +336,33 @@ bool RenderableModelEntityItem::getAnimationFrame() {
return newFrame;
}
void RenderableModelEntityItem::updateModelBounds() {
if (!hasModel() || !_model) {
return;
}
bool movingOrAnimating = isMovingRelativeToParent() || isAnimatingSomething();
if ((movingOrAnimating ||
_needsInitialSimulation ||
_model->getTranslation() != getPosition() ||
_model->getRotation() != getRotation() ||
_model->getRegistrationPoint() != getRegistrationPoint())
&& _model->isActive() && _dimensionsInitialized) {
_model->setScaleToFit(true, getDimensions());
_model->setSnapModelToRegistrationPoint(true, getRegistrationPoint());
_model->setRotation(getRotation());
_model->setTranslation(getPosition());
// make sure to simulate so everything gets set up correctly for rendering
{
PerformanceTimer perfTimer("_model->simulate");
_model->simulate(0.0f);
}
_needsInitialSimulation = false;
}
}
// NOTE: this only renders the "meta" portion of the Model: it renders debugging items and handles
// the per-frame simulation/update that might be required if the model's properties changed.
void RenderableModelEntityItem::render(RenderArgs* args) {
@ -414,27 +441,7 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
}
}
});
bool movingOrAnimating = isMovingRelativeToParent() || isAnimatingSomething();
if ((movingOrAnimating ||
_needsInitialSimulation ||
_model->getTranslation() != getPosition() ||
_model->getRotation() != getRotation() ||
_model->getRegistrationPoint() != getRegistrationPoint())
&& _model->isActive() && _dimensionsInitialized) {
_model->setScaleToFit(true, getDimensions());
_model->setSnapModelToRegistrationPoint(true, getRegistrationPoint());
_model->setRotation(getRotation());
_model->setTranslation(getPosition());
// make sure to simulate so everything gets set up correctly for rendering
{
PerformanceTimer perfTimer("_model->simulate");
_model->simulate(0.0f);
}
_needsInitialSimulation = false;
}
updateModelBounds();
}
}
} else {
@ -598,7 +605,9 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
if (type != SHAPE_TYPE_COMPOUND) {
ModelEntityItem::computeShapeInfo(info);
info.setParams(type, 0.5f * getDimensions());
adjustShapeInfoByRegistration(info);
} else {
updateModelBounds();
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = _model->getCollisionGeometry();
// should never get here when the collision model is not fully loaded
@ -690,10 +699,13 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
AABox box;
for (int i = 0; i < _points.size(); i++) {
for (int j = 0; j < _points[i].size(); j++) {
// compensate for registraion
// compensate for registration
_points[i][j] += _model->getOffset();
// scale so the collision points match the model points
_points[i][j] *= scale;
// this next subtraction is done so we can give the offset to info, which will cause
// the shape-key to change.
_points[i][j] -= _model->getOffset();
box += _points[i][j];
}
}
@ -701,6 +713,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
glm::vec3 collisionModelDimensions = box.getDimensions();
info.setParams(type, collisionModelDimensions, _compoundShapeURL);
info.setConvexHulls(_points);
info.setOffset(_model->getOffset());
}
}

View file

@ -49,6 +49,7 @@ public:
virtual void removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) override;
void updateModelBounds();
virtual void render(RenderArgs* args) override;
virtual bool supportsDetailedRayIntersection() const override { return true; }
virtual bool findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,

View file

@ -142,7 +142,7 @@ glm::vec3 RenderablePolyVoxEntityItem::getSurfacePositionAdjustment() const {
glm::mat4 RenderablePolyVoxEntityItem::voxelToLocalMatrix() const {
glm::vec3 scale = getDimensions() / _voxelVolumeSize; // meters / voxel-units
bool success; // TODO -- Does this actually have to happen in world space?
glm::vec3 center = getCenterPosition(success);
glm::vec3 center = getCenterPosition(success); // this handles registrationPoint changes
glm::vec3 position = getPosition(success);
glm::vec3 positionToCenter = center - position;
@ -430,6 +430,13 @@ ShapeType RenderablePolyVoxEntityItem::getShapeType() const {
return SHAPE_TYPE_COMPOUND;
}
void RenderablePolyVoxEntityItem::updateRegistrationPoint(const glm::vec3& value) {
if (value != _registrationPoint) {
_meshDirty = true;
EntityItem::updateRegistrationPoint(value);
}
}
bool RenderablePolyVoxEntityItem::isReadyToComputeShape() {
_meshLock.lockForRead();
if (_meshDirty) {
@ -1224,10 +1231,16 @@ void RenderablePolyVoxEntityItem::computeShapeInfoWorkerAsync() {
}
glm::vec3 collisionModelDimensions = box.getDimensions();
QByteArray b64 = _voxelData.toBase64();
// include the registrationPoint in the shape key, because the offset is already
// included in the points and the shapeManager won't know that the shape has changed.
QString shapeKey = QString(_voxelData.toBase64()) + "," +
QString::number(_registrationPoint.x) + "," +
QString::number(_registrationPoint.y) + "," +
QString::number(_registrationPoint.z);
_shapeInfoLock.lockForWrite();
_shapeInfo.setParams(SHAPE_TYPE_COMPOUND, collisionModelDimensions, QString(b64));
_shapeInfo.setParams(SHAPE_TYPE_COMPOUND, collisionModelDimensions, shapeKey);
_shapeInfo.setConvexHulls(points);
// adjustShapeInfoByRegistration(_shapeInfo);
_shapeInfoLock.unlock();
_meshLock.lockForWrite();
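Since the registration offset is baked directly into the voxel collision points, the ShapeManager's cache would otherwise hand back a stale shape when only the registration point changes; folding the registration point into the key forces a rebuild. A sketch of the key construction (hypothetical helper, mirroring the string built above):

function voxelShapeKey(voxelDataBase64, registrationPoint) {
    // every input that changes the baked collision points must be part of the key
    return [voxelDataBase64, registrationPoint.x, registrationPoint.y, registrationPoint.z].join(",");
}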

View file

@ -116,6 +116,8 @@ public:
virtual void rebakeMesh();
virtual void updateRegistrationPoint(const glm::vec3& value);
private:
// The PolyVoxEntityItem class has _voxelData which contains dimensions and compressed voxel data. The dimensions
// may not match _voxelVolumeSize.

View file

@ -677,7 +677,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
READ_ENTITY_PROPERTY(PROP_LIFETIME, float, updateLifetime);
READ_ENTITY_PROPERTY(PROP_SCRIPT, QString, setScript);
READ_ENTITY_PROPERTY(PROP_SCRIPT_TIMESTAMP, quint64, setScriptTimestamp);
READ_ENTITY_PROPERTY(PROP_REGISTRATION_POINT, glm::vec3, setRegistrationPoint);
READ_ENTITY_PROPERTY(PROP_REGISTRATION_POINT, glm::vec3, updateRegistrationPoint);
READ_ENTITY_PROPERTY(PROP_ANGULAR_DAMPING, float, updateAngularDamping);
READ_ENTITY_PROPERTY(PROP_VISIBLE, bool, setVisible);
@ -1120,7 +1120,7 @@ bool EntityItem::setProperties(const EntityItemProperties& properties) {
// these (along with "position" above) affect tree structure
SET_ENTITY_PROPERTY_FROM_PROPERTIES(dimensions, updateDimensions);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(registrationPoint, setRegistrationPoint);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(registrationPoint, updateRegistrationPoint);
// these (along with all properties above) affect the simulation
SET_ENTITY_PROPERTY_FROM_PROPERTIES(density, updateDensity);
@ -1340,6 +1340,15 @@ float EntityItem::getRadius() const {
return 0.5f * glm::length(getDimensions());
}
void EntityItem::adjustShapeInfoByRegistration(ShapeInfo& info) const {
if (_registrationPoint != ENTITY_ITEM_DEFAULT_REGISTRATION_POINT) {
glm::mat4 scale = glm::scale(getDimensions());
glm::mat4 registration = scale * glm::translate(ENTITY_ITEM_DEFAULT_REGISTRATION_POINT - getRegistrationPoint());
glm::vec3 regTransVec = glm::vec3(registration[3]); // extract position component from matrix
info.setOffset(regTransVec);
}
}
bool EntityItem::contains(const glm::vec3& point) const {
if (getShapeType() == SHAPE_TYPE_COMPOUND) {
bool success;
@ -1348,12 +1357,21 @@ bool EntityItem::contains(const glm::vec3& point) const {
} else {
ShapeInfo info;
info.setParams(getShapeType(), glm::vec3(0.5f));
adjustShapeInfoByRegistration(info);
return info.contains(worldToEntity(point));
}
}
void EntityItem::computeShapeInfo(ShapeInfo& info) {
info.setParams(getShapeType(), 0.5f * getDimensions());
adjustShapeInfoByRegistration(info);
}
void EntityItem::updateRegistrationPoint(const glm::vec3& value) {
if (value != _registrationPoint) {
setRegistrationPoint(value);
_dirtyFlags |= Simulation::DIRTY_SHAPE;
}
}
void EntityItem::updatePosition(const glm::vec3& value) {
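adjustShapeInfoByRegistration builds scale(dimensions) * translate(defaultRegistration - registrationPoint) and keeps only the translation column, which reduces to a componentwise product with the default registration of (0.5, 0.5, 0.5). A sketch of the resulting offset (hypothetical helper, same math):

function registrationOffset(dimensions, registrationPoint) {
    // e.g. a 2 m cube registered at a corner (0, 0, 0) gets a (1, 1, 1) shape offset
    return {
        x: dimensions.x * (0.5 - registrationPoint.x),
        y: dimensions.y * (0.5 - registrationPoint.y),
        z: dimensions.z * (0.5 - registrationPoint.z)
    };
}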

View file

@ -305,6 +305,7 @@ public:
// TODO: get rid of users of getRadius()...
float getRadius() const;
virtual void adjustShapeInfoByRegistration(ShapeInfo& info) const;
virtual bool contains(const glm::vec3& point) const;
virtual bool isReadyToComputeShape() { return !isDead(); }
@ -319,6 +320,7 @@ public:
virtual void setRotation(glm::quat orientation) { setOrientation(orientation); }
// updateFoo() methods to be used when changes need to be accumulated in the _dirtyFlags
virtual void updateRegistrationPoint(const glm::vec3& value);
void updatePosition(const glm::vec3& value);
void updatePositionFromNetwork(const glm::vec3& value);
void updateDimensions(const glm::vec3& value);

View file

@ -79,11 +79,15 @@ void MessagesClient::handleMessagesPacket(QSharedPointer<ReceivedMessage> receiv
QString channel, message;
QUuid senderID;
decodeMessagesPacket(receivedMessage, channel, message, senderID);
emit messageReceived(channel, message, senderID);
emit messageReceived(channel, message, senderID, false);
}
void MessagesClient::sendMessage(QString channel, QString message) {
void MessagesClient::sendMessage(QString channel, QString message, bool localOnly) {
auto nodeList = DependencyManager::get<NodeList>();
if (localOnly) {
QUuid senderID = nodeList->getSessionUUID();
emit messageReceived(channel, message, senderID, true);
} else {
SharedNodePointer messagesMixer = nodeList->soloNodeOfType(NodeType::MessagesMixer);
if (messagesMixer) {
@ -92,6 +96,11 @@ void MessagesClient::sendMessage(QString channel, QString message) {
nodeList->sendPacketList(std::move(packetList), *messagesMixer);
}
}
}
void MessagesClient::sendLocalMessage(QString channel, QString message) {
sendMessage(channel, message, true);
}
void MessagesClient::subscribe(QString channel) {
_subscribedChannels << channel;

View file

@ -29,7 +29,8 @@ public:
Q_INVOKABLE void init();
Q_INVOKABLE void sendMessage(QString channel, QString message);
Q_INVOKABLE void sendMessage(QString channel, QString message, bool localOnly = false);
Q_INVOKABLE void sendLocalMessage(QString channel, QString message);
Q_INVOKABLE void subscribe(QString channel);
Q_INVOKABLE void unsubscribe(QString channel);
@ -38,7 +39,7 @@ public:
signals:
void messageReceived(QString channel, QString message, QUuid senderUUID);
void messageReceived(QString channel, QString message, QUuid senderUUID, bool localOnly);
private slots:
void handleMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode);
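With the localOnly path, a message never reaches the messages mixer; it is echoed straight back through messageReceived with localOnly set to true. Assuming these Q_INVOKABLE methods are exposed to scripts as the usual Messages object (the binding is not part of this diff), usage would look roughly like:

Messages.subscribe("my-channel");
Messages.messageReceived.connect(function(channel, message, senderID, localOnly) {
    if (channel === "my-channel") {
        print("got " + message + (localOnly ? " (local only)" : " (via mixer)"));
    }
});
// delivered only to this client, without touching the messages mixer
Messages.sendLocalMessage("my-channel", "hello");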

View file

@ -106,7 +106,7 @@ btCollisionShape* ShapeFactory::createShapeFromInfo(const ShapeInfo& info) {
}
break;
}
if (shape && type != SHAPE_TYPE_COMPOUND) {
if (shape) {
if (glm::length2(info.getOffset()) > MIN_SHAPE_OFFSET * MIN_SHAPE_OFFSET) {
// this shape has an offset, which we support by wrapping the true shape
// in a btCompoundShape with a local transform

View file

@ -973,6 +973,8 @@
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1001,6 +1003,8 @@
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1074,6 +1078,8 @@
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1103,6 +1109,8 @@
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1131,6 +1139,8 @@
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1241,8 +1251,9 @@
}
function createPingPongBallGun() {
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun.fbx';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_convex.obj';
var MODEL_URL = 'http://hifi-content.s3.amazonaws.com/alan/dev/Pingpong-Gun-New.fbx';
var COLLISION_HULL_URL = 'http://hifi-content.s3.amazonaws.com/alan/dev/Pingpong-Gun-New.obj';
var COLLISION_SOUND_URL = 'http://hifi-public.s3.amazonaws.com/sounds/Collisions-otherorganic/plastic_impact.L.wav';
var position = {
x: 548.6,

View file

@ -720,6 +720,7 @@ MasterReset = function() {
function createTargets() {
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/target.fbx';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/target_collision_hull.obj';
@ -960,6 +961,8 @@ MasterReset = function() {
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -988,6 +991,8 @@ MasterReset = function() {
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1061,6 +1066,8 @@ MasterReset = function() {
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1090,6 +1097,8 @@ MasterReset = function() {
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1118,6 +1127,8 @@ MasterReset = function() {
y: 2.545,
z: 2.545
},
intensity: 1.0,
falloffRadius: 0.3,
cutoff: 90,
color: {
red: 217,
@ -1228,8 +1239,9 @@ MasterReset = function() {
}
function createPingPongBallGun() {
var MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun.fbx';
var COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/ping_pong_gun/ping_pong_gun_convex.obj';
var MODEL_URL = 'http://hifi-content.s3.amazonaws.com/alan/dev/Pingpong-Gun-New.fbx';
var COLLISION_HULL_URL = 'http://hifi-content.s3.amazonaws.com/alan/dev/Pingpong-Gun-New.obj';
var COLLISION_SOUND_URL = 'http://hifi-public.s3.amazonaws.com/sounds/Collisions-otherorganic/plastic_impact.L.wav';
var position = {
x: 548.6,