Merge branch 'master' of https://github.com/highfidelity/hifi into baseball

This commit is contained in:
Stephen Birarda 2015-11-12 10:40:51 -08:00
commit ef778c554c
57 changed files with 3566 additions and 472 deletions

View file

@ -5,7 +5,7 @@ setup_hifi_project(Core Gui Network Script Quick Widgets WebSockets)
# link in the shared libraries
link_hifi_libraries(
audio avatars octree environment gpu model fbx entities
networking animation shared script-engine embedded-webserver
networking animation recording shared script-engine embedded-webserver
controllers physics
)

View file

@ -18,17 +18,13 @@
// Per-connected-node state kept by the entity server for octree queries.
// Tracks when the deleted-entities list was last sent to this node.
class EntityNodeData : public OctreeQueryNode {
public:
    virtual PacketType getMyPacketType() const { return PacketType::EntityData; }

    // Timestamp (usecs) of the last "entities deleted" update sent to this node.
    quint64 getLastDeletedEntitiesSentAt() const { return _lastDeletedEntitiesSentAt; }
    void setLastDeletedEntitiesSentAt(quint64 sentAt) { _lastDeletedEntitiesSentAt = sentAt; }

private:
    // Default-initialized to "now" so a freshly connected node is not sent the
    // full historical deletion backlog. (Fixes the merge artifact that left both
    // a zero-initializing constructor and a duplicate member declaration.)
    quint64 _lastDeletedEntitiesSentAt { usecTimestampNow() };
};

#endif // hifi_EntityNodeData_h

View file

@ -82,9 +82,15 @@ bool EntityServer::hasSpecialPacketsToSend(const SharedNodePointer& node) {
EntityNodeData* nodeData = static_cast<EntityNodeData*>(node->getLinkedData());
if (nodeData) {
quint64 deletedEntitiesSentAt = nodeData->getLastDeletedEntitiesSentAt();
EntityTreePointer tree = std::static_pointer_cast<EntityTree>(_tree);
shouldSendDeletedEntities = tree->hasEntitiesDeletedSince(deletedEntitiesSentAt);
#ifdef EXTRA_ERASE_DEBUGGING
if (shouldSendDeletedEntities) {
int elapsed = usecTimestampNow() - deletedEntitiesSentAt;
qDebug() << "shouldSendDeletedEntities to node:" << node->getUUID() << "deletedEntitiesSentAt:" << deletedEntitiesSentAt << "elapsed:" << elapsed;
}
#endif
}
return shouldSendDeletedEntities;
@ -97,7 +103,6 @@ int EntityServer::sendSpecialPackets(const SharedNodePointer& node, OctreeQueryN
if (nodeData) {
quint64 deletedEntitiesSentAt = nodeData->getLastDeletedEntitiesSentAt();
quint64 deletePacketSentAt = usecTimestampNow();
EntityTreePointer tree = std::static_pointer_cast<EntityTree>(_tree);
bool hasMoreToSend = true;
@ -118,6 +123,13 @@ int EntityServer::sendSpecialPackets(const SharedNodePointer& node, OctreeQueryN
nodeData->setLastDeletedEntitiesSentAt(deletePacketSentAt);
}
#ifdef EXTRA_ERASE_DEBUGGING
if (packetsSent > 0) {
qDebug() << "EntityServer::sendSpecialPackets() sent " << packetsSent << "special packets of "
<< totalBytes << " total bytes to node:" << node->getUUID();
}
#endif
// TODO: caller is expecting a packetLength, what if we send more than one packet??
return totalBytes;
}
@ -127,7 +139,6 @@ void EntityServer::pruneDeletedEntities() {
if (tree->hasAnyDeletedEntities()) {
quint64 earliestLastDeletedEntitiesSent = usecTimestampNow() + 1; // in the future
DependencyManager::get<NodeList>()->eachNode([&earliestLastDeletedEntitiesSent](const SharedNodePointer& node) {
if (node->getLinkedData()) {
EntityNodeData* nodeData = static_cast<EntityNodeData*>(node->getLinkedData());
@ -137,7 +148,6 @@ void EntityServer::pruneDeletedEntities() {
}
}
});
tree->forgetEntitiesDeletedBefore(earliestLastDeletedEntitiesSent);
}
}

View file

@ -570,7 +570,7 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
}
if (somethingToSend) {
if (somethingToSend && _myServer->wantsVerboseDebug()) {
qDebug() << "Hit PPS Limit, packetsSentThisInterval =" << packetsSentThisInterval
<< " maxPacketsPerInterval = " << maxPacketsPerInterval
<< " clientMaxPacketsPerInterval = " << clientMaxPacketsPerInterval;

View file

@ -17,8 +17,14 @@
#include <JurisdictionSender.h>
const int MAX_FILENAME_LENGTH = 1024;
const int INTERVALS_PER_SECOND = 60;
/// This is the frequency (Hz) at which we check the octree server for changes to determine if we need to
/// send new "scene" information to the viewers. This will directly affect how quickly edits are
/// sent down to viewers. By setting it to 90Hz we allow edits happening at 90Hz to be sent down
/// to viewers at a rate more closely matching the edit rate. It would probably be better to allow
/// clients to ask the server to send at a rate consistent with their current vsync, since clients
/// can't render any faster than their vsync even if the server sent them more accurate information.
const int INTERVALS_PER_SECOND = 90;
const int OCTREE_SEND_INTERVAL_USECS = (1000 * 1000)/INTERVALS_PER_SECOND;
const int SENDING_TIME_TO_SPARE = 5 * 1000; // usec of sending interval to spare for sending octree elements
#endif // hifi_OctreeServerConsts_h

View file

@ -17,17 +17,21 @@ var NAMES = new Array("Craig", "Clement", "Jeff"); // ACs names ordered by IDs (
// Those variables MUST be common to every scripts
var controlEntitySize = 0.25;
var controlEntityPosition = { x: 2000 , y: 0, z: 0 };
var controlEntityPosition = { x: 0, y: 0, z: 0 };
// Script. DO NOT MODIFY BEYOND THIS LINE.
Script.include("../libraries/toolBars.js");
var clip_url = null;
var input_text = null;
var DO_NOTHING = 0;
var PLAY = 1;
var PLAY_LOOP = 2;
var STOP = 3;
var SHOW = 4;
var HIDE = 5;
var LOAD = 6;
var COLORS = [];
COLORS[PLAY] = { red: PLAY, green: 0, blue: 0 };
@ -35,6 +39,7 @@ COLORS[PLAY_LOOP] = { red: PLAY_LOOP, green: 0, blue: 0 };
COLORS[STOP] = { red: STOP, green: 0, blue: 0 };
COLORS[SHOW] = { red: SHOW, green: 0, blue: 0 };
COLORS[HIDE] = { red: HIDE, green: 0, blue: 0 };
COLORS[LOAD] = { red: LOAD, green: 0, blue: 0 };
@ -53,6 +58,7 @@ var onOffIcon = new Array();
var playIcon = new Array();
var playLoopIcon = new Array();
var stopIcon = new Array();
var loadIcon = new Array();
setupToolBars();
@ -104,6 +110,14 @@ function setupToolBars() {
alpha: ALPHA_OFF,
visible: true
}, false);
loadIcon[i] = toolBars[i].addTool({
imageURL: TOOL_ICON_URL + "recording-upload.svg",
width: Tool.IMAGE_WIDTH,
height: Tool.IMAGE_HEIGHT,
alpha: ALPHA_OFF,
visible: true
}, false);
nameOverlays.push(Overlays.addOverlay("text", {
backgroundColor: { red: 0, green: 0, blue: 0 },
@ -129,11 +143,13 @@ function sendCommand(id, action) {
toolBars[id].setAlpha(ALPHA_ON, playIcon[id]);
toolBars[id].setAlpha(ALPHA_ON, playLoopIcon[id]);
toolBars[id].setAlpha(ALPHA_ON, stopIcon[id]);
toolBars[id].setAlpha(ALPHA_ON, loadIcon[id]);
} else if (action === HIDE) {
toolBars[id].selectTool(onOffIcon[id], true);
toolBars[id].setAlpha(ALPHA_OFF, playIcon[id]);
toolBars[id].setAlpha(ALPHA_OFF, playLoopIcon[id]);
toolBars[id].setAlpha(ALPHA_OFF, stopIcon[id]);
toolBars[id].setAlpha(ALPHA_OFF, loadIcon[id]);
} else if (toolBars[id].toolSelected(onOffIcon[id])) {
return;
}
@ -148,6 +164,8 @@ function sendCommand(id, action) {
var position = { x: controlEntityPosition.x + id * controlEntitySize,
y: controlEntityPosition.y, z: controlEntityPosition.z };
Entities.addEntity({
name: "Actor Controller",
userData: clip_url,
type: "Box",
position: position,
dimensions: { x: controlEntitySize, y: controlEntitySize, z: controlEntitySize },
@ -173,6 +191,8 @@ function mousePressEvent(event) {
sendCommand(i, PLAY_LOOP);
} else if (stopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, STOP);
} else if (loadIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, LOAD);
} else {
// Check individual controls
for (i = 0; i < NUM_AC; i++) {
@ -188,6 +208,12 @@ function mousePressEvent(event) {
sendCommand(i, PLAY_LOOP);
} else if (stopIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
sendCommand(i, STOP);
} else if (loadIcon[i] === toolBars[i].clicked(clickedOverlay, false)) {
input_text = Window.prompt("Insert the url of the clip: ","");
if(!(input_text === "" || input_text === null)){
clip_url = input_text;
sendCommand(i, LOAD);
}
} else {
}
@ -231,4 +257,4 @@ Controller.mousePressEvent.connect(mousePressEvent);
Script.update.connect(update);
Script.scriptEnding.connect(scriptEnding);
moveUI();
moveUI();

View file

@ -9,10 +9,9 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
// Set the following variables to the values needed
var filename = "/Users/clement/Desktop/recording.hfr";
var clip_url = null;
var playFromCurrentLocation = true;
var useDisplayName = true;
var useAttachments = true;
@ -21,8 +20,6 @@ var useAvatarModel = true;
// ID of the agent. Two agents can't have the same ID.
var id = 0;
// Set avatar model URL
Avatar.skeletonModelURL = "https://hifi-public.s3.amazonaws.com/marketplace/contents/e21c0b95-e502-4d15-8c41-ea2fc40f1125/3585ddf674869a67d31d5964f7b52de1.fst?1427169998";
// Set position/orientation/scale here if playFromCurrentLocation is true
Avatar.position = { x:1, y: 1, z: 1 };
Avatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
@ -30,7 +27,7 @@ Avatar.scale = 1.0;
// Those variables MUST be common to every scripts
var controlEntitySize = 0.25;
var controlEntityPosition = { x: 2000, y: 0, z: 0 };
var controlEntityPosition = { x: 0, y: 0, z: 0 };
// Script. DO NOT MODIFY BEYOND THIS LINE.
var DO_NOTHING = 0;
@ -39,6 +36,7 @@ var PLAY_LOOP = 2;
var STOP = 3;
var SHOW = 4;
var HIDE = 5;
var LOAD = 6;
var COLORS = [];
COLORS[PLAY] = { red: PLAY, green: 0, blue: 0 };
@ -46,10 +44,11 @@ COLORS[PLAY_LOOP] = { red: PLAY_LOOP, green: 0, blue: 0 };
COLORS[STOP] = { red: STOP, green: 0, blue: 0 };
COLORS[SHOW] = { red: SHOW, green: 0, blue: 0 };
COLORS[HIDE] = { red: HIDE, green: 0, blue: 0 };
COLORS[LOAD] = { red: LOAD, green: 0, blue: 0 };
controlEntityPosition.x += id * controlEntitySize;
Avatar.loadRecording(filename);
Avatar.loadRecording(clip_url);
Avatar.setPlayFromCurrentLocation(playFromCurrentLocation);
Avatar.setPlayerUseDisplayName(useDisplayName);
@ -68,7 +67,9 @@ function setupEntityViewer() {
EntityViewer.queryOctree();
}
function getAction(controlEntity) {
function getAction(controlEntity) {
clip_url = controlEntity.userData;
if (controlEntity === null ||
controlEntity.position.x !== controlEntityPosition.x ||
controlEntity.position.y !== controlEntityPosition.y ||
@ -141,6 +142,12 @@ function update(event) {
}
Agent.isAvatar = false;
break;
case LOAD:
print("Load");
if(clip_url !== null) {
Avatar.loadRecording(clip_url);
}
break;
case DO_NOTHING:
break;
default:

View file

@ -0,0 +1,73 @@
//
// handControlledHead.js
// examples
//
// Created by Alessandro Signa on 10/11/15.
// Copyright 2015 High Fidelity, Inc.
//
// This script allows you to look around, driving the rotation of the avatar's head by the right hand orientation.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// Gains converting per-frame hand->tip offset deltas (meters, scaled by
// deltaTime) into degrees of head yaw/pitch change.
const YAW_MULTIPLIER = 20000;
const PITCH_MULTIPLIER = 15000;
// Horizontal deltas within +/-EPSILON are treated as sensor noise (yaw drift guard).
const EPSILON = 0.001;
// True until the first frame the thumb button is held; used to snapshot the
// starting hand pose so the head doesn't jump when the button is pressed.
var firstPress = true;
// Previous/current vertical (pitch-driving) hand-to-tip offsets.
var handPreviousVerticalRotation = 0.0;
var handCurrentVerticalRotation = 0.0;
// Previous/current horizontal (yaw-driving) hand-to-tip offsets.
var handPreviousHorizontalRotation = 0.0;
var handCurrentHorizontalRotation = 0.0;
// Right-hand and hand-tip positions rotated into a body-yaw-relative frame.
var rotatedHandPosition;
var rotatedTipPosition;
// Per-frame tick: while the right primary thumb button is held the hand
// drives the head; otherwise re-arm the first-press latch and ease the head
// yaw back toward center.
function update(deltaTime) {
    var thumbHeld = Controller.getValue(Controller.Standard.RightPrimaryThumb);
    if (thumbHeld) {
        pitchManager(deltaTime);
    } else if (!firstPress) {
        firstPress = true;
    }
    if (firstPress && MyAvatar.headYaw) {
        // Decay yaw by 10% per frame back toward zero.
        MyAvatar.headYaw = MyAvatar.headYaw - MyAvatar.headYaw / 10;
    }
}
// Drives MyAvatar.headPitch/headYaw from the right hand's pose, approximated
// by the hand -> hand-tip vector measured in a frame that cancels the
// avatar's body yaw.
function pitchManager(deltaTime){
// Rotate hand and tip world positions by -bodyYaw so the measurements are
// body-relative rather than world-relative.
rotatedHandPosition = Vec3.multiplyQbyV(Quat.fromPitchYawRollDegrees(0, -MyAvatar.bodyYaw, 0), MyAvatar.getRightHandPosition());
rotatedTipPosition = Vec3.multiplyQbyV(Quat.fromPitchYawRollDegrees(0, -MyAvatar.bodyYaw, 0), MyAvatar.getRightHandTipPosition());
// Vertical (y) component of hand->tip drives pitch; horizontal (x) drives yaw.
handCurrentVerticalRotation = Vec3.subtract(rotatedTipPosition, rotatedHandPosition).y;
handCurrentHorizontalRotation = Vec3.subtract(rotatedTipPosition, rotatedHandPosition).x;
var handCurrentHorizontalRotationFiltered = handCurrentHorizontalRotation;
//to avoid yaw drift: horizontal deltas within +/-EPSILON reuse the previous sample
if((handCurrentHorizontalRotation - handPreviousHorizontalRotation) < EPSILON && (handCurrentHorizontalRotation - handPreviousHorizontalRotation) > -EPSILON){
handCurrentHorizontalRotationFiltered = handPreviousHorizontalRotation;
}
// First frame of a press: snapshot the pose so the deltas start at zero and
// the head doesn't jump to an absolute offset.
if(firstPress){
handPreviousVerticalRotation = handCurrentVerticalRotation;
handPreviousHorizontalRotation = handCurrentHorizontalRotation;
firstPress = false;
}
// Apply frame-time-scaled deltas, then roll the current samples into the
// "previous" slots (using the filtered horizontal value).
MyAvatar.headPitch += (handCurrentVerticalRotation - handPreviousVerticalRotation)*PITCH_MULTIPLIER*deltaTime;
MyAvatar.headYaw -= (handCurrentHorizontalRotationFiltered - handPreviousHorizontalRotation)*YAW_MULTIPLIER*deltaTime;
handPreviousVerticalRotation = handCurrentVerticalRotation;
handPreviousHorizontalRotation = handCurrentHorizontalRotationFiltered;
}
// Re-center the avatar's head yaw when the script shuts down.
function clean() {
    MyAvatar.headYaw = 0.0;
}
// Run every frame; reset the head yaw when the script stops.
Script.update.connect(update);
Script.scriptEnding.connect(clean);

View file

@ -0,0 +1,265 @@
//
// colorBusterWand.js
//
// Created by James B. Pollack @imgntn on 11/2/2015
// Copyright 2015 High Fidelity, Inc.
//
// This is the entity script that attaches to a wand for the Color Busters game
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function() {
    Script.include("../../../libraries/utils.js");

    var COMBINED_COLOR_DURATION = 5; // seconds before a combined color reverts
    var INDICATOR_OFFSET_UP = 0.40;  // meters above the wand for the indicator cube

    var REMOVE_CUBE_SOUND_URL = 'http://hifi-public.s3.amazonaws.com/sounds/color_busters/boop.wav';
    var COMBINE_COLORS_SOUND_URL = 'http://hifi-public.s3.amazonaws.com/sounds/color_busters/powerup.wav';

    var COLOR_INDICATOR_DIMENSIONS = {
        x: 0.10,
        y: 0.10,
        z: 0.10
    };

    // RGB values for every color name a wand can display (three primaries and
    // the three secondaries they mix into).
    var COLOR_RGB = {
        red: { red: 255, green: 0, blue: 0 },
        yellow: { red: 255, green: 255, blue: 0 },
        blue: { red: 0, green: 0, blue: 255 },
        orange: { red: 255, green: 165, blue: 0 },
        violet: { red: 128, green: 0, blue: 128 },
        green: { red: 0, green: 255, blue: 0 }
    };

    var _this;

    // Entity script attached to a Color Busters wand: combines colors with
    // other wands on contact and removes cubes matching the wand's color.
    function ColorBusterWand() {
        _this = this;
    }

    ColorBusterWand.prototype = {
        combinedColorsTimer: null, // pending revert-to-original-color timeout, if any
        soundIsPlaying: false,

        // Cache the entity ID and pre-fetch the sound effects.
        preload: function(entityID) {
            print("preload");
            this.entityID = entityID;
            this.REMOVE_CUBE_SOUND = SoundCache.getSound(REMOVE_CUBE_SOUND_URL);
            this.COMBINE_COLORS_SOUND = SoundCache.getSound(COMBINE_COLORS_SOUND_URL);
        },

        // Collision dispatcher: wand-on-wand combines primary colors,
        // wand-on-cube removes cubes whose color matches this wand's color.
        collisionWithEntity: function(me, otherEntity, collision) {
            var otherProperties = Entities.getEntityProperties(otherEntity, ["name", "userData"]);
            var myProperties = Entities.getEntityProperties(me, ["userData"]);
            var myUserData = JSON.parse(myProperties.userData);
            var otherUserData = JSON.parse(otherProperties.userData);

            if (otherProperties.name === 'Hifi-ColorBusterWand') {
                print('HIT ANOTHER COLOR WAND!!');
                if (otherUserData.hifiColorBusterWandKey.colorLocked !== true && myUserData.hifiColorBusterWandKey.colorLocked !== true) {
                    if (otherUserData.hifiColorBusterWandKey.originalColorName === myUserData.hifiColorBusterWandKey.originalColorName) {
                        print('BUT ITS THE SAME COLOR!');
                        return;
                    } else {
                        print('COMBINE COLORS!' + this.entityID);
                        this.combineColorsWithOtherWand(otherUserData.hifiColorBusterWandKey.originalColorName, myUserData.hifiColorBusterWandKey.originalColorName);
                    }
                }
            }

            if (otherProperties.name === 'Hifi-ColorBusterCube') {
                if (otherUserData.hifiColorBusterCubeKey.originalColorName === myUserData.hifiColorBusterWandKey.currentColor) {
                    print('HIT THE SAME COLOR CUBE');
                    this.removeCubeOfSameColor(otherEntity);
                } else {
                    print('HIT A CUBE OF A DIFFERENT COLOR');
                }
            }
        },

        // Mix this wand's primary color with another wand's primary color,
        // holding the resulting secondary color for COMBINED_COLOR_DURATION
        // seconds before reverting. (Was: `==` comparisons — now `===`.)
        combineColorsWithOtherWand: function(otherColor, myColor) {
            print('combining my :' + myColor + " with their: " + otherColor);

            if ((myColor === 'violet') || (myColor === 'orange') || (myColor === 'green')) {
                // Secondary colors don't combine further.
                print('MY WAND ALREADY COMBINED');
                return;
            }

            var newColor;
            if ((otherColor === 'red' && myColor === 'yellow') || (myColor === 'red' && otherColor === 'yellow')) {
                newColor = 'orange';
            }
            if ((otherColor === 'red' && myColor === 'blue') || (myColor === 'red' && otherColor === 'blue')) {
                newColor = 'violet';
            }
            if ((otherColor === 'blue' && myColor === 'yellow') || (myColor === 'blue' && otherColor === 'yellow')) {
                newColor = 'green';
            }

            // Schedule the revert back to the wand's original color.
            _this.combinedColorsTimer = Script.setTimeout(function() {
                _this.resetToOriginalColor(myColor);
                _this.combinedColorsTimer = null;
            }, COMBINED_COLOR_DURATION * 1000);

            setEntityCustomData('hifiColorBusterWandKey', this.entityID, {
                owner: MyAvatar.sessionUUID,
                currentColor: newColor,
                originalColorName: myColor,
                colorLocked: false
            });

            this.playSoundAtCurrentPosition(false);
        },

        // Repaint the floating indicator cube to show newColor. Unknown names
        // yield an undefined color, matching the original if-chain's behavior.
        setCurrentColor: function(newColor) {
            var color = COLOR_RGB[newColor];
            Entities.editEntity(this.colorIndicator, {
                color: color
            });
        },

        // Restore the wand's userData and indicator to its original color.
        resetToOriginalColor: function(myColor) {
            setEntityCustomData('hifiColorBusterWandKey', this.entityID, {
                owner: MyAvatar.sessionUUID,
                currentColor: myColor,
                originalColorName: myColor,
                colorLocked: false
            });
            this.setCurrentColor(myColor);
        },

        // Play the removal sound, notify the cube, and delete it.
        removeCubeOfSameColor: function(cube) {
            this.playSoundAtCurrentPosition(true);
            Entities.callEntityMethod(cube, 'cubeEnding');
            Entities.deleteEntity(cube);
        },

        // Grab lifecycle: spawn the indicator on grab, track it while held,
        // tear it down on release.
        startNearGrab: function() {
            this.currentProperties = Entities.getEntityProperties(this.entityID);
            this.createColorIndicator();
        },

        continueNearGrab: function() {
            this.currentProperties = Entities.getEntityProperties(this.entityID);
            var color = JSON.parse(this.currentProperties.userData).hifiColorBusterWandKey.currentColor;
            this.setCurrentColor(color);
            this.updateColorIndicatorLocation();
        },

        releaseGrab: function() {
            Entities.deleteEntity(this.colorIndicator);
            if (this.combinedColorsTimer !== null) {
                Script.clearTimeout(this.combinedColorsTimer);
                // Fix: clear the handle so a later release can't cancel a
                // fresh timer via a stale ID.
                this.combinedColorsTimer = null;
            }
        },

        // Create the non-colliding indicator cube at the wand's position.
        createColorIndicator: function() {
            var properties = {
                name: 'Hifi-ColorBusterIndicator',
                type: 'Box',
                dimensions: COLOR_INDICATOR_DIMENSIONS,
                position: this.currentProperties.position,
                collisionsWillMove: false,
                ignoreForCollisions: true
            };
            this.colorIndicator = Entities.addEntity(properties);
        },

        // Keep the indicator floating INDICATOR_OFFSET_UP along the wand's up axis.
        updateColorIndicatorLocation: function() {
            var upVector = Quat.getUp(this.currentProperties.rotation);
            var indicatorVector = Vec3.multiply(upVector, INDICATOR_OFFSET_UP);
            var position = Vec3.sum(this.currentProperties.position, indicatorVector);
            Entities.editEntity(this.colorIndicator, {
                position: position,
                rotation: this.currentProperties.rotation
            });
        },

        // Play either the cube-removal or color-combine sound at the wand.
        playSoundAtCurrentPosition: function(isRemoveCubeSound) {
            var position = Entities.getEntityProperties(this.entityID, "position").position;
            var audioProperties = {
                volume: 0.25,
                position: position
            };
            if (isRemoveCubeSound === true) {
                Audio.playSound(this.REMOVE_CUBE_SOUND, audioProperties);
            } else {
                Audio.playSound(this.COMBINE_COLORS_SOUND, audioProperties);
            }
        }
    };

    return new ColorBusterWand();
});

View file

@ -0,0 +1,130 @@
//
// createColorBusterCubes.js
//
// Created by James B. Pollack @imgntn on 11/2/2015
// Copyright 2015 High Fidelity, Inc.
//
// This script creates cubes that can be removed with a Color Buster wand.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// When true, every cube created by this script is deleted on script shutdown.
var DELETE_AT_ENDING = false;
// Edge lengths (meters) of each cube.
var CUBE_DIMENSIONS = {
x: 1,
y: 1,
z: 1
};
// The board is a cube of NUMBER_OF_CUBES_PER_SIDE^3 cubes.
var NUMBER_OF_CUBES_PER_SIDE = 8;
// World position of the board's origin corner; cubes extend +x/+y/+z from here.
var STARTING_CORNER_POSITION = {
x: 100,
y: 100,
z: 100
};
// [name, rgb] pairs a new cube may be assigned: the three primary colors plus
// the three secondary colors wands can mix.
var STARTING_COLORS = [
['red', {
red: 255,
green: 0,
blue: 0
}],
['yellow', {
red: 255,
green: 255,
blue: 0
}],
['blue', {
red: 0,
green: 0,
blue: 255
}],
['orange', {
red: 255,
green: 165,
blue: 0
}],
['violet', {
red: 128,
green: 0,
blue: 128
}],
['green', {
red: 0,
green: 255,
blue: 0
}]
];
// Pick a uniformly random [name, rgb] pair from STARTING_COLORS.
function chooseStartingColor() {
    var index = Math.floor(Math.random() * STARTING_COLORS.length);
    return STARTING_COLORS[index];
}
var cubes = []; // entity IDs of every cube created by this script (for cleanup)
// Create one randomly-colored cube offset from STARTING_CORNER_POSITION by
// the given grid coordinates, remember it in `cubes`, and return its ID.
function createColorBusterCube(row, column, vertical) {
    print('make cube at ' + row + ':' + column + ":" + vertical);

    var startingColor = chooseStartingColor();
    var cubePosition = {
        x: STARTING_CORNER_POSITION.x + row,
        y: STARTING_CORNER_POSITION.y + vertical,
        z: STARTING_CORNER_POSITION.z + column
    };

    // Cubes are static, collidable, and explicitly non-grabbable; the wand
    // script reads originalColorName from userData on collision.
    var cube = Entities.addEntity({
        name: 'Hifi-ColorBusterCube',
        type: 'Box',
        dimensions: CUBE_DIMENSIONS,
        collisionsWillMove: false,
        ignoreForCollisions: false,
        color: startingColor[1],
        position: cubePosition,
        userData: JSON.stringify({
            hifiColorBusterCubeKey: {
                originalColorName: startingColor[0]
            },
            grabbableKey: {
                grabbable: false
            }
        })
    });

    cubes.push(cube);
    return cube;
}
// Build the full NUMBER_OF_CUBES_PER_SIDE^3 lattice, one horizontal layer at
// a time, logging progress as it goes.
function createBoard() {
    for (var vertical = 0; vertical < NUMBER_OF_CUBES_PER_SIDE; vertical++) {
        print('vertical:' + vertical);
        for (var row = 0; row < NUMBER_OF_CUBES_PER_SIDE; row++) {
            print('row:' + row);
            for (var column = 0; column < NUMBER_OF_CUBES_PER_SIDE; column++) {
                print('column:' + column);
                createColorBusterCube(row, column, vertical);
            }
        }
    }
}
// Delete every cube this script created, draining the `cubes` list.
function deleteCubes() {
    while (cubes.length) {
        var lastCube = cubes.pop();
        Entities.deleteEntity(lastCube);
    }
}
// Optionally tear the board down on shutdown, then build it immediately.
if (DELETE_AT_ENDING === true) {
Script.scriptEnding.connect(deleteCubes);
}
createBoard();

View file

@ -0,0 +1,99 @@
//
// createColorBusterWand.js
//
// Created by James B. Pollack @imgntn on 11/2/2015
// Copyright 2015 High Fidelity, Inc.
//
// This script creates a wand that can be used to remove color buster blocks. Touch your wand to someone else's to combine colors.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// When true, the wand created by this script is deleted on script shutdown.
var DELETE_AT_ENDING = false;
// Wand visual model, its collision hull, and the entity script that drives gameplay.
var COLOR_WAND_MODEL_URL = 'http://hifi-public.s3.amazonaws.com/models/color_busters/wand.fbx';
var COLOR_WAND_COLLISION_HULL_URL = 'http://hifi-public.s3.amazonaws.com/models/color_busters/wand_collision_hull.obj';
var COLOR_WAND_SCRIPT_URL = Script.resolvePath('colorBusterWand.js');
// Wand dimensions in meters (a thin rod).
var COLOR_WAND_DIMENSIONS = {
x: 0.04,
y: 0.87,
z: 0.04
};
// NOTE(review): declared but unused in this script — the wand spawns at
// `center` below; confirm whether this constant can be removed.
var COLOR_WAND_START_POSITION = {
x: 0,
y: 0,
z: 0
};
// New wands start as one of the three primary colors ([name, rgb] pairs).
var STARTING_COLORS = [
['red', {
red: 255,
green: 0,
blue: 0
}],
['yellow', {
red: 255,
green: 255,
blue: 0
}],
['blue', {
red: 0,
green: 0,
blue: 255
}]
];
// Spawn point: half a meter up from the avatar, then half a meter toward the camera's facing.
var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
y: 0.5,
z: 0
}), Vec3.multiply(0.5, Quat.getFront(Camera.getOrientation())));
// Pick a uniformly random [name, rgb] pair from STARTING_COLORS.
function chooseStartingColor() {
    var pick = Math.floor(Math.random() * STARTING_COLORS.length);
    return STARTING_COLORS[pick];
}
var wand; // entity ID of the wand created below (kept for cleanup)
// Create the wand entity in front of the avatar with a random primary color
// recorded in its userData; the attached colorBusterWand.js script reads that
// state to drive gameplay.
function createColorBusterWand() {
var startingColor = chooseStartingColor();
var colorBusterWandProperties = {
name: 'Hifi-ColorBusterWand',
type: 'Model',
modelURL: COLOR_WAND_MODEL_URL,
shapeType: 'compound',
compoundShapeURL: COLOR_WAND_COLLISION_HULL_URL,
dimensions: COLOR_WAND_DIMENSIONS,
position: center,
script: COLOR_WAND_SCRIPT_URL,
collisionsWillMove: true,
// Gameplay state read by the wand's entity script: owner, current and
// original color names, and whether the color is locked.
userData: JSON.stringify({
hifiColorBusterWandKey: {
owner: MyAvatar.sessionUUID,
currentColor: startingColor[0],
originalColorName: startingColor[0],
colorLocked: false
},
grabbableKey: {
invertSolidWhileHeld: false
}
})
};
wand = Entities.addEntity(colorBusterWandProperties);
}
// Delete the wand entity created by this script (connected at shutdown).
function deleteWand() {
Entities.deleteEntity(wand);
}
// Optionally clean the wand up on shutdown, then create it immediately.
if (DELETE_AT_ENDING === true) {
Script.scriptEnding.connect(deleteWand);
}
createColorBusterWand();

View file

@ -0,0 +1,743 @@
// The MIT License (MIT)
// Copyright (c) 2012-2015 Bryce Neal
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// Adapted for High Fidelity by James B. Pollack on 11/6/2015
// Public entry point: builds the EasyStar namespace and returns a fresh
// pathfinder instance.
// NOTE(review): assumes eStar() returns the EasyStar namespace object — the
// tail of eStar is beyond this excerpt; confirm it ends with `return EasyStar;`.
loadEasyStar = function() {
var ezStar = eStar();
return new ezStar.js()
}
var eStar = function() {
var EasyStar = EasyStar || {};
/**
* A simple Node that represents a single tile on the grid.
* @param {Object} parent The parent node.
* @param {Number} x The x position on the grid.
* @param {Number} y The y position on the grid.
* @param {Number} costSoFar How far this node is in moves*cost from the start.
* @param {Number} simpleDistanceToTarget Manhatten distance to the end point.
**/
EasyStar.Node = function(parent, x, y, costSoFar, simpleDistanceToTarget) {
this.parent = parent;
this.x = x;
this.y = y;
this.costSoFar = costSoFar;
this.simpleDistanceToTarget = simpleDistanceToTarget;
/**
* @return {Number} Best guess distance of a cost using this node.
* (The A* "f" value: cost so far plus the Manhattan-distance heuristic.)
**/
this.bestGuessDistance = function() {
return this.costSoFar + this.simpleDistanceToTarget;
}
};
// Constants: which list a node currently belongs to during a search.
EasyStar.Node.OPEN_LIST = 0;
EasyStar.Node.CLOSED_LIST = 1;
/**
* This is an improved Priority Queue data type implementation that can be used to sort any object type.
* It uses a technique called a binary heap.
*
* For more on binary heaps see: http://en.wikipedia.org/wiki/Binary_heap
*
* @param {String} criteria The criteria by which to sort the objects.
* This should be a property of the objects you're sorting.
*
* @param {Number} heapType either PriorityQueue.MAX_HEAP or PriorityQueue.MIN_HEAP.
**/
EasyStar.PriorityQueue = function(criteria, heapType) {
this.length = 0; //The current length of heap.
var queue = []; // backing array holding the binary heap
var isMax = false;
//Constructor: pick ordering mode; any other heapType is rejected.
if (heapType == EasyStar.PriorityQueue.MAX_HEAP) {
isMax = true;
} else if (heapType == EasyStar.PriorityQueue.MIN_HEAP) {
isMax = false;
} else {
throw heapType + " not supported.";
}
/**
* Inserts the value into the heap and sorts it.
*
* @param value The object to insert into the heap.
* @throws if value lacks the sort criteria property.
**/
this.insert = function(value) {
if (!value.hasOwnProperty(criteria)) {
throw "Cannot insert " + value + " because it does not have a property by the name of " + criteria + ".";
}
queue.push(value);
this.length++;
bubbleUp(this.length - 1);
}
/**
* Peeks at the highest priority element.
*
* @return the highest priority element
**/
this.getHighestPriorityElement = function() {
return queue[0];
}
/**
* Removes and returns the highest priority element from the queue.
*
* @return the highest priority element
* @throws if the queue is empty.
**/
this.shiftHighestPriorityElement = function() {
if (this.length === 0) {
throw ("There are no more elements in your priority queue.");
} else if (this.length === 1) {
var onlyValue = queue[0];
queue = [];
this.length = 0;
return onlyValue;
}
// Standard heap pop: move the last element to the root, then sift it down.
var oldRoot = queue[0];
var newRoot = queue.pop();
this.length--;
queue[0] = newRoot;
swapUntilQueueIsCorrect(0);
return oldRoot;
}
// Restore the heap property upward from `index` after an insert.
var bubbleUp = function(index) {
if (index === 0) {
return;
}
var parent = getParentOf(index);
if (evaluate(index, parent)) {
swap(index, parent);
bubbleUp(parent);
} else {
return;
}
}
// Sift the element at index `value` down until the heap property holds.
var swapUntilQueueIsCorrect = function(value) {
var left = getLeftOf(value);
var right = getRightOf(value);
if (evaluate(left, value)) {
swap(value, left);
swapUntilQueueIsCorrect(left);
} else if (evaluate(right, value)) {
swap(value, right);
swapUntilQueueIsCorrect(right);
} else if (value == 0) {
return;
} else {
swapUntilQueueIsCorrect(0);
}
}
// Exchange two heap elements by index.
var swap = function(self, target) {
var placeHolder = queue[self];
queue[self] = queue[target];
queue[target] = placeHolder;
}
// True when queue[self] should be ordered above queue[target], respecting
// isMax; `criteria` may name either a plain property or a method.
var evaluate = function(self, target) {
if (queue[target] === undefined || queue[self] === undefined) {
return false;
}
var selfValue;
var targetValue;
// Check if the criteria should be the result of a function call.
if (typeof queue[self][criteria] === 'function') {
selfValue = queue[self][criteria]();
targetValue = queue[target][criteria]();
} else {
selfValue = queue[self][criteria];
targetValue = queue[target][criteria];
}
if (isMax) {
if (selfValue > targetValue) {
return true;
} else {
return false;
}
} else {
if (selfValue < targetValue) {
return true;
} else {
return false;
}
}
}
// Binary-heap index arithmetic over the flat backing array.
var getParentOf = function(index) {
return Math.floor((index - 1) / 2);
}
var getLeftOf = function(index) {
return index * 2 + 1;
}
var getRightOf = function(index) {
return index * 2 + 2;
}
};
// Constants: heap ordering modes accepted by the constructor.
EasyStar.PriorityQueue.MAX_HEAP = 0;
EasyStar.PriorityQueue.MIN_HEAP = 1;
/**
* Represents a single instance of EasyStar.
* A path that is in the queue to eventually be found.
*/
// Per-search working state for one queued pathfinding request.
EasyStar.instance = function() {
this.isDoneCalculating = true;
this.pointsToAvoid = {};
// NOTE(review): the bare expressions below are documentation-only field
// declarations (they evaluate to undefined); values are presumably assigned
// when a search is queued — the assigning code is not visible in this excerpt.
this.startX;
this.callback;
this.startY;
this.endX;
this.endY;
this.nodeHash = {};
this.openList;
};
/**
* EasyStar.js
* github.com/prettymuchbryce/EasyStarJS
* Licensed under the MIT license.
*
* Implementation By Bryce Neal (@prettymuchbryce)
**/
EasyStar.js = function() {
var STRAIGHT_COST = 1.0;
var DIAGONAL_COST = 1.4;
var syncEnabled = false;
var pointsToAvoid = {};
var collisionGrid;
var costMap = {};
var pointsToCost = {};
var allowCornerCutting = true;
var iterationsSoFar;
var instances = [];
var iterationsPerCalculation = Number.MAX_VALUE;
var acceptableTiles;
var diagonalsEnabled = false;
/**
* Sets the collision grid that EasyStar uses.
*
* @param {Array|Number} tiles An array of numbers that represent
* which tiles in your grid should be considered
* acceptable, or "walkable".
**/
this.setAcceptableTiles = function(tiles) {
if (tiles instanceof Array) {
// Array
acceptableTiles = tiles;
} else if (!isNaN(parseFloat(tiles)) && isFinite(tiles)) {
// Number
acceptableTiles = [tiles];
}
};
/**
* Enables sync mode for this EasyStar instance..
* if you're into that sort of thing.
**/
this.enableSync = function() {
syncEnabled = true;
};
/**
* Disables sync mode for this EasyStar instance.
**/
this.disableSync = function() {
syncEnabled = false;
};
/**
* Enable diagonal pathfinding.
*/
this.enableDiagonals = function() {
diagonalsEnabled = true;
}
/**
* Disable diagonal pathfinding.
*/
this.disableDiagonals = function() {
diagonalsEnabled = false;
}
/**
 * Sets the collision grid that EasyStar uses.
 *
 * @param {Array} grid The collision grid that this EasyStar instance will read from.
 * This should be a 2D Array of Numbers.
 **/
this.setGrid = function(grid) {
    collisionGrid = grid;
    // Ensure every tile type present in the grid has a default cost of 1.
    for (var y = 0; y < collisionGrid.length; y++) {
        for (var x = 0; x < collisionGrid[0].length; x++) {
            if (!costMap[collisionGrid[y][x]]) {
                costMap[collisionGrid[y][x]] = 1;
            }
        }
    }
};
/**
 * Sets the tile cost for a particular tile type.
 *
 * @param {Number} tileType The tile type to set the cost for.
 * @param {Number} cost The multiplicative cost associated with the given tile.
 **/
this.setTileCost = function(tileType, cost) {
costMap[tileType] = cost;
};
/**
 * Sets an additional cost for a particular point.
 * Overrides the cost from setTileCost.
 *
 * @param {Number} x The x value of the point to cost.
 * @param {Number} y The y value of the point to cost.
 * @param {Number} cost The multiplicative cost associated with the given point.
 **/
this.setAdditionalPointCost = function(x, y, cost) {
    pointsToCost[x + '_' + y] = cost;
};
/**
 * Remove the additional cost for a particular point.
 *
 * @param {Number} x The x value of the point to stop costing.
 * @param {Number} y The y value of the point to stop costing.
 **/
this.removeAdditionalPointCost = function(x, y) {
    delete pointsToCost[x + '_' + y];
};
/**
 * Remove all additional point costs.
 **/
this.removeAllAdditionalPointCosts = function() {
    pointsToCost = {};
};
/**
 * Sets the number of search iterations per calculation.
 * A lower number provides a slower result, but more practical if you
 * have a large tile-map and don't want to block your thread while
 * finding a path.
 *
 * @param {Number} iterations The number of searches to perform per calculate() call.
 **/
this.setIterationsPerCalculation = function(iterations) {
iterationsPerCalculation = iterations;
};
/**
 * Avoid a particular point on the grid,
 * regardless of whether or not it is an acceptable tile.
 *
 * @param {Number} x The x value of the point to avoid.
 * @param {Number} y The y value of the point to avoid.
 **/
this.avoidAdditionalPoint = function(x, y) {
// Keyed "x_y"; consulted in checkAdjacentNode before expanding a neighbor.
pointsToAvoid[x + "_" + y] = 1;
};
/**
 * Stop avoiding a particular point on the grid.
 *
 * @param {Number} x The x value of the point to stop avoiding.
 * @param {Number} y The y value of the point to stop avoiding.
 **/
this.stopAvoidingAdditionalPoint = function(x, y) {
delete pointsToAvoid[x + "_" + y];
};
/**
 * Enables corner cutting in diagonal movement: diagonal steps are
 * allowed even when an adjacent orthogonal tile is blocked.
 **/
this.enableCornerCutting = function() {
allowCornerCutting = true;
};
/**
 * Disables corner cutting in diagonal movement.
 **/
this.disableCornerCutting = function() {
allowCornerCutting = false;
};
/**
 * Stop avoiding all additional points on the grid.
 **/
this.stopAvoidingAllAdditionalPoints = function() {
pointsToAvoid = {};
};
/**
 * Find a path.
 *
 * @param {Number} startX The X position of the starting point.
 * @param {Number} startY The Y position of the starting point.
 * @param {Number} endX The X position of the ending point.
 * @param {Number} endY The Y position of the ending point.
 * @param {Function} callback A function that is called with an array of
 * points when your path is found, or with null when no path exists.
 **/
this.findPath = function(startX, startY, endX, endY, callback) {
    // Wraps the callback for sync vs async logic.
    var callbackWrapper = function(result) {
        if (syncEnabled) {
            callback(result);
        } else {
            Script.setTimeout(function() {
                callback(result);
            }, 1);
        }
    };
    // No acceptable tiles were set.
    if (acceptableTiles === undefined) {
        throw new Error("You can't set a path without first calling setAcceptableTiles() on EasyStar.");
    }
    // No grid was set.
    if (collisionGrid === undefined) {
        throw new Error("You can't set a path without first calling setGrid() on EasyStar.");
    }
    // Start or end point outside of scope.
    // Fixed: the lower-bound check previously tested `endX < 0` twice and
    // never rejected a negative endY.
    if (startX < 0 || startY < 0 || endX < 0 || endY < 0 ||
        startX > collisionGrid[0].length - 1 || startY > collisionGrid.length - 1 ||
        endX > collisionGrid[0].length - 1 || endY > collisionGrid.length - 1) {
        throw new Error("Your start or end point is outside the scope of your grid.");
    }
    // Start and end are the same tile.
    if (startX === endX && startY === endY) {
        callbackWrapper([]);
        return;
    }
    // End point is not an acceptable tile.
    var endTile = collisionGrid[endY][endX];
    var isAcceptable = false;
    for (var i = 0; i < acceptableTiles.length; i++) {
        if (endTile === acceptableTiles[i]) {
            isAcceptable = true;
            break;
        }
    }
    if (isAcceptable === false) {
        callbackWrapper(null);
        return;
    }
    // Create and queue the pathfinding request; calculate() drains the queue.
    var instance = new EasyStar.instance();
    instance.openList = new EasyStar.PriorityQueue("bestGuessDistance", EasyStar.PriorityQueue.MIN_HEAP);
    instance.isDoneCalculating = false;
    instance.nodeHash = {};
    instance.startX = startX;
    instance.startY = startY;
    instance.endX = endX;
    instance.endY = endY;
    instance.callback = callbackWrapper;
    instance.openList.insert(coordinateToNode(instance, instance.startX,
        instance.startY, null, STRAIGHT_COST));
    instances.push(instance);
};
/**
 * This method steps through the A* Algorithm in an attempt to
 * find your path(s). It will search 4-8 tiles (depending on diagonals) for every calculation.
 * You can change the number of calculations done in a call by using
 * easystar.setIterationsPerCalculation().
 **/
this.calculate = function() {
if (instances.length === 0 || collisionGrid === undefined || acceptableTiles === undefined) {
return;
}
for (iterationsSoFar = 0; iterationsSoFar < iterationsPerCalculation; iterationsSoFar++) {
if (instances.length === 0) {
return;
}
if (syncEnabled) {
// If this is a sync instance, we want to make sure that it calculates synchronously.
iterationsSoFar = 0;
}
// Couldn't find a path: the open list for the front instance is exhausted.
if (instances[0].openList.length === 0) {
var ic = instances[0];
ic.callback(null);
instances.shift();
continue;
}
// Expand the most promising node and mark it closed so it is not revisited.
var searchNode = instances[0].openList.shiftHighestPriorityElement();
var tilesToSearch = [];
searchNode.list = EasyStar.Node.CLOSED_LIST;
// Gather in-bounds orthogonal neighbors: up, right, down, left.
if (searchNode.y > 0) {
tilesToSearch.push({
instance: instances[0],
searchNode: searchNode,
x: 0,
y: -1,
cost: STRAIGHT_COST * getTileCost(searchNode.x, searchNode.y - 1)
});
}
if (searchNode.x < collisionGrid[0].length - 1) {
tilesToSearch.push({
instance: instances[0],
searchNode: searchNode,
x: 1,
y: 0,
cost: STRAIGHT_COST * getTileCost(searchNode.x + 1, searchNode.y)
});
}
if (searchNode.y < collisionGrid.length - 1) {
tilesToSearch.push({
instance: instances[0],
searchNode: searchNode,
x: 0,
y: 1,
cost: STRAIGHT_COST * getTileCost(searchNode.x, searchNode.y + 1)
});
}
if (searchNode.x > 0) {
tilesToSearch.push({
instance: instances[0],
searchNode: searchNode,
x: -1,
y: 0,
cost: STRAIGHT_COST * getTileCost(searchNode.x - 1, searchNode.y)
});
}
if (diagonalsEnabled) {
// Diagonal neighbors are admitted only when corner cutting is allowed,
// or when both adjacent orthogonal tiles are walkable.
if (searchNode.x > 0 && searchNode.y > 0) {
if (allowCornerCutting ||
(isTileWalkable(collisionGrid, acceptableTiles, searchNode.x, searchNode.y - 1) &&
isTileWalkable(collisionGrid, acceptableTiles, searchNode.x - 1, searchNode.y))) {
tilesToSearch.push({
instance: instances[0],
searchNode: searchNode,
x: -1,
y: -1,
cost: DIAGONAL_COST * getTileCost(searchNode.x - 1, searchNode.y - 1)
});
}
}
if (searchNode.x < collisionGrid[0].length - 1 && searchNode.y < collisionGrid.length - 1) {
if (allowCornerCutting ||
(isTileWalkable(collisionGrid, acceptableTiles, searchNode.x, searchNode.y + 1) &&
isTileWalkable(collisionGrid, acceptableTiles, searchNode.x + 1, searchNode.y))) {
tilesToSearch.push({
instance: instances[0],
searchNode: searchNode,
x: 1,
y: 1,
cost: DIAGONAL_COST * getTileCost(searchNode.x + 1, searchNode.y + 1)
});
}
}
if (searchNode.x < collisionGrid[0].length - 1 && searchNode.y > 0) {
if (allowCornerCutting ||
(isTileWalkable(collisionGrid, acceptableTiles, searchNode.x, searchNode.y - 1) &&
isTileWalkable(collisionGrid, acceptableTiles, searchNode.x + 1, searchNode.y))) {
tilesToSearch.push({
instance: instances[0],
searchNode: searchNode,
x: 1,
y: -1,
cost: DIAGONAL_COST * getTileCost(searchNode.x + 1, searchNode.y - 1)
});
}
}
if (searchNode.x > 0 && searchNode.y < collisionGrid.length - 1) {
if (allowCornerCutting ||
(isTileWalkable(collisionGrid, acceptableTiles, searchNode.x, searchNode.y + 1) &&
isTileWalkable(collisionGrid, acceptableTiles, searchNode.x - 1, searchNode.y))) {
tilesToSearch.push({
instance: instances[0],
searchNode: searchNode,
x: -1,
y: 1,
cost: DIAGONAL_COST * getTileCost(searchNode.x - 1, searchNode.y + 1)
});
}
}
}
// First sort all of the potential nodes we could search by their cost + heuristic distance.
tilesToSearch.sort(function(a, b) {
var aCost = a.cost + getDistance(searchNode.x + a.x, searchNode.y + a.y, instances[0].endX, instances[0].endY)
var bCost = b.cost + getDistance(searchNode.x + b.x, searchNode.y + b.y, instances[0].endX, instances[0].endY)
if (aCost < bCost) {
return -1;
} else if (aCost === bCost) {
return 0;
} else {
return 1;
}
});
var isDoneCalculating = false;
// Search all of the surrounding nodes
for (var i = 0; i < tilesToSearch.length; i++) {
checkAdjacentNode(tilesToSearch[i].instance, tilesToSearch[i].searchNode,
tilesToSearch[i].x, tilesToSearch[i].y, tilesToSearch[i].cost);
// checkAdjacentNode sets isDoneCalculating on the instance when it
// reaches the destination.
if (tilesToSearch[i].instance.isDoneCalculating === true) {
isDoneCalculating = true;
break;
}
}
// Finished instance: drop it and move on to the next queued request.
if (isDoneCalculating) {
instances.shift();
continue;
}
}
};
// Private methods follow
// Examines the neighbor of `searchNode` at offset (x, y). If it is the
// destination, reconstructs the path via parent links and fires the
// instance callback; otherwise opens the node or, if already open,
// keeps the cheaper route to it.
var checkAdjacentNode = function(instance, searchNode, x, y, cost) {
var adjacentCoordinateX = searchNode.x + x;
var adjacentCoordinateY = searchNode.y + y;
if (pointsToAvoid[adjacentCoordinateX + "_" + adjacentCoordinateY] === undefined) {
// Handles the case where we have found the destination
if (instance.endX === adjacentCoordinateX && instance.endY === adjacentCoordinateY) {
instance.isDoneCalculating = true;
var path = [];
var pathLen = 0;
path[pathLen] = {
x: adjacentCoordinateX,
y: adjacentCoordinateY
};
pathLen++;
path[pathLen] = {
x: searchNode.x,
y: searchNode.y
};
pathLen++;
// Walk parent links back to the start, then reverse so the path
// runs start -> destination.
var parent = searchNode.parent;
while (parent != null) {
path[pathLen] = {
x: parent.x,
y: parent.y
};
pathLen++;
parent = parent.parent;
}
path.reverse();
var ic = instance;
var ip = path;
ic.callback(ip);
return
}
if (isTileWalkable(collisionGrid, acceptableTiles, adjacentCoordinateX, adjacentCoordinateY)) {
var node = coordinateToNode(instance, adjacentCoordinateX,
adjacentCoordinateY, searchNode, cost);
if (node.list === undefined) {
// Unvisited: add to the open list.
node.list = EasyStar.Node.OPEN_LIST;
instance.openList.insert(node);
} else if (node.list === EasyStar.Node.OPEN_LIST) {
// Already open: keep the cheaper route to this node.
if (searchNode.costSoFar + cost < node.costSoFar) {
node.costSoFar = searchNode.costSoFar + cost;
node.parent = searchNode;
}
}
}
}
};
// Helpers
// True when the tile at (x, y) holds one of the acceptable tile types.
var isTileWalkable = function(collisionGrid, acceptableTiles, x, y) {
    return acceptableTiles.indexOf(collisionGrid[y][x]) !== -1;
};
// Cost of entering (x, y): the per-point override when one is set,
// otherwise the cost registered for that tile's type.
var getTileCost = function(x, y) {
return pointsToCost[x + '_' + y] || costMap[collisionGrid[y][x]]
};
// Returns the (cached) search node for (x, y) within this instance,
// creating it when it does not exist yet. `parent` and `cost` are only
// used on creation.
var coordinateToNode = function(instance, x, y, parent, cost) {
    if (instance.nodeHash[x + "_" + y] !== undefined) {
        return instance.nodeHash[x + "_" + y];
    }
    var simpleDistanceToTarget = getDistance(x, y, instance.endX, instance.endY);
    // Declare costSoFar up front instead of relying on `var` hoisting out
    // of the if-branch.
    var costSoFar;
    if (parent !== null) {
        costSoFar = parent.costSoFar + cost;
    } else {
        costSoFar = simpleDistanceToTarget;
    }
    var node = new EasyStar.Node(parent, x, y, costSoFar, simpleDistanceToTarget);
    instance.nodeHash[x + "_" + y] = node;
    return node;
};
// Euclidean distance between (x1, y1) and (x2, y2), written without the
// original's parameter-mutating compound assignments.
var getDistance = function(x1, y1, x2, y2) {
    var dx = x2 - x1;
    var dy = y2 - y1;
    return Math.sqrt(dx * dx + dy * dy);
};
}
return EasyStar
}

View file

@ -0,0 +1,270 @@
//
// easyStarExample.js
//
// Created by James B. Pollack @imgntn on 11/9/2015
// Copyright 2015 High Fidelity, Inc.
//
// This is a script that sets up a grid of obstacles and passable tiles, finds a path, and then moves an entity along the path.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// To-Do:
// Abstract start position and make tiles, spheres, etc. relative
// Handle dynamically changing grids
Script.include('easyStar.js');
var easystar = loadEasyStar();
Script.include('tween.js');
var TWEEN = loadTween();
// Milliseconds per path segment when tweening the sphere.
var ANIMATION_DURATION = 350;
// Board layout: 0 = walkable tile, 1 = obstacle.
var grid = [
[0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 1, 1],
[0, 0, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]
];
easystar.setGrid(grid);
easystar.setAcceptableTiles([0]);
easystar.enableCornerCutting();
// Ask for a path from (0, 0) to (8, 0); the callback fires from the
// calculate() ticks connected below and disconnects the ticker either way.
easystar.findPath(0, 0, 8, 0, function(path) {
if (path === null) {
print("Path was not found.");
Script.update.disconnect(tickEasyStar);
} else {
print('path' + JSON.stringify(path));
convertPath(path);
Script.update.disconnect(tickEasyStar);
}
});
// Pump the A* solver every frame until a result arrives.
var tickEasyStar = function() {
easystar.calculate();
}
Script.update.connect(tickEasyStar);
// The sphere that will get moved along the found path.
var playerSphere = Entities.addEntity({
type: 'Sphere',
shape: 'Sphere',
color: {
red: 255,
green: 0,
blue: 0
},
dimensions: {
x: 0.5,
y: 0.5,
z: 0.5
},
position: {
x: 0,
y: 0,
z: 0
},
gravity: {
x: 0,
y: -9.8,
z: 0
},
collisionsWillMove: true,
linearDamping: 0.2
});
// Toss the sphere upward once a second so it visibly hops while gravity
// pulls it back down.
Script.setInterval(function(){
Entities.editEntity(playerSphere,{
velocity:{
x:0,
y:4,
z:0
}
})
},1000)
// Cached entity properties, refreshed on every tween update.
var sphereProperties;
// For keeping track of entities so cleanup() can delete them.
var obstacles = [];
var passables = [];
// Spawns a blue box one unit below the walkable grid cell (posX, posY)
// and records it so cleanup() can delete it later.
function createPassableAtTilePosition(posX, posY) {
    var passable = Entities.addEntity({
        type: 'Box',
        shapeType: 'Box',
        dimensions: {
            x: 1,
            y: 1,
            z: 1
        },
        position: {
            x: posY,
            y: -1,
            z: posX
        },
        color: {
            red: 0,
            green: 0,
            blue: 255
        }
    });
    passables.push(passable);
}
// Spawns a green, two-unit-tall box at the blocked grid cell
// (posX, posY) and records it so cleanup() can delete it later.
function createObstacleAtTilePosition(posX, posY) {
    var obstacle = Entities.addEntity({
        type: 'Box',
        shapeType: 'Box',
        dimensions: {
            x: 1,
            y: 2,
            z: 1
        },
        position: {
            x: posY,
            y: 0,
            z: posX
        },
        color: {
            red: 0,
            green: 255,
            blue: 0
        }
    });
    obstacles.push(obstacle);
}
// Builds the visible board: a green obstacle box for every 1 in the
// grid and a blue passable box for every 0.
function createObstacles(grid) {
    for (var rowIndex = 0; rowIndex < grid.length; rowIndex++) {
        var row = grid[rowIndex];
        for (var colIndex = 0; colIndex < row.length; colIndex++) {
            if (row[colIndex] === 1) {
                createObstacleAtTilePosition(rowIndex, colIndex);
            } else if (row[colIndex] === 0) {
                createPassableAtTilePosition(rowIndex, colIndex);
            }
        }
    }
}
createObstacles(grid);
// The sphere's logical (tweened) position on the board; the tweens
// mutate this object and updatePosition() copies it onto the entity.
var currentSpherePosition = {
x: 0,
y: 0,
z: 0
};
// Maps an easystar grid point to world-plane coordinates: grid y becomes
// world x and grid x becomes world z.
function convertPathPointToCoordinates(x, y) {
    var coordinates = {
        x: y,
        z: x
    };
    return coordinates;
}
var convertedPath = [];
// Converts the easystar path points into world-plane coordinates and
// kicks off the tween chain that moves the sphere along them.
function convertPath(path) {
    for (var i = 0; i < path.length; i++) {
        convertedPath.push(convertPathPointToCoordinates(path[i].x, path[i].y));
    }
    createTweenPath(convertedPath);
}
// Writes the tweened x/z position onto the entity while preserving its
// physics-driven y (height).
function updatePosition() {
sphereProperties = Entities.getEntityProperties(playerSphere, "position");
Entities.editEntity(playerSphere, {
position: {
x: currentSpherePosition.z,
y: sphereProperties.position.y,
z: currentSpherePosition.x
}
});
}
// NOTE(review): these two velocity presets are not referenced anywhere
// in this script.
var upVelocity = {
x: 0,
y: 2.5,
z: 0
}
var noVelocity = {
x: 0,
y: -3.5,
z: 0
}
// Builds a chained sequence of tweens, one per leg of the converted
// path, then starts the first one.
function createTweenPath(convertedPath) {
    var i;
    var stepTweens = [];
    // Create one tween per path segment.
    for (i = 0; i < convertedPath.length - 1; i++) {
        var stepTween = new TWEEN.Tween(currentSpherePosition).to(convertedPath[i + 1], ANIMATION_DURATION).onUpdate(updatePosition).onComplete(tweenStep);
        stepTweens.push(stepTween);
    }
    // Guard: a path with fewer than two points produces no tweens, and
    // stepTweens[0].start() would throw.
    if (stepTweens.length === 0) {
        return;
    }
    var j;
    // Chain one tween to the next.
    for (j = 0; j < stepTweens.length - 1; j++) {
        stepTweens[j].chain(stepTweens[j + 1]);
    }
    // Start the tween chain.
    stepTweens[0].start();
}
// NOTE(review): velocityShaper is not referenced anywhere in this script.
var velocityShaper = {
x: 0,
y: 0,
z: 0
}
// Fired when each leg of the tween chain completes.
function tweenStep() {
// print('step between tweens')
}
// Hook tween updates into our update loop.
function updateTweens() {
TWEEN.update();
}
Script.update.connect(updateTweens);
// Deletes every entity this script created and disconnects the tween
// update hook when the script stops.
function cleanup() {
while (obstacles.length > 0) {
Entities.deleteEntity(obstacles.pop());
}
while (passables.length > 0) {
Entities.deleteEntity(passables.pop());
}
Entities.deleteEntity(playerSphere);
Script.update.disconnect(updateTweens);
}
Script.scriptEnding.connect(cleanup);

878
examples/libraries/tween.js Normal file
View file

@ -0,0 +1,878 @@
/**
* Tween.js - Licensed under the MIT license
* https://github.com/tweenjs/tween.js
* ----------------------------------------------
*
* See https://github.com/tweenjs/tween.js/graphs/contributors for the full list of contributors.
* Thank you all, you're awesome!
*/
// Include a performance.now polyfill
(function () {
// The embedding script engine has no `window`; create a stand-in object
// so the polyfill below has somewhere to attach `performance`.
// NOTE(review): this assigns an implicit global named `window`.
window= {}
if ('performance' in window === false) {
window.performance = {};
}
// IE 8
Date.now = (Date.now || function () {
return new Date().getTime();
});
if ('now' in window.performance === false) {
// Anchor now() to navigationStart when available, otherwise to the
// moment this polyfill ran.
var offset = window.performance.timing && window.performance.timing.navigationStart ? window.performance.timing.navigationStart
: Date.now();
window.performance.now = function () {
return Date.now() - offset;
};
}
})();
// The TWEEN namespace: a registry of active tweens plus the update pump.
var TWEEN = TWEEN || (function () {
// All currently registered tweens.
var _tweens = [];
return {
getAll: function () {
return _tweens;
},
removeAll: function () {
_tweens = [];
},
add: function (tween) {
_tweens.push(tween);
},
remove: function (tween) {
var i = _tweens.indexOf(tween);
if (i !== -1) {
_tweens.splice(i, 1);
}
},
// Advances every tween to `time` (defaults to performance.now());
// tweens whose update() returns false are finished and get removed.
// Returns false when there was nothing to update.
update: function (time) {
if (_tweens.length === 0) {
return false;
}
var i = 0;
time = time !== undefined ? time : window.performance.now();
while (i < _tweens.length) {
if (_tweens[i].update(time)) {
i++;
} else {
_tweens.splice(i, 1);
}
}
return true;
}
};
})();
// A single tween: interpolates the numeric properties of `object` from
// their values at start() toward the values given to to(), over
// _duration milliseconds, with optional delay/repeat/yoyo/chaining.
TWEEN.Tween = function (object) {
var _object = object;
var _valuesStart = {};
var _valuesEnd = {};
var _valuesStartRepeat = {};
var _duration = 1000;
var _repeat = 0;
var _yoyo = false;
var _isPlaying = false;
var _reversed = false;
var _delayTime = 0;
var _startTime = null;
var _easingFunction = TWEEN.Easing.Linear.None;
var _interpolationFunction = TWEEN.Interpolation.Linear;
var _chainedTweens = [];
var _onStartCallback = null;
var _onStartCallbackFired = false;
var _onUpdateCallback = null;
var _onCompleteCallback = null;
var _onStopCallback = null;
// Set all starting values present on the target object
for (var field in object) {
_valuesStart[field] = parseFloat(object[field], 10);
}
// Sets the end values (and optionally the duration). Chainable.
this.to = function (properties, duration) {
if (duration !== undefined) {
_duration = duration;
}
_valuesEnd = properties;
return this;
};
// Registers the tween with TWEEN and snapshots start values. Chainable.
this.start = function (time) {
TWEEN.add(this);
_isPlaying = true;
_onStartCallbackFired = false;
_startTime = time !== undefined ? time : window.performance.now();
_startTime += _delayTime;
for (var property in _valuesEnd) {
// Check if an Array was provided as property value
if (_valuesEnd[property] instanceof Array) {
if (_valuesEnd[property].length === 0) {
continue;
}
// Create a local copy of the Array with the start value at the front
_valuesEnd[property] = [_object[property]].concat(_valuesEnd[property]);
}
_valuesStart[property] = _object[property];
if ((_valuesStart[property] instanceof Array) === false) {
_valuesStart[property] *= 1.0; // Ensures we're using numbers, not strings
}
_valuesStartRepeat[property] = _valuesStart[property] || 0;
}
return this;
};
// Unregisters the tween, fires the stop callback, and stops any
// chained tweens. Chainable.
this.stop = function () {
if (!_isPlaying) {
return this;
}
TWEEN.remove(this);
_isPlaying = false;
if (_onStopCallback !== null) {
_onStopCallback.call(_object);
}
this.stopChainedTweens();
return this;
};
this.stopChainedTweens = function () {
for (var i = 0, numChainedTweens = _chainedTweens.length; i < numChainedTweens; i++) {
_chainedTweens[i].stop();
}
};
// --- Chainable configuration setters ---
this.delay = function (amount) {
_delayTime = amount;
return this;
};
this.repeat = function (times) {
_repeat = times;
return this;
};
this.yoyo = function (yoyo) {
_yoyo = yoyo;
return this;
};
this.easing = function (easing) {
_easingFunction = easing;
return this;
};
this.interpolation = function (interpolation) {
_interpolationFunction = interpolation;
return this;
};
// Tweens passed here are started when this tween completes.
this.chain = function () {
_chainedTweens = arguments;
return this;
};
this.onStart = function (callback) {
_onStartCallback = callback;
return this;
};
this.onUpdate = function (callback) {
_onUpdateCallback = callback;
return this;
};
this.onComplete = function (callback) {
_onCompleteCallback = callback;
return this;
};
this.onStop = function (callback) {
_onStopCallback = callback;
return this;
};
// Advances the tween to `time`. Returns true while the tween is still
// active; false tells TWEEN.update() to remove it.
this.update = function (time) {
var property;
var elapsed;
var value;
if (time < _startTime) {
return true;
}
if (_onStartCallbackFired === false) {
if (_onStartCallback !== null) {
_onStartCallback.call(_object);
}
_onStartCallbackFired = true;
}
// Normalized progress in [0, 1], fed through the easing function.
elapsed = (time - _startTime) / _duration;
elapsed = elapsed > 1 ? 1 : elapsed;
value = _easingFunction(elapsed);
for (property in _valuesEnd) {
var start = _valuesStart[property] || 0;
var end = _valuesEnd[property];
if (end instanceof Array) {
_object[property] = _interpolationFunction(end, value);
} else {
// Parses relative end values with start as base (e.g.: +10, -3)
if (typeof (end) === 'string') {
end = start + parseFloat(end, 10);
}
// Protect against non numeric properties.
if (typeof (end) === 'number') {
_object[property] = start + (end - start) * value;
}
}
}
if (_onUpdateCallback !== null) {
_onUpdateCallback.call(_object, value);
}
if (elapsed === 1) {
if (_repeat > 0) {
if (isFinite(_repeat)) {
_repeat--;
}
// Reassign starting values, restart by making startTime = now
for (property in _valuesStartRepeat) {
if (typeof (_valuesEnd[property]) === 'string') {
_valuesStartRepeat[property] = _valuesStartRepeat[property] + parseFloat(_valuesEnd[property], 10);
}
if (_yoyo) {
var tmp = _valuesStartRepeat[property];
_valuesStartRepeat[property] = _valuesEnd[property];
_valuesEnd[property] = tmp;
}
_valuesStart[property] = _valuesStartRepeat[property];
}
if (_yoyo) {
_reversed = !_reversed;
}
_startTime = time + _delayTime;
return true;
} else {
if (_onCompleteCallback !== null) {
_onCompleteCallback.call(_object);
}
for (var i = 0, numChainedTweens = _chainedTweens.length; i < numChainedTweens; i++) {
// Make the chained tweens start exactly at the time they should,
// even if the `update()` method was called way past the duration of the tween
_chainedTweens[i].start(_startTime + _duration);
}
return false;
}
}
return true;
};
};
// Standard Robert Penner-style easing equations. Each function maps a
// normalized progress k in [0, 1] to an eased value; In/Out/InOut give
// acceleration at the start, end, or both.
TWEEN.Easing = {
Linear: {
None: function (k) {
return k;
}
},
Quadratic: {
In: function (k) {
return k * k;
},
Out: function (k) {
return k * (2 - k);
},
InOut: function (k) {
if ((k *= 2) < 1) {
return 0.5 * k * k;
}
return - 0.5 * (--k * (k - 2) - 1);
}
},
Cubic: {
In: function (k) {
return k * k * k;
},
Out: function (k) {
return --k * k * k + 1;
},
InOut: function (k) {
if ((k *= 2) < 1) {
return 0.5 * k * k * k;
}
return 0.5 * ((k -= 2) * k * k + 2);
}
},
Quartic: {
In: function (k) {
return k * k * k * k;
},
Out: function (k) {
return 1 - (--k * k * k * k);
},
InOut: function (k) {
if ((k *= 2) < 1) {
return 0.5 * k * k * k * k;
}
return - 0.5 * ((k -= 2) * k * k * k - 2);
}
},
Quintic: {
In: function (k) {
return k * k * k * k * k;
},
Out: function (k) {
return --k * k * k * k * k + 1;
},
InOut: function (k) {
if ((k *= 2) < 1) {
return 0.5 * k * k * k * k * k;
}
return 0.5 * ((k -= 2) * k * k * k * k + 2);
}
},
Sinusoidal: {
In: function (k) {
return 1 - Math.cos(k * Math.PI / 2);
},
Out: function (k) {
return Math.sin(k * Math.PI / 2);
},
InOut: function (k) {
return 0.5 * (1 - Math.cos(Math.PI * k));
}
},
Exponential: {
In: function (k) {
return k === 0 ? 0 : Math.pow(1024, k - 1);
},
Out: function (k) {
return k === 1 ? 1 : 1 - Math.pow(2, - 10 * k);
},
InOut: function (k) {
if (k === 0) {
return 0;
}
if (k === 1) {
return 1;
}
if ((k *= 2) < 1) {
return 0.5 * Math.pow(1024, k - 1);
}
return 0.5 * (- Math.pow(2, - 10 * (k - 1)) + 2);
}
},
Circular: {
In: function (k) {
return 1 - Math.sqrt(1 - k * k);
},
Out: function (k) {
return Math.sqrt(1 - (--k * k));
},
InOut: function (k) {
if ((k *= 2) < 1) {
return - 0.5 * (Math.sqrt(1 - k * k) - 1);
}
return 0.5 * (Math.sqrt(1 - (k -= 2) * k) + 1);
}
},
// Elastic: overshooting spring with amplitude `a` and period `p`.
Elastic: {
In: function (k) {
var s;
var a = 0.1;
var p = 0.4;
if (k === 0) {
return 0;
}
if (k === 1) {
return 1;
}
if (!a || a < 1) {
a = 1;
s = p / 4;
} else {
s = p * Math.asin(1 / a) / (2 * Math.PI);
}
return - (a * Math.pow(2, 10 * (k -= 1)) * Math.sin((k - s) * (2 * Math.PI) / p));
},
Out: function (k) {
var s;
var a = 0.1;
var p = 0.4;
if (k === 0) {
return 0;
}
if (k === 1) {
return 1;
}
if (!a || a < 1) {
a = 1;
s = p / 4;
} else {
s = p * Math.asin(1 / a) / (2 * Math.PI);
}
return (a * Math.pow(2, - 10 * k) * Math.sin((k - s) * (2 * Math.PI) / p) + 1);
},
InOut: function (k) {
var s;
var a = 0.1;
var p = 0.4;
if (k === 0) {
return 0;
}
if (k === 1) {
return 1;
}
if (!a || a < 1) {
a = 1;
s = p / 4;
} else {
s = p * Math.asin(1 / a) / (2 * Math.PI);
}
if ((k *= 2) < 1) {
return - 0.5 * (a * Math.pow(2, 10 * (k -= 1)) * Math.sin((k - s) * (2 * Math.PI) / p));
}
return a * Math.pow(2, -10 * (k -= 1)) * Math.sin((k - s) * (2 * Math.PI) / p) * 0.5 + 1;
}
},
// Back: pulls slightly past the boundary before settling (overshoot s).
Back: {
In: function (k) {
var s = 1.70158;
return k * k * ((s + 1) * k - s);
},
Out: function (k) {
var s = 1.70158;
return --k * k * ((s + 1) * k + s) + 1;
},
InOut: function (k) {
var s = 1.70158 * 1.525;
if ((k *= 2) < 1) {
return 0.5 * (k * k * ((s + 1) * k - s));
}
return 0.5 * ((k -= 2) * k * ((s + 1) * k + s) + 2);
}
},
Bounce: {
In: function (k) {
return 1 - TWEEN.Easing.Bounce.Out(1 - k);
},
Out: function (k) {
if (k < (1 / 2.75)) {
return 7.5625 * k * k;
} else if (k < (2 / 2.75)) {
return 7.5625 * (k -= (1.5 / 2.75)) * k + 0.75;
} else if (k < (2.5 / 2.75)) {
return 7.5625 * (k -= (2.25 / 2.75)) * k + 0.9375;
} else {
return 7.5625 * (k -= (2.625 / 2.75)) * k + 0.984375;
}
},
InOut: function (k) {
if (k < 0.5) {
return TWEEN.Easing.Bounce.In(k * 2) * 0.5;
}
return TWEEN.Easing.Bounce.Out(k * 2 - 1) * 0.5 + 0.5;
}
}
};
// Interpolation strategies used when a tween's end value is an Array:
// each maps the array of waypoints `v` and progress k in [0, 1] to a value.
TWEEN.Interpolation = {
Linear: function (v, k) {
var m = v.length - 1;
var f = m * k;
var i = Math.floor(f);
var fn = TWEEN.Interpolation.Utils.Linear;
// Extrapolate linearly when k falls outside [0, 1].
if (k < 0) {
return fn(v[0], v[1], f);
}
if (k > 1) {
return fn(v[m], v[m - 1], m - f);
}
return fn(v[i], v[i + 1 > m ? m : i + 1], f - i);
},
// Bezier: single curve through all control points (Bernstein form).
Bezier: function (v, k) {
var b = 0;
var n = v.length - 1;
var pw = Math.pow;
var bn = TWEEN.Interpolation.Utils.Bernstein;
for (var i = 0; i <= n; i++) {
b += pw(1 - k, n - i) * pw(k, i) * v[i] * bn(n, i);
}
return b;
},
// CatmullRom: smooth spline through the points; closed loop when the
// first and last points coincide.
CatmullRom: function (v, k) {
var m = v.length - 1;
var f = m * k;
var i = Math.floor(f);
var fn = TWEEN.Interpolation.Utils.CatmullRom;
if (v[0] === v[m]) {
if (k < 0) {
i = Math.floor(f = m * (1 + k));
}
return fn(v[(i - 1 + m) % m], v[i], v[(i + 1) % m], v[(i + 2) % m], f - i);
} else {
if (k < 0) {
return v[0] - (fn(v[0], v[0], v[1], v[1], -f) - v[0]);
}
if (k > 1) {
return v[m] - (fn(v[m], v[m], v[m - 1], v[m - 1], f - m) - v[m]);
}
return fn(v[i ? i - 1 : 0], v[i], v[m < i + 1 ? m : i + 1], v[m < i + 2 ? m : i + 2], f - i);
}
},
Utils: {
Linear: function (p0, p1, t) {
return (p1 - p0) * t + p0;
},
Bernstein: function (n, i) {
var fc = TWEEN.Interpolation.Utils.Factorial;
return fc(n) / fc(i) / fc(n - i);
},
// Factorial with memoization into the closed-over array `a`.
Factorial: (function () {
var a = [1];
return function (n) {
var s = 1;
if (a[n]) {
return a[n];
}
for (var i = n; i > 1; i--) {
s *= i;
}
a[n] = s;
return s;
};
})(),
CatmullRom: function (p0, p1, p2, p3, t) {
var v0 = (p2 - p0) * 0.5;
var v1 = (p3 - p1) * 0.5;
var t2 = t * t;
var t3 = t * t2;
return (2 * p1 - 2 * p2 + v0 + v1) * t3 + (- 3 * p1 + 3 * p2 - 2 * v0 - v1) * t2 + v0 * t + p1;
}
}
};
// UMD (Universal Module Definition)
(function (root) {
if (typeof define === 'function' && define.amd) {
// AMD
define([], function () {
return TWEEN;
});
} else if (typeof exports === 'object') {
// Node.js
module.exports = TWEEN;
} else {
// Global variable — the branch taken inside the script engine here,
// where neither `define` nor `exports` exists.
root.TWEEN = TWEEN;
}
})(this);
loadTween = function(){
return TWEEN
}

View file

@ -0,0 +1 @@
web: node index.js

View file

@ -0,0 +1,57 @@
//
// index.js
// examples
//
// Created by Eric Levin on 11/10/2015.
// Copyright 2015 High Fidelity, Inc.
//
// This is a simple REST API that allows an interface client script to get a list of files paths from an S3 bucket.
// To change your bucket, modify line 34 to your desired bucket.
// Please refer to http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html
// for instructions on how to configure the SDK to work with your bucket.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
var express = require('express');
var app = express();
var AWS = require('aws-sdk');
var url = require('url');
var querystring = require('querystring');
var _ = require('underscore');
AWS.config.update({
region: "us-east-1"
});
var s3 = new AWS.S3();
app.set('port', (process.env.PORT || 5000));
// GET /?assetDir=<prefix> -> { urls: [key, ...] }
// Lists up to 10 object keys from the hifi-public bucket, starting
// after the given prefix (passed to S3 as the listing Marker).
app.get('/', function(req, res) {
var urlParts = url.parse(req.url)
var query = querystring.parse(urlParts.query);
var params = {
Bucket: "hifi-public",
Marker: query.assetDir,
MaxKeys: 10
};
s3.listObjects(params, function(err, data) {
if (err) {
console.log(err, err.stack);
// NOTE(review): this responds 200 with a plain "ERROR" body; consider
// res.status(500) so clients can detect the failure.
res.send("ERROR")
} else {
var keys = _.pluck(data.Contents, 'Key')
res.send({
urls: keys
});
}
});
});
app.listen(app.get('port'), function() {
console.log('Node app is running on port', app.get('port'));
})

View file

@ -0,0 +1,18 @@
{
"name": "s3fileserver",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "eric",
"license": "ISC",
"dependencies": {
"aws-sdk": "^2.2.15",
"express": "^4.13.3",
"querystring": "^0.2.0",
"underscore": "^1.8.3",
"url": "^0.11.0"
}
}

View file

@ -0,0 +1,95 @@
//
// dynamicLoader.js
// examples
//
// Created by Eric Levin on 11/10/2015.
// Copyright 2015 High Fidelity, Inc.
//
// This script is an example of a way to dynamically load and place models in a grid from a specified s3 directory on the hifi-public bucket.
// The directory can be specified by changing the query string variable on line 19 to the desired relative path.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
var BASE_URL = "https://hifi-public.s3.amazonaws.com/";
var models = [];
// Floor goes 3m in front of the avatar and 5m below it.
// (Removed a stray doubled semicolon that ended this statement twice.)
var floorPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(Camera.getOrientation())));
floorPosition.y = MyAvatar.position.y - 5;
var floor = Entities.addEntity({
    type: "Model",
    modelURL: "https://hifi-public.s3.amazonaws.com/ozan/3d_marketplace/props/floor/3d_mp_floor.fbx",
    position: floorPosition,
    shapeType: 'box',
    dimensions: {
        x: 1000,
        y: 9,
        z: 1000
    }
});
// Fetch the asset listing synchronously (the third `open` argument is
// false). NOTE(review): a synchronous XMLHttpRequest blocks the script
// thread until the server responds.
var req = new XMLHttpRequest();
req.open("GET", "https://serene-headland-4300.herokuapp.com/?assetDir=ozan/3d_marketplace/sets", false);
req.send();
var res = req.responseText;
// Single declaration (was declared twice, once as an empty array and
// once from the parsed response).
var urls = JSON.parse(res).urls;
if (urls.length > 0) {
    // We've got an array of urls back from server- let's display them in grid
    createGrid();
}
// Lays out the fetched FBX models in two alternating columns on the
// floor, then (after a delay) resets each model to its natural size.
function createGrid() {
    // Only the FBX entries in the listing are placeable models.
    var fbxUrls = urls.filter(function(url) {
        return url.indexOf('fbx') !== -1;
    });
    var modelParams = {
        type: "Model",
        dimensions: {
            x: 10,
            y: 10,
            z: 10
        },
    };
    var modelPosition = {
        x: floorPosition.x + 10,
        y: floorPosition.y + 8.5,
        z: floorPosition.z
    };
    for (var i = 0; i < fbxUrls.length; i++) {
        // Alternate models between a left and a right column.
        if (i % 2 === 0) {
            modelPosition.x = floorPosition.x - 40;
        } else {
            modelPosition.x = floorPosition.x + 40;
        }
        modelPosition.z -= 30;
        modelParams.position = modelPosition;
        modelParams.modelURL = BASE_URL + fbxUrls[i];
        var model = Entities.addEntity(modelParams);
        models.push(model);
    }
    Script.setTimeout(function() {
        // Until we add callbacks on model loaded, we need to set a timeout and hope each model
        // is loaded by the time we hit it in order to set model dimensions correctly.
        for (var i = 0; i < models.length; i++) {
            var modelDimensions = Entities.getEntityProperties(models[i], 'naturalDimensions').naturalDimensions;
            Entities.editEntity(models[i], {dimensions: modelDimensions});
        }
    }, 10000);
}
// Removes the floor and every model this script created when the
// script stops.
function cleanup() {
    Entities.deleteEntity(floor);
    models.forEach(function(model) {
        Entities.deleteEntity(model);
    });
    // Removed a trailing `Entities.deleteEntity(model)` call: `model`
    // was scoped inside createGrid(), so referencing it here threw a
    // ReferenceError, and the forEach above already deletes every model.
}
Script.scriptEnding.connect(cleanup);

View file

@ -1,217 +0,0 @@
//
// originalPositionResetter.js
// toybox
//
// Created by James B. Pollack @imgntn 10/16/2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var TARGET_MODEL_URL = HIFI_PUBLIC_BUCKET + "models/ping_pong_gun/target.fbx";
var TARGET_COLLISION_HULL_URL = HIFI_PUBLIC_BUCKET + "models/ping_pong_gun/target_collision_hull.obj";
var TARGET_DIMENSIONS = {
x: 0.06,
y: 0.42,
z: 0.42
};
var TARGET_ROTATION = Quat.fromPitchYawRollDegrees(0, -55.25, 0);
var targetsScriptURL = Script.resolvePath('../ping_pong_gun/wallTarget.js');
var basketballURL = HIFI_PUBLIC_BUCKET + "models/content/basketball2.fbx";
var NUMBER_OF_BALLS = 4;
var BALL_DIAMETER = 0.30;
// Objects farther than this from their recorded original position are reset.
var RESET_DISTANCE = 1;
var MINIMUM_MOVE_LENGTH = 0.05;
var totalTime = 0;
var lastUpdate = 0;
var UPDATE_INTERVAL = 1 / 5; // 5fps
var Resetter = {
searchForEntitiesToResetToOriginalPosition: function(searchOrigin, objectName) {
var ids = Entities.findEntities(searchOrigin, 5);
var objects = [];
var i;
var entityID;
var name;
for (i = 0; i < ids.length; i++) {
entityID = ids[i];
name = Entities.getEntityProperties(entityID, "name").name;
if (name === objectName) {
//we found an object to reset
objects.push(entityID);
}
}
return objects;
},
deleteObjects: function(objects) {
while (objects.length > 0) {
Entities.deleteEntity(objects.pop());
}
},
createBasketBalls: function() {
var position = {
x: 542.86,
y: 494.84,
z: 475.06
};
var i;
var ballPosition;
var collidingBall;
for (i = 0; i < NUMBER_OF_BALLS; i++) {
ballPosition = {
x: position.x,
y: position.y + BALL_DIAMETER * 2,
z: position.z + (BALL_DIAMETER) - (BALL_DIAMETER * i)
};
collidingBall = Entities.addEntity({
type: "Model",
name: 'Hifi-Basketball',
shapeType: 'Sphere',
position: ballPosition,
dimensions: {
x: BALL_DIAMETER,
y: BALL_DIAMETER,
z: BALL_DIAMETER
},
restitution: 1.0,
linearDamping: 0.00001,
gravity: {
x: 0,
y: -9.8,
z: 0
},
collisionsWillMove: true,
ignoreForCollisions: false,
modelURL: basketballURL,
userData: JSON.stringify({
originalPositionKey: {
originalPosition: ballPosition
},
resetMe: {
resetMe: true
},
grabbable: {
invertSolidWhileHeld: true
}
})
});
}
},
testBallDistanceFromStart: function(balls) {
var resetCount = 0;
balls.forEach(function(ball, index) {
var properties = Entities.getEntityProperties(ball, ["position", "userData"]);
var currentPosition = properties.position;
var originalPosition = properties.userData.originalPositionKey.originalPosition;
var distance = Vec3.subtract(originalPosition, currentPosition);
var length = Vec3.length(distance);
if (length > RESET_DISTANCE) {
Script.setTimeout(function() {
var newPosition = Entities.getEntityProperties(ball, "position").position;
var moving = Vec3.length(Vec3.subtract(currentPosition, newPosition));
if (moving < MINIMUM_MOVE_LENGTH) {
if (resetCount === balls.length) {
this.deleteObjects(balls);
this.createBasketBalls();
}
}
}, 200);
}
});
},
testTargetDistanceFromStart: function(targets) {
targets.forEach(function(target, index) {
var properties = Entities.getEntityProperties(target, ["position", "userData"]);
var currentPosition = properties.position;
var originalPosition = properties.userData.originalPositionKey.originalPosition;
var distance = Vec3.subtract(originalPosition, currentPosition);
var length = Vec3.length(distance);
if (length > RESET_DISTANCE) {
Script.setTimeout(function() {
var newPosition = Entities.getEntityProperties(target, "position").position;
var moving = Vec3.length(Vec3.subtract(currentPosition, newPosition));
if (moving < MINIMUM_MOVE_LENGTH) {
Entities.deleteEntity(target);
var targetProperties = {
name: 'Hifi-Target',
type: 'Model',
modelURL: TARGET_MODEL_URL,
shapeType: 'compound',
collisionsWillMove: true,
dimensions: TARGET_DIMENSIONS,
compoundShapeURL: TARGET_COLLISION_HULL_URL,
position: originalPosition,
rotation: TARGET_ROTATION,
script: targetsScriptURL,
userData: JSON.stringify({
resetMe: {
resetMe: true
},
grabbableKey: {
grabbable: false
},
originalPositionKey: originalPosition
})
};
Entities.addEntity(targetProperties);
}
}, 200);
}
});
}
};
function update(deltaTime) {
if (!Entities.serversExist() || !Entities.canRez()) {
return;
}
totalTime += deltaTime;
// We don't want to edit the entity EVERY update cycle, because that's just a lot
// of wasted bandwidth and extra effort on the server for very little visual gain
if (totalTime - lastUpdate > UPDATE_INTERVAL) {
//do stuff
var balls = Resetter.searchForEntitiesToResetToOriginalPosition({
x: 542.86,
y: 494.84,
z: 475.06
}, "Hifi-Basketball");
var targets = Resetter.searchForEntitiesToResetToOriginalPosition({
x: 548.68,
y: 497.30,
z: 509.74
}, "Hifi-Target");
if (balls.length !== 0) {
Resetter.testBallDistanceFromStart(balls);
}
if (targets.length !== 0) {
Resetter.testTargetDistanceFromStart(targets);
}
lastUpdate = totalTime;
}
}
Script.update.connect(update);

View file

@ -35,7 +35,10 @@
#include <TextRenderer3D.h>
#include <UserActivityLogger.h>
#include <AnimDebugDraw.h>
#include <recording/Deck.h>
#include <recording/Recorder.h>
#include <recording/Clip.h>
#include <recording/Frame.h>
#include "devices/Faceshift.h"
@ -77,6 +80,10 @@ const QString& DEFAULT_AVATAR_COLLISION_SOUND_URL = "https://hifi-public.s3.amaz
const float MyAvatar::ZOOM_MIN = 0.5f;
const float MyAvatar::ZOOM_MAX = 25.0f;
const float MyAvatar::ZOOM_DEFAULT = 1.5f;
static const QString HEADER_NAME = "com.highfidelity.recording.AvatarData";
static recording::FrameType AVATAR_FRAME_TYPE = recording::Frame::TYPE_INVALID;
static std::once_flag frameTypeRegistration;
MyAvatar::MyAvatar(RigPointer rig) :
Avatar(rig),
@ -112,6 +119,19 @@ MyAvatar::MyAvatar(RigPointer rig) :
_audioListenerMode(FROM_HEAD),
_hmdAtRestDetector(glm::vec3(0), glm::quat())
{
using namespace recording;
std::call_once(frameTypeRegistration, [] {
AVATAR_FRAME_TYPE = recording::Frame::registerFrameType(HEADER_NAME);
});
// FIXME how to deal with driving multiple avatars locally?
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this](Frame::Pointer frame) {
qDebug() << "Playback of avatar frame length: " << frame->data.size();
avatarStateFromFrame(frame->data, this);
});
for (int i = 0; i < MAX_DRIVE_KEYS; i++) {
_driveKeys[i] = 0.0f;
}
@ -235,14 +255,12 @@ void MyAvatar::update(float deltaTime) {
simulate(deltaTime);
}
extern QByteArray avatarStateToFrame(const AvatarData* _avatar);
extern void avatarStateFromFrame(const QByteArray& frameData, AvatarData* _avatar);
void MyAvatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("simulate");
// Play back recording
if (_player && _player->isPlaying()) {
_player->play();
}
if (_scale != _targetScale) {
float scale = (1.0f - SMOOTHING_RATIO) * _scale + SMOOTHING_RATIO * _targetScale;
setScale(scale);
@ -310,7 +328,7 @@ void MyAvatar::simulate(float deltaTime) {
// Record avatars movements.
if (_recorder && _recorder->isRecording()) {
_recorder->record();
_recorder->recordFrame(AVATAR_FRAME_TYPE, avatarStateToFrame(this));
}
// consider updating our billboard
@ -580,33 +598,35 @@ bool MyAvatar::isRecording() {
return _recorder && _recorder->isRecording();
}
qint64 MyAvatar::recorderElapsed() {
float MyAvatar::recorderElapsed() {
if (QThread::currentThread() != thread()) {
float result;
QMetaObject::invokeMethod(this, "recorderElapsed", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(float, result));
return result;
}
if (!_recorder) {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
QMetaObject::invokeMethod(this, "recorderElapsed", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
return result;
}
return _recorder->elapsed();
return (float)_recorder->position() / MSECS_PER_SECOND;
}
QMetaObject::Connection _audioClientRecorderConnection;
void MyAvatar::startRecording() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startRecording", Qt::BlockingQueuedConnection);
return;
}
if (!_recorder) {
_recorder = QSharedPointer<Recorder>::create(this);
}
_recorder = std::make_shared<recording::Recorder>();
// connect to AudioClient's signal so we get input audio
auto audioClient = DependencyManager::get<AudioClient>();
connect(audioClient.data(), &AudioClient::inputReceived, _recorder.data(),
&Recorder::recordAudio, Qt::QueuedConnection);
_recorder->startRecording();
_audioClientRecorderConnection = connect(audioClient.data(), &AudioClient::inputReceived, [] {
// FIXME, missing audio data handling
});
setRecordingBasis();
_recorder->start();
}
void MyAvatar::stopRecording() {
@ -618,15 +638,14 @@ void MyAvatar::stopRecording() {
return;
}
if (_recorder) {
// stop grabbing audio from the AudioClient
auto audioClient = DependencyManager::get<AudioClient>();
disconnect(audioClient.data(), 0, _recorder.data(), 0);
_recorder->stopRecording();
QObject::disconnect(_audioClientRecorderConnection);
_audioClientRecorderConnection = QMetaObject::Connection();
_recorder->stop();
clearRecordingBasis();
}
}
void MyAvatar::saveRecording(QString filename) {
void MyAvatar::saveRecording(const QString& filename) {
if (!_recorder) {
qCDebug(interfaceapp) << "There is no recording to save";
return;
@ -636,8 +655,10 @@ void MyAvatar::saveRecording(QString filename) {
Q_ARG(QString, filename));
return;
}
if (_recorder) {
_recorder->saveToFile(filename);
auto clip = _recorder->getClip();
recording::Clip::toFile(filename, clip);
}
}
@ -646,15 +667,18 @@ void MyAvatar::loadLastRecording() {
QMetaObject::invokeMethod(this, "loadLastRecording", Qt::BlockingQueuedConnection);
return;
}
if (!_recorder) {
if (!_recorder || !_recorder->getClip()) {
qCDebug(interfaceapp) << "There is no recording to load";
return;
}
if (!_player) {
_player = QSharedPointer<Player>::create(this);
_player = std::make_shared<recording::Deck>();
}
_player->loadRecording(_recorder->getRecording());
_player->queueClip(_recorder->getClip());
_player->play();
}
void MyAvatar::startAnimation(const QString& url, float fps, float priority,

View file

@ -257,10 +257,10 @@ public slots:
bool setJointReferential(const QUuid& id, int jointIndex);
bool isRecording();
qint64 recorderElapsed();
float recorderElapsed();
void startRecording();
void stopRecording();
void saveRecording(QString filename);
void saveRecording(const QString& filename);
void loadLastRecording();
virtual void rebuildSkeletonBody() override;
@ -312,8 +312,8 @@ private:
const glm::vec3& translation = glm::vec3(), const glm::quat& rotation = glm::quat(), float scale = 1.0f,
bool allowDuplicates = false, bool useSaved = true) override;
const RecorderPointer getRecorder() const { return _recorder; }
const PlayerPointer getPlayer() const { return _player; }
const recording::RecorderPointer getRecorder() const { return _recorder; }
const recording::DeckPointer getPlayer() const { return _player; }
//void beginFollowingHMD();
//bool shouldFollowHMD() const;
@ -361,7 +361,7 @@ private:
eyeContactTarget _eyeContactTarget;
RecorderPointer _recorder;
recording::RecorderPointer _recorder;
glm::vec3 _trackedHeadPosition;

View file

@ -1,3 +1,3 @@
set(TARGET_NAME avatars)
setup_hifi_library(Network Script)
link_hifi_libraries(audio shared networking)
link_hifi_libraries(audio shared networking recording)

View file

@ -16,6 +16,9 @@
#include <QtCore/QDataStream>
#include <QtCore/QThread>
#include <QtCore/QUuid>
#include <QtCore/QJsonDocument>
#include <QtCore/QJsonArray>
#include <QtCore/QJsonObject>
#include <QtNetwork/QNetworkReply>
#include <QtNetwork/QNetworkRequest>
@ -25,6 +28,10 @@
#include <GLMHelpers.h>
#include <StreamUtils.h>
#include <UUID.h>
#include <shared/JSONHelpers.h>
#include <shared/UniformTransform.h>
#include <recording/Deck.h>
#include <recording/Clip.h>
#include "AvatarLogging.h"
#include "AvatarData.h"
@ -62,7 +69,6 @@ AvatarData::AvatarData() :
_targetVelocity(0.0f),
_localAABox(DEFAULT_LOCAL_AABOX_CORNER, DEFAULT_LOCAL_AABOX_SCALE)
{
}
AvatarData::~AvatarData() {
@ -791,7 +797,7 @@ bool AvatarData::isPaused() {
return _player && _player->isPaused();
}
qint64 AvatarData::playerElapsed() {
float AvatarData::playerElapsed() {
if (!_player) {
return 0;
}
@ -801,10 +807,10 @@ qint64 AvatarData::playerElapsed() {
Q_RETURN_ARG(qint64, result));
return result;
}
return _player->elapsed();
return (float)_player->position() / MSECS_PER_SECOND;
}
qint64 AvatarData::playerLength() {
float AvatarData::playerLength() {
if (!_player) {
return 0;
}
@ -814,28 +820,24 @@ qint64 AvatarData::playerLength() {
Q_RETURN_ARG(qint64, result));
return result;
}
return _player->getRecording()->getLength();
return _player->length() / MSECS_PER_SECOND;
}
int AvatarData::playerCurrentFrame() {
return (_player) ? _player->getCurrentFrame() : 0;
}
int AvatarData::playerFrameNumber() {
return (_player && _player->getRecording()) ? _player->getRecording()->getFrameNumber() : 0;
}
void AvatarData::loadRecording(QString filename) {
void AvatarData::loadRecording(const QString& filename) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection,
Q_ARG(QString, filename));
return;
}
if (!_player) {
_player = QSharedPointer<Player>::create(this);
using namespace recording;
ClipPointer clip = Clip::fromFile(filename);
if (!clip) {
qWarning() << "Unable to load clip data from " << filename;
}
_player->loadFromFile(filename);
_player = std::make_shared<Deck>();
_player->queueClip(clip);
}
void AvatarData::startPlaying() {
@ -843,70 +845,56 @@ void AvatarData::startPlaying() {
QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection);
return;
}
if (!_player) {
_player = QSharedPointer<Player>::create(this);
qWarning() << "No clip loaded for playback";
return;
}
_player->startPlaying();
setRecordingBasis();
_player->play();
}
void AvatarData::setPlayerVolume(float volume) {
if (_player) {
_player->setVolume(volume);
}
// FIXME
}
void AvatarData::setPlayerAudioOffset(int audioOffset) {
if (_player) {
_player->setAudioOffset(audioOffset);
}
void AvatarData::setPlayerAudioOffset(float audioOffset) {
// FIXME
}
void AvatarData::setPlayerFrame(unsigned int frame) {
if (_player) {
_player->setCurrentFrame(frame);
}
}
void AvatarData::setPlayerTime(float time) {
if (!_player) {
qWarning() << "No player active";
return;
}
void AvatarData::setPlayerTime(unsigned int time) {
if (_player) {
_player->setCurrentTime(time);
}
_player->seek(time * MSECS_PER_SECOND);
}
void AvatarData::setPlayFromCurrentLocation(bool playFromCurrentLocation) {
if (_player) {
_player->setPlayFromCurrentLocation(playFromCurrentLocation);
}
// FIXME
}
void AvatarData::setPlayerLoop(bool loop) {
if (_player) {
_player->setLoop(loop);
_player->loop(loop);
}
}
void AvatarData::setPlayerUseDisplayName(bool useDisplayName) {
if(_player) {
_player->useDisplayName(useDisplayName);
}
// FIXME
}
void AvatarData::setPlayerUseAttachments(bool useAttachments) {
if(_player) {
_player->useAttachements(useAttachments);
}
// FIXME
}
void AvatarData::setPlayerUseHeadModel(bool useHeadModel) {
if(_player) {
_player->useHeadModel(useHeadModel);
}
// FIXME
}
void AvatarData::setPlayerUseSkeletonModel(bool useSkeletonModel) {
if(_player) {
_player->useSkeletonModel(useSkeletonModel);
}
// FIXME
}
void AvatarData::play() {
@ -920,6 +908,10 @@ void AvatarData::play() {
}
}
std::shared_ptr<UniformTransform> AvatarData::getRecordingBasis() const {
return _recordingBasis;
}
void AvatarData::pausePlayer() {
if (!_player) {
return;
@ -929,7 +921,7 @@ void AvatarData::pausePlayer() {
return;
}
if (_player) {
_player->pausePlayer();
_player->pause();
}
}
@ -942,7 +934,7 @@ void AvatarData::stopPlaying() {
return;
}
if (_player) {
_player->stopPlaying();
_player->stop();
}
}
@ -1514,3 +1506,177 @@ void registerAvatarTypes(QScriptEngine* engine) {
new AttachmentDataObject(), QScriptEngine::ScriptOwnership));
}
void AvatarData::setRecordingBasis(std::shared_ptr<UniformTransform> recordingBasis) {
if (!recordingBasis) {
recordingBasis = std::make_shared<UniformTransform>();
recordingBasis->rotation = getOrientation();
recordingBasis->translation = getPosition();
recordingBasis->scale = getTargetScale();
}
_recordingBasis = recordingBasis;
}
void AvatarData::clearRecordingBasis() {
_recordingBasis.reset();
}
static const QString JSON_AVATAR_BASIS = QStringLiteral("basisTransform");
static const QString JSON_AVATAR_RELATIVE = QStringLiteral("relativeTransform");
static const QString JSON_AVATAR_JOINT_ROTATIONS = QStringLiteral("jointRotations");
static const QString JSON_AVATAR_HEAD = QStringLiteral("head");
static const QString JSON_AVATAR_HEAD_ROTATION = QStringLiteral("rotation");
static const QString JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS = QStringLiteral("blendShapes");
static const QString JSON_AVATAR_HEAD_LEAN_FORWARD = QStringLiteral("leanForward");
static const QString JSON_AVATAR_HEAD_LEAN_SIDEWAYS = QStringLiteral("leanSideways");
static const QString JSON_AVATAR_HEAD_LOOKAT = QStringLiteral("lookAt");
static const QString JSON_AVATAR_HEAD_MODEL = QStringLiteral("headModel");
static const QString JSON_AVATAR_BODY_MODEL = QStringLiteral("bodyModel");
static const QString JSON_AVATAR_DISPLAY_NAME = QStringLiteral("displayName");
static const QString JSON_AVATAR_ATTACHEMENTS = QStringLiteral("attachments");
// Every frame will store both a basis for the recording and a relative transform
// This allows the application to decide whether playback should be relative to an avatar's
// transform at the start of playback, or relative to the transform of the recorded
// avatar
QByteArray avatarStateToFrame(const AvatarData* _avatar) {
QJsonObject root;
if (!_avatar->getFaceModelURL().isEmpty()) {
root[JSON_AVATAR_HEAD_MODEL] = _avatar->getFaceModelURL().toString();
}
if (!_avatar->getSkeletonModelURL().isEmpty()) {
root[JSON_AVATAR_BODY_MODEL] = _avatar->getSkeletonModelURL().toString();
}
if (!_avatar->getDisplayName().isEmpty()) {
root[JSON_AVATAR_DISPLAY_NAME] = _avatar->getDisplayName();
}
if (!_avatar->getAttachmentData().isEmpty()) {
// FIXME serialize attachment data
}
auto recordingBasis = _avatar->getRecordingBasis();
if (recordingBasis) {
// FIXME if the resulting relative basis is identity, we shouldn't record anything
// Record the transformation basis
root[JSON_AVATAR_BASIS] = recordingBasis->toJson();
// Record the relative transform
auto relativeTransform = recordingBasis->relativeTransform(
UniformTransform(_avatar->getPosition(), _avatar->getOrientation(), _avatar->getTargetScale()));
root[JSON_AVATAR_RELATIVE] = relativeTransform.toJson();
}
QJsonArray jointRotations;
for (const auto& jointRotation : _avatar->getJointRotations()) {
jointRotations.push_back(toJsonValue(jointRotation));
}
root[JSON_AVATAR_JOINT_ROTATIONS] = jointRotations;
const HeadData* head = _avatar->getHeadData();
if (head) {
QJsonObject headJson;
QJsonArray blendshapeCoefficients;
for (const auto& blendshapeCoefficient : head->getBlendshapeCoefficients()) {
blendshapeCoefficients.push_back(blendshapeCoefficient);
}
headJson[JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS] = blendshapeCoefficients;
headJson[JSON_AVATAR_HEAD_ROTATION] = toJsonValue(head->getRawOrientation());
headJson[JSON_AVATAR_HEAD_LEAN_FORWARD] = QJsonValue(head->getLeanForward());
headJson[JSON_AVATAR_HEAD_LEAN_SIDEWAYS] = QJsonValue(head->getLeanSideways());
vec3 relativeLookAt = glm::inverse(_avatar->getOrientation()) *
(head->getLookAtPosition() - _avatar->getPosition());
headJson[JSON_AVATAR_HEAD_LOOKAT] = toJsonValue(relativeLookAt);
root[JSON_AVATAR_HEAD] = headJson;
}
return QJsonDocument(root).toBinaryData();
}
void avatarStateFromFrame(const QByteArray& frameData, AvatarData* _avatar) {
QJsonDocument doc = QJsonDocument::fromBinaryData(frameData);
QJsonObject root = doc.object();
if (root.contains(JSON_AVATAR_HEAD_MODEL)) {
auto faceModelURL = root[JSON_AVATAR_HEAD_MODEL].toString();
if (faceModelURL != _avatar->getFaceModelURL().toString()) {
_avatar->setFaceModelURL(faceModelURL);
}
}
if (root.contains(JSON_AVATAR_BODY_MODEL)) {
auto bodyModelURL = root[JSON_AVATAR_BODY_MODEL].toString();
if (bodyModelURL != _avatar->getSkeletonModelURL().toString()) {
_avatar->setSkeletonModelURL(bodyModelURL);
}
}
if (root.contains(JSON_AVATAR_DISPLAY_NAME)) {
auto newDisplayName = root[JSON_AVATAR_DISPLAY_NAME].toString();
if (newDisplayName != _avatar->getDisplayName()) {
_avatar->setDisplayName(newDisplayName);
}
}
// During playback you can either have the recording basis set to the avatar current state
// meaning that all playback is relative to this avatars starting position, or
// the basis can be loaded from the recording, meaning the playback is relative to the
// original avatar location
// The first is more useful for playing back recordings on your own avatar, while
// the latter is more useful for playing back other avatars within your scene.
auto currentBasis = _avatar->getRecordingBasis();
if (!currentBasis) {
currentBasis = UniformTransform::parseJson(root[JSON_AVATAR_BASIS]);
}
auto relativeTransform = UniformTransform::parseJson(root[JSON_AVATAR_RELATIVE]);
auto worldTransform = currentBasis->worldTransform(*relativeTransform);
_avatar->setPosition(worldTransform.translation);
_avatar->setOrientation(worldTransform.rotation);
_avatar->setTargetScale(worldTransform.scale);
#if 0
if (root.contains(JSON_AVATAR_ATTACHEMENTS)) {
// FIXME de-serialize attachment data
}
// Joint rotations are relative to the avatar, so they require no basis correction
if (root.contains(JSON_AVATAR_JOINT_ROTATIONS)) {
QVector<quat> jointRotations;
QJsonArray jointRotationsJson = root[JSON_AVATAR_JOINT_ROTATIONS].toArray();
jointRotations.reserve(jointRotationsJson.size());
for (const auto& jointRotationJson : jointRotationsJson) {
jointRotations.push_back(quatFromJsonValue(jointRotationJson));
}
}
// Most head data is relative to the avatar, and needs no basis correction,
// but the lookat vector does need correction
HeadData* head = _avatar->_headData;
if (head && root.contains(JSON_AVATAR_HEAD)) {
QJsonObject headJson = root[JSON_AVATAR_HEAD].toObject();
if (headJson.contains(JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS)) {
QVector<float> blendshapeCoefficients;
QJsonArray blendshapeCoefficientsJson = headJson[JSON_AVATAR_HEAD_BLENDSHAPE_COEFFICIENTS].toArray();
for (const auto& blendshapeCoefficient : blendshapeCoefficientsJson) {
blendshapeCoefficients.push_back((float)blendshapeCoefficient.toDouble());
}
head->setBlendshapeCoefficients(blendshapeCoefficients);
}
if (headJson.contains(JSON_AVATAR_HEAD_ROTATION)) {
head->setOrientation(quatFromJsonValue(headJson[JSON_AVATAR_HEAD_ROTATION]));
}
if (headJson.contains(JSON_AVATAR_HEAD_LEAN_FORWARD)) {
head->setLeanForward((float)headJson[JSON_AVATAR_HEAD_LEAN_FORWARD].toDouble());
}
if (headJson.contains(JSON_AVATAR_HEAD_LEAN_SIDEWAYS)) {
head->setLeanSideways((float)headJson[JSON_AVATAR_HEAD_LEAN_SIDEWAYS].toDouble());
}
if (headJson.contains(JSON_AVATAR_HEAD_LOOKAT)) {
auto relativeLookAt = vec3FromJsonValue(headJson[JSON_AVATAR_HEAD_LOOKAT]);
if (glm::length2(relativeLookAt) > 0.01) {
head->setLookAtPosition((_avatar->getOrientation() * relativeLookAt) + _avatar->getPosition());
}
}
}
#endif
}

View file

@ -134,6 +134,7 @@ class QDataStream;
class AttachmentData;
class JointData;
struct UniformTransform;
class AvatarData : public QObject {
Q_OBJECT
@ -332,6 +333,11 @@ public:
bool shouldDie() const { return _owningAvatarMixer.isNull() || getUsecsSinceLastUpdate() > AVATAR_SILENCE_THRESHOLD_USECS; }
void clearRecordingBasis();
std::shared_ptr<UniformTransform> getRecordingBasis() const;
void setRecordingBasis(std::shared_ptr<UniformTransform> recordingBasis = std::shared_ptr<UniformTransform>());
public slots:
void sendAvatarDataPacket();
void sendIdentityPacket();
@ -344,17 +350,13 @@ public slots:
bool isPlaying();
bool isPaused();
qint64 playerElapsed();
qint64 playerLength();
int playerCurrentFrame();
int playerFrameNumber();
void loadRecording(QString filename);
float playerElapsed();
float playerLength();
void loadRecording(const QString& filename);
void startPlaying();
void setPlayerVolume(float volume);
void setPlayerAudioOffset(int audioOffset);
void setPlayerFrame(unsigned int frame);
void setPlayerTime(unsigned int time);
void setPlayerAudioOffset(float audioOffset);
void setPlayerTime(float time);
void setPlayFromCurrentLocation(bool playFromCurrentLocation);
void setPlayerLoop(bool loop);
void setPlayerUseDisplayName(bool useDisplayName);
@ -364,7 +366,7 @@ public slots:
void play();
void pausePlayer();
void stopPlaying();
protected:
QUuid _sessionUUID;
glm::vec3 _position = START_LOCATION;
@ -418,7 +420,7 @@ protected:
QWeakPointer<Node> _owningAvatarMixer;
PlayerPointer _player;
recording::DeckPointer _player;
/// Loads the joint indices, names from the FST file (if any)
virtual void updateJointMappings();
@ -432,8 +434,13 @@ protected:
SimpleMovingAverage _averageBytesReceived;
QMutex avatarLock; // Name is redundant, but it aids searches.
// During recording, this holds the starting position, orientation & scale of the recorded avatar
// During playback, it holds the
std::shared_ptr<UniformTransform> _recordingBasis;
private:
friend void avatarStateFromFrame(const QByteArray& frameData, AvatarData* _avatar);
static QUrl _defaultFullAvatarModelUrl;
// privatize the copy constructor and assignment operator so they cannot be called
AvatarData(const AvatarData&);

View file

@ -42,8 +42,20 @@ HeadData::HeadData(AvatarData* owningAvatar) :
}
glm::quat HeadData::getRawOrientation() const {
return glm::quat(glm::radians(glm::vec3(_basePitch, _baseYaw, _baseRoll)));
}
void HeadData::setRawOrientation(const glm::quat& q) {
auto euler = glm::eulerAngles(q);
_basePitch = euler.x;
_baseYaw = euler.y;
_baseRoll = euler.z;
}
glm::quat HeadData::getOrientation() const {
return _owningAvatar->getOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, _baseYaw, _baseRoll)));
return _owningAvatar->getOrientation() * getRawOrientation();
}
void HeadData::setOrientation(const glm::quat& orientation) {

View file

@ -48,6 +48,8 @@ public:
virtual float getFinalYaw() const { return _baseYaw; }
virtual float getFinalPitch() const { return _basePitch; }
virtual float getFinalRoll() const { return _baseRoll; }
virtual glm::quat getRawOrientation() const;
virtual void setRawOrientation(const glm::quat& orientation);
glm::quat getOrientation() const;
void setOrientation(const glm::quat& orientation);

View file

@ -9,6 +9,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#if 0
#include <AudioConstants.h>
#include <GLMHelpers.h>
#include <NodeList.h>
@ -438,3 +440,4 @@ bool Player::computeCurrentFrame() {
return true;
}
#endif

View file

@ -12,6 +12,9 @@
#ifndef hifi_Player_h
#define hifi_Player_h
#include <recording/Forward.h>
#if 0
#include <AudioInjector.h>
#include <QElapsedTimer>
@ -86,5 +89,6 @@ private:
bool _useHeadURL;
bool _useSkeletonURL;
};
#endif
#endif // hifi_Player_h

View file

@ -10,6 +10,7 @@
//
#if 0
#include <GLMHelpers.h>
#include <NodeList.h>
#include <StreamUtils.h>
@ -143,3 +144,4 @@ void Recorder::record() {
void Recorder::recordAudio(const QByteArray& audioByteArray) {
_recording->addAudioPacket(audioByteArray);
}
#endif

View file

@ -12,6 +12,9 @@
#ifndef hifi_Recorder_h
#define hifi_Recorder_h
#include <recording/Forward.h>
#if 0
#include "Recording.h"
template<class C>
@ -49,6 +52,6 @@ private:
AvatarData* _avatar;
};
#endif
#endif // hifi_Recorder_h

View file

@ -9,6 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#if 0
#include <AudioConstants.h>
#include <GLMHelpers.h>
#include <NetworkAccessManager.h>
@ -659,3 +660,4 @@ RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString
return recording;
}
#endif

View file

@ -12,6 +12,8 @@
#ifndef hifi_Recording_h
#define hifi_Recording_h
#if 0
#include <QString>
#include <QVector>
@ -124,5 +126,6 @@ private:
void writeRecordingToFile(RecordingPointer recording, const QString& filename);
RecordingPointer readRecordingFromFile(RecordingPointer recording, const QString& filename);
RecordingPointer readRecordingFromRecFile(RecordingPointer recording, const QString& filename, const QByteArray& byteArray);
#endif
#endif // hifi_Recording_h

View file

@ -25,6 +25,7 @@
#include "RecurseOctreeToMapOperator.h"
#include "LogHandler.h"
static const quint64 DELETED_ENTITIES_EXTRA_USECS_TO_CONSIDER = USECS_PER_MSEC * 50;
EntityTree::EntityTree(bool shouldReaverage) :
Octree(shouldReaverage),
@ -384,16 +385,15 @@ void EntityTree::deleteEntities(QSet<EntityItemID> entityIDs, bool force, bool i
}
void EntityTree::processRemovedEntities(const DeleteEntityOperator& theOperator) {
quint64 deletedAt = usecTimestampNow();
const RemovedEntities& entities = theOperator.getEntities();
foreach(const EntityToDeleteDetails& details, entities) {
EntityItemPointer theEntity = details.entity;
if (getIsServer()) {
// set up the deleted entities ID
quint64 deletedAt = usecTimestampNow();
_recentlyDeletedEntitiesLock.lockForWrite();
QWriteLocker locker(&_recentlyDeletedEntitiesLock);
_recentlyDeletedEntityItemIDs.insert(deletedAt, theEntity->getEntityItemID());
_recentlyDeletedEntitiesLock.unlock();
}
if (_simulation) {
@ -802,25 +802,37 @@ void EntityTree::update() {
}
bool EntityTree::hasEntitiesDeletedSince(quint64 sinceTime) {
quint64 considerEntitiesSince = sinceTime - DELETED_ENTITIES_EXTRA_USECS_TO_CONSIDER;
// we can probably leverage the ordered nature of QMultiMap to do this quickly...
bool hasSomethingNewer = false;
_recentlyDeletedEntitiesLock.lockForRead();
QReadLocker locker(&_recentlyDeletedEntitiesLock);
QMultiMap<quint64, QUuid>::const_iterator iterator = _recentlyDeletedEntityItemIDs.constBegin();
while (iterator != _recentlyDeletedEntityItemIDs.constEnd()) {
if (iterator.key() > sinceTime) {
if (iterator.key() > considerEntitiesSince) {
hasSomethingNewer = true;
break; // if we have at least one item, we don't need to keep searching
}
++iterator;
}
_recentlyDeletedEntitiesLock.unlock();
#ifdef EXTRA_ERASE_DEBUGGING
if (hasSomethingNewer) {
int elapsed = usecTimestampNow() - considerEntitiesSince;
int difference = considerEntitiesSince - sinceTime;
qDebug() << "EntityTree::hasEntitiesDeletedSince() sinceTime:" << sinceTime
<< "considerEntitiesSince:" << considerEntitiesSince << "elapsed:" << elapsed << "difference:" << difference;
}
#endif
return hasSomethingNewer;
}
// sinceTime is an in/out parameter - it will be side effected with the last time sent out
std::unique_ptr<NLPacket> EntityTree::encodeEntitiesDeletedSince(OCTREE_PACKET_SEQUENCE sequenceNumber, quint64& sinceTime,
bool& hasMore) {
quint64 considerEntitiesSince = sinceTime - DELETED_ENTITIES_EXTRA_USECS_TO_CONSIDER;
auto deletesPacket = NLPacket::create(PacketType::EntityErase);
// pack in flags
@ -841,48 +853,56 @@ std::unique_ptr<NLPacket> EntityTree::encodeEntitiesDeletedSince(OCTREE_PACKET_S
// we keep a multi map of entity IDs to timestamps, we only want to include the entity IDs that have been
// deleted since we last sent to this node
_recentlyDeletedEntitiesLock.lockForRead();
{
QReadLocker locker(&_recentlyDeletedEntitiesLock);
bool hasFilledPacket = false;
bool hasFilledPacket = false;
auto it = _recentlyDeletedEntityItemIDs.constBegin();
while (it != _recentlyDeletedEntityItemIDs.constEnd()) {
QList<QUuid> values = _recentlyDeletedEntityItemIDs.values(it.key());
for (int valueItem = 0; valueItem < values.size(); ++valueItem) {
auto it = _recentlyDeletedEntityItemIDs.constBegin();
while (it != _recentlyDeletedEntityItemIDs.constEnd()) {
QList<QUuid> values = _recentlyDeletedEntityItemIDs.values(it.key());
for (int valueItem = 0; valueItem < values.size(); ++valueItem) {
// if the timestamp is more recent then out last sent time, include it
if (it.key() > sinceTime) {
QUuid entityID = values.at(valueItem);
deletesPacket->write(entityID.toRfc4122());
// if the timestamp is more recent then out last sent time, include it
if (it.key() > considerEntitiesSince) {
QUuid entityID = values.at(valueItem);
++numberOfIDs;
// FIXME - we still seem to see cases where incorrect EntityIDs get sent from the server
// to the client. These were causing "lost" entities like flashlights and laser pointers
// now that we keep around some additional history of the erased entities and resend that
// history for a longer time window, these entities are not "lost". But we haven't yet
// found/fixed the underlying issue that caused bad UUIDs to be sent to some users.
deletesPacket->write(entityID.toRfc4122());
++numberOfIDs;
// check to make sure we have room for one more ID
if (NUM_BYTES_RFC4122_UUID > deletesPacket->bytesAvailableForWrite()) {
hasFilledPacket = true;
break;
#ifdef EXTRA_ERASE_DEBUGGING
qDebug() << "EntityTree::encodeEntitiesDeletedSince() including:" << entityID;
#endif
// check to make sure we have room for one more ID
if (NUM_BYTES_RFC4122_UUID > deletesPacket->bytesAvailableForWrite()) {
hasFilledPacket = true;
break;
}
}
}
// check to see if we're about to return
if (hasFilledPacket) {
// let our caller know how far we got
sinceTime = it.key();
break;
}
++it;
}
// check to see if we're about to return
if (hasFilledPacket) {
// let our caller know how far we got
sinceTime = it.key();
break;
// if we got to the end, then we're done sending
if (it == _recentlyDeletedEntityItemIDs.constEnd()) {
hasMore = false;
}
++it;
}
// if we got to the end, then we're done sending
if (it == _recentlyDeletedEntityItemIDs.constEnd()) {
hasMore = false;
}
_recentlyDeletedEntitiesLock.unlock();
// replace the count for the number of included IDs
deletesPacket->seek(numberOfIDsPos);
deletesPacket->writePrimitive(numberOfIDs);
@ -893,14 +913,14 @@ std::unique_ptr<NLPacket> EntityTree::encodeEntitiesDeletedSince(OCTREE_PACKET_S
// called by the server when it knows all nodes have been sent deleted packets
void EntityTree::forgetEntitiesDeletedBefore(quint64 sinceTime) {
quint64 considerSinceTime = sinceTime - DELETED_ENTITIES_EXTRA_USECS_TO_CONSIDER;
QSet<quint64> keysToRemove;
_recentlyDeletedEntitiesLock.lockForWrite();
QWriteLocker locker(&_recentlyDeletedEntitiesLock);
QMultiMap<quint64, QUuid>::iterator iterator = _recentlyDeletedEntityItemIDs.begin();
// First find all the keys in the map that are older and need to be deleted
while (iterator != _recentlyDeletedEntityItemIDs.end()) {
if (iterator.key() <= sinceTime) {
if (iterator.key() <= considerSinceTime) {
keysToRemove << iterator.key();
}
++iterator;
@ -910,13 +930,14 @@ void EntityTree::forgetEntitiesDeletedBefore(quint64 sinceTime) {
foreach (quint64 value, keysToRemove) {
_recentlyDeletedEntityItemIDs.remove(value);
}
_recentlyDeletedEntitiesLock.unlock();
}
// TODO: consider consolidating processEraseMessageDetails() and processEraseMessage()
int EntityTree::processEraseMessage(NLPacket& packet, const SharedNodePointer& sourceNode) {
#ifdef EXTRA_ERASE_DEBUGGING
qDebug() << "EntityTree::processEraseMessage()";
#endif
withWriteLock([&] {
packet.seek(sizeof(OCTREE_PACKET_FLAGS) + sizeof(OCTREE_PACKET_SEQUENCE) + sizeof(OCTREE_PACKET_SENT_TIME));
@ -934,6 +955,9 @@ int EntityTree::processEraseMessage(NLPacket& packet, const SharedNodePointer& s
}
QUuid entityID = QUuid::fromRfc4122(packet.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
#ifdef EXTRA_ERASE_DEBUGGING
qDebug() << " ---- EntityTree::processEraseMessage() contained ID:" << entityID;
#endif
EntityItemID entityItemID(entityID);
entityItemIDsToDelete << entityItemID;
@ -953,6 +977,9 @@ int EntityTree::processEraseMessage(NLPacket& packet, const SharedNodePointer& s
// NOTE: Caller must lock the tree before calling this.
// TODO: consider consolidating processEraseMessageDetails() and processEraseMessage()
int EntityTree::processEraseMessageDetails(const QByteArray& dataByteArray, const SharedNodePointer& sourceNode) {
#ifdef EXTRA_ERASE_DEBUGGING
qDebug() << "EntityTree::processEraseMessageDetails()";
#endif
const unsigned char* packetData = (const unsigned char*)dataByteArray.constData();
const unsigned char* dataAt = packetData;
size_t packetLength = dataByteArray.size();
@ -979,6 +1006,10 @@ int EntityTree::processEraseMessageDetails(const QByteArray& dataByteArray, cons
dataAt += encodedID.size();
processedBytes += encodedID.size();
#ifdef EXTRA_ERASE_DEBUGGING
qDebug() << " ---- EntityTree::processEraseMessageDetails() contains id:" << entityID;
#endif
EntityItemID entityItemID(entityID);
entityItemIDsToDelete << entityItemID;

View file

@ -16,6 +16,7 @@
#include <controllers/UserInputMapper.h>
#include <PathUtils.h>
#include <NumericalConstants.h>
const QString KeyboardMouseDevice::NAME = "Keyboard/Mouse";
@ -63,7 +64,8 @@ void KeyboardMouseDevice::mousePressEvent(QMouseEvent* event, unsigned int devic
// key pressed again ? without catching the release event ?
}
_lastCursor = event->pos();
_mousePressAt = event->pos();
_mousePressTime = usecTimestampNow();
_mouseMoved = false;
eraseMouseClicked();
}
@ -72,10 +74,11 @@ void KeyboardMouseDevice::mouseReleaseEvent(QMouseEvent* event, unsigned int dev
auto input = _inputDevice->makeInput((Qt::MouseButton) event->button());
_inputDevice->_buttonPressedMap.erase(input.getChannel());
// if we pressed and released at the same location, then create a "_CLICKED" input for this button
// we might want to add some small tolerance to this so if you do a small drag it still counts as
// a clicked.
if (_mousePressAt == event->pos()) {
// if we pressed and released at the same location within a small time window, then create a "_CLICKED"
// input for this button we might want to add some small tolerance to this so if you do a small drag it
// till counts as a clicked.
static const int CLICK_TIME = USECS_PER_MSEC * 500; // 500 ms to click
if (!_mouseMoved && (usecTimestampNow() - _mousePressTime < CLICK_TIME)) {
_inputDevice->_buttonPressedMap.insert(_inputDevice->makeInput((Qt::MouseButton) event->button(), true).getChannel());
}
}
@ -97,6 +100,7 @@ void KeyboardMouseDevice::mouseMoveEvent(QMouseEvent* event, unsigned int device
_inputDevice->_axisStateMap[MOUSE_AXIS_Y_NEG] = (currentMove.y() > 0 ? currentMove.y() : 0.0f);
_lastCursor = currentPos;
_mouseMoved = true;
eraseMouseClicked();
}

View file

@ -115,7 +115,8 @@ public:
protected:
QPoint _lastCursor;
QPoint _mousePressAt;
quint64 _mousePressTime;
bool _mouseMoved;
glm::vec2 _lastTouch;
std::shared_ptr<InputDevice> _inputDevice { std::make_shared<InputDevice>() };

View file

@ -35,7 +35,7 @@ Clip::Pointer Clip::duplicate() {
Clip::Pointer result = std::make_shared<BufferClip>();
Locker lock(_mutex);
float currentPosition = position();
Time currentPosition = position();
seek(0);
Frame::Pointer frame = nextFrame();

View file

@ -28,11 +28,11 @@ public:
Pointer duplicate();
virtual float duration() const = 0;
virtual Time duration() const = 0;
virtual size_t frameCount() const = 0;
virtual void seek(float offset) = 0;
virtual float position() const = 0;
virtual void seek(Time offset) = 0;
virtual Time position() const = 0;
virtual FramePointer peekFrame() const = 0;
virtual FramePointer nextFrame() = 0;

View file

@ -6,7 +6,116 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Deck.h"
#include <NumericalConstants.h>
#include <SharedUtil.h>
// FIXME -- DO NOT include headers in empty CPP files, it produces warnings. Once we define new symbols
// and some actual code here, we can uncomment this include.
//#include "Deck.h"
#include "Clip.h"
#include "Frame.h"
#include "Logging.h"
using namespace recording;
void Deck::queueClip(ClipPointer clip, Time timeOffset) {
if (!clip) {
qCWarning(recordingLog) << "Clip invalid, ignoring";
return;
}
// FIXME if the time offset is not zero, wrap the clip in a OffsetClip wrapper
_clips.push_back(clip);
}
void Deck::play() {
if (_pause) {
_pause = false;
_startEpoch = usecTimestampNow() - (_position * USECS_PER_MSEC);
emit playbackStateChanged();
processFrames();
}
}
void Deck::pause() {
if (!_pause) {
_pause = true;
emit playbackStateChanged();
}
}
Clip::Pointer Deck::getNextClip() {
Clip::Pointer result;
Time soonestFramePosition = INVALID_TIME;
for (const auto& clip : _clips) {
Time nextFramePosition = clip->position();
if (nextFramePosition < soonestFramePosition) {
result = clip;
soonestFramePosition = nextFramePosition;
}
}
return result;
}
void Deck::seek(Time position) {
_position = position;
// FIXME reset the frames to the appropriate spot
for (auto& clip : _clips) {
clip->seek(position);
}
if (!_pause) {
// FIXME what if the timer is already running?
processFrames();
}
}
Time Deck::position() const {
if (_pause) {
return _position;
}
return (usecTimestampNow() - _startEpoch) / USECS_PER_MSEC;
}
static const Time MIN_FRAME_WAIT_INTERVAL_MS = 1;
void Deck::processFrames() {
if (_pause) {
return;
}
_position = position();
auto triggerPosition = _position + MIN_FRAME_WAIT_INTERVAL_MS;
Clip::Pointer nextClip;
for (nextClip = getNextClip(); nextClip; nextClip = getNextClip()) {
// If the clip is too far in the future, just break out of the handling loop
Time framePosition = nextClip->position();
if (framePosition > triggerPosition) {
break;
}
// Handle the frame and advance the clip
Frame::handleFrame(nextClip->nextFrame());
}
if (!nextClip) {
qCDebug(recordingLog) << "No more frames available";
// No more frames available, so handle the end of playback
if (_loop) {
qCDebug(recordingLog) << "Looping enabled, seeking back to beginning";
// If we have looping enabled, start the playback over
seek(0);
} else {
// otherwise pause playback
pause();
}
return;
}
// If we have more clip frames available, set the timer for the next one
Time nextClipPosition = nextClip->position();
Time interval = nextClipPosition - _position;
_timer.singleShot(interval, [this] {
processFrames();
});
}

View file

@ -10,26 +10,62 @@
#ifndef hifi_Recording_Deck_h
#define hifi_Recording_Deck_h
#include "Forward.h"
#include <utility>
#include <list>
#include <QtCore/QObject>
#include <QtCore/QTimer>
#include "Forward.h"
class QIODevice;
namespace recording {
class Deck : public QObject {
Q_OBJECT
public:
using Pointer = std::shared_ptr<Deck>;
Deck(QObject* parent = nullptr) : QObject(parent) {}
virtual ~Deck();
// Place a clip on the deck for recording or playback
void queueClip(ClipPointer clip, float timeOffset = 0.0f);
void play(float timeOffset = 0.0f);
void reposition(float timeOffsetDelta);
void setPlaybackSpeed(float rate);
void queueClip(ClipPointer clip, Time timeOffset = 0.0f);
void play();
bool isPlaying() { return !_pause; }
void pause();
bool isPaused() const { return _pause; }
void stop() { pause(); seek(0.0f); }
Time length() const { return _length; }
void loop(bool enable = true) { _loop = enable; }
bool isLooping() const { return _loop; }
Time position() const;
void seek(Time position);
void setPlaybackSpeed(float factor) { _playbackSpeed = factor; }
float getPlaybackSpeed() { return _playbackSpeed; }
signals:
void playbackStateChanged();
private:
using Clips = std::list<ClipPointer>;
ClipPointer getNextClip();
void processFrames();
QTimer _timer;
Clips _clips;
quint64 _startEpoch { 0 };
Time _position { 0 };
float _playbackSpeed { 1.0f };
bool _pause { true };
bool _loop { false };
Time _length { 0 };
};
}

View file

@ -12,11 +12,18 @@
#include <memory>
#include <list>
#include <limits>
namespace recording {
using Time = uint32_t;
static const Time INVALID_TIME = std::numeric_limits<uint32_t>::max();
using FrameType = uint16_t;
using FrameSize = uint16_t;
struct Frame;
using FramePointer = std::shared_ptr<Frame>;

View file

@ -82,7 +82,8 @@ FrameType Frame::registerFrameType(const QString& frameTypeName) {
Q_ASSERT(headerType == Frame::TYPE_HEADER);
Q_UNUSED(headerType); // FIXME - build system on unix still not upgraded to Qt 5.5.1 so Q_ASSERT still produces warnings
});
return frameTypes.registerValue(frameTypeName);
auto result = frameTypes.registerValue(frameTypeName);
return result;
}
QMap<QString, FrameType> Frame::getFrameTypes() {
@ -102,3 +103,16 @@ Frame::Handler Frame::registerFrameHandler(FrameType type, Handler handler) {
handlerMap[type] = handler;
return result;
}
void Frame::handleFrame(const Frame::Pointer& frame) {
Handler handler;
{
Locker lock(mutex);
auto iterator = handlerMap.find(frame->type);
if (iterator == handlerMap.end()) {
return;
}
handler = *iterator;
}
handler(frame);
}

View file

@ -26,7 +26,7 @@ public:
static const FrameType TYPE_INVALID = 0xFFFF;
static const FrameType TYPE_HEADER = 0x0;
FrameType type { TYPE_INVALID };
float timeOffset { 0 };
Time timeOffset { 0 };
QByteArray data;
Frame() {}
@ -37,6 +37,7 @@ public:
static QMap<QString, FrameType> getFrameTypes();
static QMap<FrameType, QString> getFrameTypeNames();
static Handler registerFrameHandler(FrameType type, Handler handler);
static void handleFrame(const Pointer& frame);
};
}

View file

@ -9,25 +9,35 @@
#include "Recorder.h"
#include <NumericalConstants.h>
#include <SharedUtil.h>
#include "impl/BufferClip.h"
#include "Frame.h"
using namespace recording;
Recorder::~Recorder() {
}
Time Recorder::position() {
return 0.0f;
}
void Recorder::start() {
if (!_recording) {
_recording = true;
if (!_clip) {
_clip = std::make_shared<BufferClip>();
}
_startEpoch = usecTimestampNow();
_timer.start();
emit recordingStateChanged();
}
}
void Recorder::stop() {
if (!_recording) {
if (_recording) {
_recording = false;
_elapsed = _timer.elapsed();
emit recordingStateChanged();
@ -50,13 +60,11 @@ void Recorder::recordFrame(FrameType type, QByteArray frameData) {
Frame::Pointer frame = std::make_shared<Frame>();
frame->type = type;
frame->data = frameData;
frame->timeOffset = (float)(_elapsed + _timer.elapsed()) / MSECS_PER_SECOND;
frame->timeOffset = (usecTimestampNow() - _startEpoch) / USECS_PER_MSEC;
_clip->addFrame(frame);
}
ClipPointer Recorder::getClip() {
auto result = _clip;
_clip.reset();
return result;
return _clip;
}

View file

@ -20,18 +20,23 @@ namespace recording {
// An interface for interacting with clips, creating them by recording or
// playing them back. Also serialization to and from files / network sources
class Recorder : public QObject {
Q_OBJECT
public:
using Pointer = std::shared_ptr<Recorder>;
Recorder(QObject* parent = nullptr) : QObject(parent) {}
virtual ~Recorder();
Time position();
// Start recording frames
void start();
// Stop recording
void stop();
// Test if recording is active
bool isRecording();
// Erase the currently recorded content
void clear();
@ -46,7 +51,8 @@ signals:
private:
QElapsedTimer _timer;
ClipPointer _clip;
quint64 _elapsed;
quint64 _elapsed { 0 };
quint64 _startEpoch { 0 };
bool _recording { false };
};

View file

@ -8,24 +8,26 @@
#include "BufferClip.h"
#include <NumericalConstants.h>
#include "../Frame.h"
using namespace recording;
void BufferClip::seek(float offset) {
void BufferClip::seek(Time offset) {
Locker lock(_mutex);
auto itr = std::lower_bound(_frames.begin(), _frames.end(), offset,
[](Frame::Pointer a, float b)->bool{
auto itr = std::lower_bound(_frames.begin(), _frames.end(), offset,
[](Frame::Pointer a, Time b)->bool {
return a->timeOffset < b;
}
);
_frameIndex = itr - _frames.begin();
}
float BufferClip::position() const {
Time BufferClip::position() const {
Locker lock(_mutex);
float result = std::numeric_limits<float>::max();
Time result = INVALID_TIME;
if (_frameIndex < _frames.size()) {
result = _frames[_frameIndex]->timeOffset;
}
@ -77,7 +79,7 @@ void BufferClip::reset() {
_frameIndex = 0;
}
float BufferClip::duration() const {
Time BufferClip::duration() const {
if (_frames.empty()) {
return 0;
}

View file

@ -22,11 +22,11 @@ public:
virtual ~BufferClip() {}
virtual float duration() const override;
virtual Time duration() const override;
virtual size_t frameCount() const override;
virtual void seek(float offset) override;
virtual float position() const override;
virtual void seek(Time offset) override;
virtual Time position() const override;
virtual FramePointer peekFrame() const override;
virtual FramePointer nextFrame() override;

View file

@ -22,7 +22,7 @@
using namespace recording;
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(float) + sizeof(uint16_t);
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(Time) + sizeof(FrameSize);
static const QString FRAME_TYPE_MAP = QStringLiteral("frameTypes");
@ -60,10 +60,10 @@ FrameHeaderList parseFrameHeaders(uchar* const start, const qint64& size) {
FrameHeader header;
memcpy(&(header.type), current, sizeof(FrameType));
current += sizeof(FrameType);
memcpy(&(header.timeOffset), current, sizeof(float));
current += sizeof(float);
memcpy(&(header.size), current, sizeof(uint16_t));
current += sizeof(uint16_t);
memcpy(&(header.timeOffset), current, sizeof(Time));
current += sizeof(Time);
memcpy(&(header.size), current, sizeof(FrameSize));
current += sizeof(FrameSize);
header.fileOffset = current - start;
if (end - current < header.size) {
current = end;
@ -117,6 +117,7 @@ FileClip::FileClip(const QString& fileName) : _file(fileName) {
qWarning() << "Header missing frame type map, invalid file";
return;
}
qDebug() << translationMap;
// Update the loaded headers with the frame data
_frameHeaders.reserve(parsedFrameHeaders.size());
@ -132,16 +133,21 @@ FileClip::FileClip(const QString& fileName) : _file(fileName) {
// FIXME move to frame?
bool writeFrame(QIODevice& output, const Frame& frame) {
if (frame.type == Frame::TYPE_INVALID) {
qWarning() << "Attempting to write invalid frame";
return true;
}
auto written = output.write((char*)&(frame.type), sizeof(FrameType));
if (written != sizeof(FrameType)) {
return false;
}
written = output.write((char*)&(frame.timeOffset), sizeof(float));
if (written != sizeof(float)) {
written = output.write((char*)&(frame.timeOffset), sizeof(Time));
if (written != sizeof(Time)) {
return false;
}
uint16_t dataSize = frame.data.size();
written = output.write((char*)&dataSize, sizeof(uint16_t));
written = output.write((char*)&dataSize, sizeof(FrameSize));
if (written != sizeof(uint16_t)) {
return false;
}
@ -201,19 +207,19 @@ FileClip::~FileClip() {
}
}
void FileClip::seek(float offset) {
void FileClip::seek(Time offset) {
Locker lock(_mutex);
auto itr = std::lower_bound(_frameHeaders.begin(), _frameHeaders.end(), offset,
[](const FrameHeader& a, float b)->bool {
[](const FrameHeader& a, Time b)->bool {
return a.timeOffset < b;
}
);
_frameIndex = itr - _frameHeaders.begin();
}
float FileClip::position() const {
Time FileClip::position() const {
Locker lock(_mutex);
float result = std::numeric_limits<float>::max();
Time result = INVALID_TIME;
if (_frameIndex < _frameHeaders.size()) {
result = _frameHeaders[_frameIndex].timeOffset;
}
@ -260,7 +266,7 @@ void FileClip::addFrame(FramePointer) {
throw std::runtime_error("File clips are read only");
}
float FileClip::duration() const {
Time FileClip::duration() const {
if (_frameHeaders.empty()) {
return 0;
}

View file

@ -26,11 +26,11 @@ public:
FileClip(const QString& file);
virtual ~FileClip();
virtual float duration() const override;
virtual Time duration() const override;
virtual size_t frameCount() const override;
virtual void seek(float offset) override;
virtual float position() const override;
virtual void seek(Time offset) override;
virtual Time position() const override;
virtual FramePointer peekFrame() const override;
virtual FramePointer nextFrame() override;
@ -45,7 +45,7 @@ public:
struct FrameHeader {
FrameType type;
float timeOffset;
Time timeOffset;
uint16_t size;
quint64 fileOffset;
};

View file

@ -1099,10 +1099,12 @@ void Model::setGeometry(const QSharedPointer<NetworkGeometry>& newGeometry) {
void Model::deleteGeometry() {
_blendedVertexBuffers.clear();
_rig->clearJointStates();
_meshStates.clear();
_rig->deleteAnimations();
_rig->destroyAnimGraph();
if (_rig) {
_rig->clearJointStates();
_rig->deleteAnimations();
_rig->destroyAnimGraph();
}
_blendedBlendshapeCoefficients.clear();
}

View file

@ -1,3 +1,3 @@
set(TARGET_NAME script-engine)
setup_hifi_library(Gui Network Script WebSockets Widgets)
link_hifi_libraries(shared networking octree gpu procedural model model-networking fbx entities controllers animation audio physics)
link_hifi_libraries(shared networking octree gpu procedural model model-networking recording avatars fbx entities controllers animation audio physics)

View file

@ -64,15 +64,15 @@ glm::quat Quat::inverse(const glm::quat& q) {
}
glm::vec3 Quat::getFront(const glm::quat& orientation) {
return orientation * IDENTITY_FRONT;
return orientation * Vectors::FRONT;
}
glm::vec3 Quat::getRight(const glm::quat& orientation) {
return orientation * IDENTITY_RIGHT;
return orientation * Vectors::RIGHT;
}
glm::vec3 Quat::getUp(const glm::quat& orientation) {
return orientation * IDENTITY_UP;
return orientation * Vectors::UP;
}
glm::vec3 Quat::safeEulerAngles(const glm::quat& orientation) {

View file

@ -201,30 +201,7 @@ float angleBetween(const glm::vec3& v1, const glm::vec3& v2) {
// Helper function return the rotation from the first vector onto the second
glm::quat rotationBetween(const glm::vec3& v1, const glm::vec3& v2) {
float angle = angleBetween(v1, v2);
if (glm::isnan(angle) || angle < EPSILON) {
return glm::quat();
}
glm::vec3 axis;
if (angle > 179.99f * RADIANS_PER_DEGREE) { // 180 degree rotation; must use another axis
axis = glm::cross(v1, glm::vec3(1.0f, 0.0f, 0.0f));
float axisLength = glm::length(axis);
if (axisLength < EPSILON) { // parallel to x; y will work
axis = glm::normalize(glm::cross(v1, glm::vec3(0.0f, 1.0f, 0.0f)));
} else {
axis /= axisLength;
}
} else {
axis = glm::normalize(glm::cross(v1, v2));
// It is possible for axis to be nan even when angle is not less than EPSILON.
// For example when angle is small but not tiny but v1 and v2 and have very short lengths.
if (glm::isnan(glm::dot(axis, axis))) {
// set angle and axis to values that will generate an identity rotation
angle = 0.0f;
axis = glm::vec3(1.0f, 0.0f, 0.0f);
}
}
return glm::angleAxis(angle, axis);
return glm::rotation(glm::normalize(v1), glm::normalize(v2));
}
bool isPointBehindTrianglesPlane(glm::vec3 point, glm::vec3 p0, glm::vec3 p1, glm::vec3 p2) {

View file

@ -16,6 +16,7 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/quaternion.hpp>
// Bring the most commonly used GLM types into the default namespace
using glm::ivec2;

View file

@ -0,0 +1,57 @@
//
// Created by Bradley Austin Davis on 2015/11/09
// Copyright 2013-2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "JSONHelpers.h"
#include <QtCore/QJsonValue>
#include <QtCore/QJsonObject>
#include <QtCore/QJsonArray>
template <typename T>
QJsonValue glmToJson(const T& t) {
static const T DEFAULT_VALUE = T();
if (t == DEFAULT_VALUE) {
return QJsonValue();
}
QJsonArray result;
for (auto i = 0; i < t.length(); ++i) {
result.push_back(t[i]);
}
return result;
}
template <typename T>
T glmFromJson(const QJsonValue& json) {
static const T DEFAULT_VALUE = T();
T result;
if (json.isArray()) {
QJsonArray array = json.toArray();
size_t length = std::min(array.size(), result.length());
for (size_t i = 0; i < length; ++i) {
result[i] = (float)array[i].toDouble();
}
}
return result;
}
QJsonValue toJsonValue(const quat& q) {
return glmToJson(q);
}
QJsonValue toJsonValue(const vec3& v) {
return glmToJson(v);
}
quat quatFromJsonValue(const QJsonValue& q) {
return glmFromJson<quat>(q);
}
vec3 vec3FromJsonValue(const QJsonValue& v) {
return glmFromJson<vec3>(v);
}

View file

@ -0,0 +1,23 @@
//
// Created by Bradley Austin Davis on 2015/11/09
// Copyright 2013-2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Shared_JSONHelpers_h
#define hifi_Shared_JSONHelpers_h
#include "../GLMHelpers.h"
QJsonValue toJsonValue(const quat& q);
QJsonValue toJsonValue(const vec3& q);
quat quatFromJsonValue(const QJsonValue& q);
vec3 vec3FromJsonValue(const QJsonValue& q);
#endif

View file

@ -0,0 +1,84 @@
//
// Created by Bradley Austin Davis on 2015/11/09
// Copyright 2013-2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "UniformTransform.h"
#include "JSONHelpers.h"
#include <QtCore/QJsonValue>
#include <QtCore/QJsonObject>
#include <QtCore/QJsonArray>
#include <glm/gtc/matrix_transform.hpp>
const float UniformTransform::DEFAULT_SCALE = 1.0f;
std::shared_ptr<UniformTransform> UniformTransform::parseJson(const QJsonValue& basis) {
std::shared_ptr<UniformTransform> result = std::make_shared<UniformTransform>();
result->fromJson(basis);
return result;
}
static const QString JSON_TRANSLATION = QStringLiteral("translation");
static const QString JSON_ROTATION = QStringLiteral("rotation");
static const QString JSON_SCALE = QStringLiteral("scale");
void UniformTransform::fromJson(const QJsonValue& basisValue) {
if (!basisValue.isObject()) {
return;
}
QJsonObject basis = basisValue.toObject();
if (basis.contains(JSON_ROTATION)) {
rotation = quatFromJsonValue(basis[JSON_ROTATION]);
}
if (basis.contains(JSON_TRANSLATION)) {
translation = vec3FromJsonValue(basis[JSON_TRANSLATION]);
}
if (basis.contains(JSON_SCALE)) {
scale = (float)basis[JSON_SCALE].toDouble();
}
}
glm::mat4 toMat4(const UniformTransform& transform) {
return glm::translate(glm::mat4(), transform.translation) * glm::mat4_cast(transform.rotation);
}
UniformTransform fromMat4(const glm::mat4& m) {
UniformTransform result;
result.translation = vec3(m[3]);
result.rotation = glm::quat_cast(m);
return result;
}
UniformTransform UniformTransform::relativeTransform(const UniformTransform& worldTransform) const {
UniformTransform result = fromMat4(glm::inverse(toMat4(*this)) * toMat4(worldTransform));
result.scale = scale / worldTransform.scale;
return result;
}
UniformTransform UniformTransform::worldTransform(const UniformTransform& relativeTransform) const {
UniformTransform result = fromMat4(toMat4(*this) * toMat4(relativeTransform));
result.scale = relativeTransform.scale * scale;
return result;
}
QJsonObject UniformTransform::toJson() const {
QJsonObject result;
auto json = toJsonValue(translation);
if (!json.isNull()) {
result[JSON_TRANSLATION] = json;
}
json = toJsonValue(rotation);
if (!json.isNull()) {
result[JSON_ROTATION] = json;
}
if (scale != DEFAULT_SCALE) {
result[JSON_SCALE] = scale;
}
return result;
}

View file

@ -0,0 +1,40 @@
//
// Created by Bradley Austin Davis on 2015/11/09
// Copyright 2013-2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Shared_UniformTransform_h
#define hifi_Shared_UniformTransform_h
#include "../GLMHelpers.h"
class QJsonValue;
struct UniformTransform {
static const float DEFAULT_SCALE;
glm::vec3 translation;
glm::quat rotation;
float scale { DEFAULT_SCALE };
UniformTransform() {}
UniformTransform(const glm::vec3& translation, const glm::quat& rotation, const float& scale)
: translation(translation), rotation(rotation), scale(scale) {}
UniformTransform relativeTransform(const UniformTransform& worldTransform) const;
glm::vec3 relativeVector(const UniformTransform& worldTransform) const;
UniformTransform worldTransform(const UniformTransform& relativeTransform) const;
glm::vec3 worldVector(const UniformTransform& relativeTransform) const;
QJsonObject toJson() const;
void fromJson(const QJsonValue& json);
static std::shared_ptr<UniformTransform> parseJson(const QJsonValue& json);
};
#endif