Merge branch 'master' of https://github.com/highfidelity/hifi into baseball

Commit 55c6706f76 by Atlante45, 2015-11-06 13:31:39 -08:00
57 changed files with 1521 additions and 238 deletions

View file

@ -277,7 +277,10 @@ void Agent::processAgentAvatarAndAudio(float deltaTime) {
QByteArray avatarByteArray = _avatarData->toByteArray(true, randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO);
_avatarData->doneEncoding(true);
auto avatarPacket = NLPacket::create(PacketType::AvatarData, avatarByteArray.size());
static AvatarDataSequenceNumber sequenceNumber = 0;
auto avatarPacket = NLPacket::create(PacketType::AvatarData, avatarByteArray.size() + sizeof(sequenceNumber));
avatarPacket->writePrimitive(sequenceNumber++);
avatarPacket->write(avatarByteArray);
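For reference, a minimal sketch of the matching read on the receiving side; readPrimitive is assumed to mirror writePrimitive on NLPacket, and the snippet is illustrative rather than part of this diff:

    AvatarDataSequenceNumber sequenceNumber;
    avatarPacket->readPrimitive(&sequenceNumber);
    // the remaining payload is the serialized avatar byte array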

View file

@ -4,7 +4,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/boostorg/config/archive/boost-1.58.0.zip
#URL https://github.com/boostorg/config/archive/boost-1.58.0.zip
URL http://hifi-public.s3.amazonaws.com/dependencies/config-boost-1.58.0.zip
URL_MD5 42fa673bae2b7645a22736445e80eb8d
CONFIGURE_COMMAND ""
BUILD_COMMAND ""

View file

@ -17,7 +17,8 @@ include(ExternalProject)
if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://bullet.googlecode.com/files/bullet-2.82-r2704.zip
# URL https://bullet.googlecode.com/files/bullet-2.82-r2704.zip
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-r2704.zip
URL_MD5 f5e8914fc9064ad32e0d62d19d33d977
CMAKE_ARGS ${PLATFORM_CMAKE_ARGS} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DBUILD_EXTRAS=0 -DINSTALL_LIBS=1 -DBUILD_DEMOS=0 -DUSE_GLUT=0 -DUSE_DX11=0
LOG_DOWNLOAD 1
@ -28,7 +29,8 @@ if (WIN32)
else ()
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://bullet.googlecode.com/files/bullet-2.82-r2704.tgz
#URL http://bullet.googlecode.com/files/bullet-2.82-r2704.tgz
URL http://hifi-public.s3.amazonaws.com/dependencies/bullet-2.82-r2704.tgz
URL_MD5 70b3c8d202dee91a0854b4cbc88173e8
CMAKE_ARGS ${PLATFORM_CMAKE_ARGS} -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DBUILD_EXTRAS=0 -DINSTALL_LIBS=1 -DBUILD_DEMOS=0 -DUSE_GLUT=0
LOG_DOWNLOAD 1

View file

@ -7,7 +7,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://github.com/ValveSoftware/openvr/archive/0.9.1.zip
#URL https://github.com/ValveSoftware/openvr/archive/0.9.1.zip
URL http://hifi-public.s3.amazonaws.com/dependencies/openvr-0.9.1.zip
URL_MD5 f986f5a6815e9454c53c5bf58ce02fdc
CONFIGURE_COMMAND ""
BUILD_COMMAND ""

View file

@ -7,7 +7,7 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://www.libsdl.org/release/SDL2-devel-2.0.3-VC.zip
URL http://hifi-public.s3.amazonaws.com/dependencies/SDL2-devel-2.0.3-VC.zip
URL_MD5 30a333bcbe94bc5016e8799c73e86233
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
@ -18,7 +18,7 @@ elseif (APPLE)
ExternalProject_Add(
${EXTERNAL_NAME}
URL https://hifi-public.s3.amazonaws.com/dependencies/SDL2-2.0.3.zip
URL http://hifi-public.s3.amazonaws.com/dependencies/SDL2-2.0.3.zip
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR> -DVIDEO_OPENGL=OFF
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
LOG_DOWNLOAD 1

View file

@ -4,15 +4,15 @@ set(EXTERNAL_NAME sixense)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
#set(SIXENSE_URL "https://hifi-public.s3.amazonaws.com/dependencies/SixenseSDK_062612.zip")
#set(SIXENSE_URL "http://hifi-public.s3.amazonaws.com/dependencies/SixenseSDK_062612.zip")
#set(SIXENSE_URL_MD5 "10cc8dc470d2ac1244a88cf04bc549cc")
#set(SIXENSE_NEW_LAYOUT 0)
#set(SIXENSE_URL "https://public.s3.amazonaws.com/dependencies/SixenseSDK_071615.zip")
#set(SIXENSE_URL "http://public.s3.amazonaws.com/dependencies/SixenseSDK_071615.zip")
#set(SIXENSE_URL_MD5 "752a3901f334124e9cffc2ba4136ef7d")
#set(SIXENSE_NEW_LAYOUT 1)
set(SIXENSE_URL "https://hifi-public.s3.amazonaws.com/dependencies/SixenseSDK_102215.zip")
set(SIXENSE_URL "http://hifi-public.s3.amazonaws.com/dependencies/SixenseSDK_102215.zip")
set(SIXENSE_URL_MD5 "93c3a6795cce777a0f472b09532935f1")
set(SIXENSE_NEW_LAYOUT 1)
@ -43,7 +43,7 @@ if (WIN32)
endif()
if (${SIXENSE_NEW_LAYOUT})
# for 2015 SDKs (using the 2013 versions may be causing the crash)
# for 2015 SDKs (using the VS2013 versions may be causing the crash, so use the VS2010 versions)
set(${EXTERNAL_NAME_UPPER}_DLL_PATH "${SOURCE_DIR}/bin/${ARCH_DIR}/VS2010/release_dll")
set(${EXTERNAL_NAME_UPPER}_LIB_PATH "${SOURCE_DIR}/lib/${ARCH_DIR}/VS2010/release_dll")
else()

View file

@ -16,11 +16,11 @@ var NUM_AC = 3; // This is the number of AC. Their ID need to be unique and betw
var NAMES = new Array("Craig", "Clement", "Jeff"); // AC names ordered by ID (default name is "ACx", x = ID + 1)
// Those variables MUST be common to every script
var controlVoxelSize = 0.25;
var controlVoxelPosition = { x: 2000 , y: 0, z: 0 };
var controlEntitySize = 0.25;
var controlEntityPosition = { x: 2000 , y: 0, z: 0 };
// Script. DO NOT MODIFY BEYOND THIS LINE.
Script.include("libraries/toolBars.js");
Script.include("../libraries/toolBars.js");
var DO_NOTHING = 0;
var PLAY = 1;
@ -138,16 +138,22 @@ function sendCommand(id, action) {
return;
}
if (id === toolBars.length - 1) {
if (id === (toolBars.length - 1)) {
for (i = 0; i < NUM_AC; i++) {
sendCommand(i, action);
}
return;
}
// TODO: Fix this to use some mechanism other than voxels
//Voxels.setVoxel(controlVoxelPosition.x + id * controlVoxelSize, controlVoxelPosition.y, controlVoxelPosition.z,
// controlVoxelSize, COLORS[action].red, COLORS[action].green, COLORS[action].blue);
var position = { x: controlEntityPosition.x + id * controlEntitySize,
y: controlEntityPosition.y, z: controlEntityPosition.z };
Entities.addEntity({
type: "Box",
position: position,
dimensions: { x: controlEntitySize, y: controlEntitySize, z: controlEntitySize },
color: COLORS[action],
lifetime: 5
});
}
function mousePressEvent(event) {

View file

@ -12,27 +12,25 @@
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
// Set the following variables to the values needed
var filename = HIFI_PUBLIC_BUCKET + "ozan/bartender.rec";
var filename = "/Users/clement/Desktop/recording.hfr";
var playFromCurrentLocation = true;
var useDisplayName = true;
var useAttachments = true;
var useHeadModel = true;
var useSkeletonModel = true;
var useAvatarModel = true;
// ID of the agent. Two agents can't have the same ID.
var id = 0;
// Set head and skeleton models
Avatar.faceModelURL = "http://public.highfidelity.io/models/heads/EvilPhilip_v7.fst";
Avatar.skeletonModelURL = "http://public.highfidelity.io/models/skeletons/Philip_Carl_Body_A-Pose.fst";
// Set avatar model URL
Avatar.skeletonModelURL = "https://hifi-public.s3.amazonaws.com/marketplace/contents/e21c0b95-e502-4d15-8c41-ea2fc40f1125/3585ddf674869a67d31d5964f7b52de1.fst?1427169998";
// Set position/orientation/scale here if playFromCurrentLocation is true
Avatar.position = { x:1, y: 1, z: 1 };
Avatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
Avatar.scale = 1.0;
// Those variables MUST be common to every script
var controlVoxelSize = 0.25;
var controlVoxelPosition = { x: 2000 , y: 0, z: 0 };
var controlEntitySize = 0.25;
var controlEntityPosition = { x: 2000, y: 0, z: 0 };
// Script. DO NOT MODIFY BEYOND THIS LINE.
var DO_NOTHING = 0;
@ -49,113 +47,111 @@ COLORS[STOP] = { red: STOP, green: 0, blue: 0 };
COLORS[SHOW] = { red: SHOW, green: 0, blue: 0 };
COLORS[HIDE] = { red: HIDE, green: 0, blue: 0 };
controlVoxelPosition.x += id * controlVoxelSize;
controlEntityPosition.x += id * controlEntitySize;
Avatar.loadRecording(filename);
Avatar.setPlayFromCurrentLocation(playFromCurrentLocation);
Avatar.setPlayerUseDisplayName(useDisplayName);
Avatar.setPlayerUseAttachments(useAttachments);
Avatar.setPlayerUseHeadModel(useHeadModel);
Avatar.setPlayerUseSkeletonModel(useSkeletonModel);
Avatar.setPlayerUseHeadModel(false);
Avatar.setPlayerUseSkeletonModel(useAvatarModel);
function setupVoxelViewer() {
var voxelViewerOffset = 10;
var voxelViewerPosition = JSON.parse(JSON.stringify(controlVoxelPosition));
voxelViewerPosition.x -= voxelViewerOffset;
var voxelViewerOrientation = Quat.fromPitchYawRollDegrees(0, -90, 0);
VoxelViewer.setPosition(voxelViewerPosition);
VoxelViewer.setOrientation(voxelViewerOrientation);
VoxelViewer.queryOctree();
function setupEntityViewer() {
var entityViewerOffset = 10;
var entityViewerPosition = { x: controlEntityPosition.x - entityViewerOffset,
y: controlEntityPosition.y, z: controlEntityPosition.z };
var entityViewerOrientation = Quat.fromPitchYawRollDegrees(0, -90, 0);
EntityViewer.setPosition(entityViewerPosition);
EntityViewer.setOrientation(entityViewerOrientation);
EntityViewer.queryOctree();
}
function getAction(controlVoxel) {
if (controlVoxel.x != controlVoxelPosition.x ||
controlVoxel.y != controlVoxelPosition.y ||
controlVoxel.z != controlVoxelPosition.z ||
controlVoxel.s != controlVoxelSize) {
return DO_NOTHING;
}
for (i in COLORS) {
if (controlVoxel.red === COLORS[i].red &&
controlVoxel.green === COLORS[i].green &&
controlVoxel.blue === COLORS[i].blue) {
// TODO: Fix this to use some mechanism other than voxels
//Voxels.eraseVoxel(controlVoxelPosition.x, controlVoxelPosition.y, controlVoxelPosition.z, controlVoxelSize);
return parseInt(i);
function getAction(controlEntity) {
if (controlEntity === null ||
controlEntity.position.x !== controlEntityPosition.x ||
controlEntity.position.y !== controlEntityPosition.y ||
controlEntity.position.z !== controlEntityPosition.z ||
controlEntity.dimensions.x !== controlEntitySize) {
return DO_NOTHING;
}
}
return DO_NOTHING;
for (i in COLORS) {
if (controlEntity.color.red === COLORS[i].red &&
controlEntity.color.green === COLORS[i].green &&
controlEntity.color.blue === COLORS[i].blue) {
Entities.deleteEntity(controlEntity.id);
return parseInt(i);
}
}
return DO_NOTHING;
}
count = 300; // This is necessary to wait for the audio mixer to connect
count = 100; // This is necessary to wait for the audio mixer to connect
function update(event) {
VoxelViewer.queryOctree();
if (count > 0) {
count--;
return;
}
// TODO: Fix this to use some mechanism other than voxels
// Voxels.getVoxelAt(controlVoxelPosition.x, controlVoxelPosition.y, controlVoxelPosition.z, controlVoxelSize);
var controlVoxel = false;
var action = getAction(controlVoxel);
switch(action) {
case PLAY:
print("Play");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
if (!Avatar.isPlaying()) {
Avatar.startPlaying();
}
Avatar.setPlayerLoop(false);
break;
case PLAY_LOOP:
print("Play loop");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
if (!Avatar.isPlaying()) {
Avatar.startPlaying();
}
Avatar.setPlayerLoop(true);
break;
case STOP:
print("Stop");
if (Avatar.isPlaying()) {
Avatar.stopPlaying();
}
break;
case SHOW:
print("Show");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
break;
case HIDE:
print("Hide");
if (Avatar.isPlaying()) {
Avatar.stopPlaying();
}
Agent.isAvatar = false;
break;
case DO_NOTHING:
break;
default:
print("Unknown action: " + action);
break;
}
if (Avatar.isPlaying()) {
Avatar.play();
}
EntityViewer.queryOctree();
if (count > 0) {
count--;
return;
}
var controlEntity = Entities.findClosestEntity(controlEntityPosition, controlEntitySize);
var action = getAction(Entities.getEntityProperties(controlEntity));
switch(action) {
case PLAY:
print("Play");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
if (!Avatar.isPlaying()) {
Avatar.startPlaying();
}
Avatar.setPlayerLoop(false);
break;
case PLAY_LOOP:
print("Play loop");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
if (!Avatar.isPlaying()) {
Avatar.startPlaying();
}
Avatar.setPlayerLoop(true);
break;
case STOP:
print("Stop");
if (Avatar.isPlaying()) {
Avatar.stopPlaying();
}
break;
case SHOW:
print("Show");
if (!Agent.isAvatar) {
Agent.isAvatar = true;
}
break;
case HIDE:
print("Hide");
if (Avatar.isPlaying()) {
Avatar.stopPlaying();
}
Agent.isAvatar = false;
break;
case DO_NOTHING:
break;
default:
print("Unknown action: " + action);
break;
}
if (Avatar.isPlaying()) {
Avatar.play();
}
}
Script.update.connect(update);
setupVoxelViewer();
setupEntityViewer();

View file

@ -14,8 +14,7 @@ var filename = "http://your.recording.url";
var playFromCurrentLocation = true;
var loop = true;
Avatar.faceModelURL = "http://public.highfidelity.io/models/heads/EvilPhilip_v7.fst";
Avatar.skeletonModelURL = "http://public.highfidelity.io/models/skeletons/Philip_Carl_Body_A-Pose.fst";
Avatar.skeletonModelURL = "https://hifi-public.s3.amazonaws.com/marketplace/contents/e21c0b95-e502-4d15-8c41-ea2fc40f1125/3585ddf674869a67d31d5964f7b52de1.fst?1427169998";
// Set position here if playFromCurrentLocation is true
Avatar.position = { x:1, y: 1, z: 1 };
@ -23,30 +22,34 @@ Avatar.orientation = Quat.fromPitchYawRollDegrees(0, 0, 0);
Avatar.scale = 1.0;
Agent.isAvatar = true;
Avatar.loadRecording(filename);
count = 300; // This is necessary to wait for the audio mixer to connect
function update(event) {
if (count > 0) {
count--;
return;
}
if (count == 0) {
Avatar.setPlayFromCurrentLocation(playFromCurrentLocation);
Avatar.setPlayerLoop(loop);
Avatar.startPlaying();
Avatar.play();
Vec3.print("Playing from ", Avatar.position);
count--;
}
if (Avatar.isPlaying()) {
Avatar.play();
} else {
Script.update.disconnect(update);
}
if (count > 0) {
count--;
return;
}
if (count == 0) {
Avatar.setPlayFromCurrentLocation(playFromCurrentLocation);
Avatar.setPlayerLoop(loop);
Avatar.setPlayerUseDisplayName(true);
Avatar.setPlayerUseAttachments(true);
Avatar.setPlayerUseHeadModel(false);
Avatar.setPlayerUseSkeletonModel(true);
Avatar.startPlaying();
Avatar.play();
Vec3.print("Playing from ", Avatar.position);
count--;
}
if (Avatar.isPlaying()) {
Avatar.play();
} else {
Script.update.disconnect(update);
}
}
Script.update.connect(update);

examples/away.js (new file, 129 lines)
View file

@ -0,0 +1,129 @@
"use strict";
/*jslint vars: true, plusplus: true*/
/*global HMD, AudioDevice, MyAvatar, Controller, Script, Overlays, print*/
//
// away.js
// examples
//
// Created by Howard Stearns 11/3/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Goes into "paused" mode when the '.' key is pressed (and automatically when started in HMD), and returns to normal when any key is pressed.
// See MAIN CONTROL, below, for what "paused" actually does.
var IK_WINDOW_AFTER_GOING_ACTIVE = 3000; // milliseconds
var OVERLAY_DATA = {
text: "Paused:\npress any key to continue",
font: {size: 75},
color: {red: 200, green: 255, blue: 255},
alpha: 0.9
};
// ANIMATION
// We currently don't have play/stopAnimation integrated with the animation graph, but we can get the same effect
// using an animation graph with a state that we turn on and off through the animation var defined with that state.
var awayAnimationHandlerId, activeAnimationHandlerId, stopper;
function playAwayAnimation() {
function animateAway() {
return {isAway: true, isNotAway: false, isNotMoving: false, ikOverlayAlpha: 0.0};
}
if (stopper) {
Script.clearTimeout(stopper);
stopper = false;
MyAvatar.removeAnimationStateHandler(activeAnimationHandlerId); // do it now, before making new assignment
}
awayAnimationHandlerId = MyAvatar.addAnimationStateHandler(animateAway, null);
}
function stopAwayAnimation() {
MyAvatar.removeAnimationStateHandler(awayAnimationHandlerId);
if (stopper) {
print('WARNING: unexpected double stop');
return;
}
// How do we know when to turn ikOverlayAlpha back on?
// It cannot be as soon as we want to stop the away animation, because then things will look goofy as we come out of that animation.
// (Imagine an away animation that sits or kneels, and then stands back up when coming out of it. If head is at the HMD, then it won't
// want to track the standing up animation.)
// Our standard anim graph flips 'awayOutroOnDone' for one frame, but it's a trigger (not an animVar) and other folks might use different graphs.
// So... Just give us a fixed amount of time to be done with animation, before we turn ik back on.
var backToNormal = false;
stopper = Script.setTimeout(function () {
backToNormal = true;
stopper = false;
}, IK_WINDOW_AFTER_GOING_ACTIVE);
function animateActive(state) {
if (state.ikOverlayAlpha) {
// Once the right state gets reflected back to us, we don't need the handler any more.
// But we are locked against handler changes during the execution of a handler, so remove asynchronously.
Script.setTimeout(function () { MyAvatar.removeAnimationStateHandler(activeAnimationHandlerId); }, 0);
}
// It might be cool to "come back to life" by fading the ik overlay back in over a short time. But let's see how this goes.
return {isAway: false, isNotAway: true, ikOverlayAlpha: backToNormal ? 1.0 : 0.0}; // IWBNI we had a way of deleting an anim var.
}
activeAnimationHandlerId = MyAvatar.addAnimationStateHandler(animateActive, ['isAway', 'isNotAway', 'isNotMoving', 'ikOverlayAlpha']);
}
// OVERLAY
var overlay = Overlays.addOverlay("text", OVERLAY_DATA);
function showOverlay() {
var screen = Controller.getViewportDimensions();
Overlays.editOverlay(overlay, {visible: true, x: screen.x / 4, y: screen.y / 4});
}
function hideOverlay() {
Overlays.editOverlay(overlay, {visible: false});
}
hideOverlay();
// MAIN CONTROL
var wasMuted, isAway;
function goAway() {
if (isAway) {
return;
}
isAway = true;
print('going "away"');
wasMuted = AudioDevice.getMuted();
if (!wasMuted) {
AudioDevice.toggleMute();
}
MyAvatar.setEnableMeshVisible(false); // just for our own display, without changing point of view
playAwayAnimation(); // animation is still seen by others
showOverlay();
}
function goActive() {
if (!isAway) {
return;
}
isAway = false;
print('going "active"');
if (!wasMuted) {
AudioDevice.toggleMute();
}
MyAvatar.setEnableMeshVisible(true); // IWBNI we respected Developer->Avatar->Draw Mesh setting.
stopAwayAnimation();
hideOverlay();
}
Script.scriptEnding.connect(goActive);
Controller.keyPressEvent.connect(function (event) {
if (event.isAutoRepeat) { // isAutoRepeat is true when held down (or when Windows feels like it)
return;
}
if (!isAway && (event.text === '.')) {
goAway();
} else {
goActive();
}
});
var wasHmdActive = false;
Script.update.connect(function () {
if (HMD.active !== wasHmdActive) {
wasHmdActive = !wasHmdActive;
if (wasHmdActive) {
goAway();
}
}
});

View file

@ -0,0 +1,108 @@
//
// controllerScriptingExamples.js
// examples
//
// Created by Sam Gondelman on 6/2/15
// Rewritten by Alessandro Signa on 11/05/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// This shows how to change the input mapping, making it easier to understand how the new mapping works.
// Two different ways are presented: the first uses a JSON description to create the mapping, the second declares the new routes explicitly one at a time.
// Prefer the first method if you have a lot of new routes, and the second if you want to express the action as a function.
/*
This function returns a JSON body. It is in charge of modifying the standard controller and the mouse/keyboard mapping.
The Standard controller is an abstraction: all the actual controllers are mapped to it. (i.e. Hydra --mapped to-> Standard --mapped to-> Action)
This example will overwrite the mapping of the left axis (Standard.LY, Standard.LX).
It's possible to find all the standard inputs (and their mapping) in standard.json.
To try these changes you need a controller, not the keyboard.
The keyboard/mouse inputs are mapped directly to actions since the keyboard doesn't have its default mapping passing through the Standard controller.
If this new mapping contains inputs which are defined in the standard mapping, these will overwrite the old ones (Keyboard.W, Keyboard.RightMouseButton).
If this new mapping contains inputs which are not defined in the standard, these will be added to the mapping (Keyboard.M).
*/
myFirstMapping = function() {
return {
"name": "controllerMapping_First",
"channels": [
{ "from": "Standard.LY", "to": "Actions.Yaw" },
{ "from": "Standard.LX", "to": "Actions.Yaw" },
{ "from": "Keyboard.W", "to": "Actions.YAW_LEFT" },
{ "from": "Keyboard.M", "to": "Actions.YAW_RIGHT" },
{ "from": "Keyboard.LeftMouseButton", "to": "Actions.Up" }
]
}
}
var firstWay = true;
var mapping;
var MAPPING_NAME;
if(firstWay){
var myFirstMappingJSON = myFirstMapping();
print('myfirstMappingJSON' + JSON.stringify(myFirstMappingJSON));
mapping = Controller.parseMapping(JSON.stringify(myFirstMappingJSON));
mapping.enable();
}else{
MAPPING_NAME = "controllerMapping_Second";
var mapping2 = Controller.newMapping(MAPPING_NAME);
mapping2.from(Controller.Hardware.Keyboard.RightMouseClicked).to(function (value) {
print("Keyboard.RightMouseClicked");
});
mapping2.from(Controller.Standard.LX).to(Controller.Actions.Yaw);
Controller.enableMapping(MAPPING_NAME);
}
/*
//-----------------some info prints that you would like to enable-----------------------
Object.keys(Controller.Standard).forEach(function (input) {
print("Controller.Standard." + input + ":" + Controller.Standard[input]);
});
Object.keys(Controller.Hardware).forEach(function (deviceName) {
Object.keys(Controller.Hardware[deviceName]).forEach(function (input) {
print("Controller.Hardware." + deviceName + "." + input + ":" + Controller.Hardware[deviceName][input]);
});
});
Object.keys(Controller.Actions).forEach(function (actionName) {
print("Controller.Actions." + actionName + ":" + Controller.Actions[actionName]);
});
*/
Controller.hardwareChanged.connect(function () {
print("hardwareChanged ---------------------------------------------------");
Object.keys(Controller.Hardware).forEach(function (deviceName) {
Object.keys(Controller.Hardware[deviceName]).forEach(function (input) {
print("Controller.Hardware." + deviceName + "." + input + ":" + Controller.Hardware[deviceName][input]);
});
});
print("-------------------------------------------------------------------");
});
Script.scriptEnding.connect(function () {
if(firstWay){
mapping.disable();
} else {
Controller.disableMapping(MAPPING_NAME);
}
});

View file

@ -1,10 +0,0 @@
var MAPPING_NAME = "com.highfidelity.rightClickExample";
var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from(Controller.Hardware.Keyboard.RightMouseClicked).to(function (value) {
print("Keyboard.RightMouseClicked");
});
Controller.enableMapping(MAPPING_NAME);
Script.scriptEnding.connect(function () {
Controller.disableMapping(MAPPING_NAME);
});

View file

@ -8,6 +8,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.load("away.js");
Script.load("progress.js");
Script.load("edit.js");
Script.load("selectAudioDevice.js");

View file

@ -0,0 +1,142 @@
// dustSetSpawner.js
// examples
//
// Created by Eric Levin on 9/2/15
// Copyright 2015 High Fidelity, Inc.
//
// Spawns a set with blocks and a desert-y ground. When blocks (or anything else) are thrown, dust particles kick up at the point where the object hits the ground.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
/*global print, MyAvatar, Entities, AnimationCache, SoundCache, Scene, Camera, Overlays, Audio, HMD, AvatarList, AvatarManager, Controller, UndoStack, Window, Account, GlobalServices, Script, ScriptDiscoveryService, LODManager, Menu, Vec3, Quat, AudioDevice, Paths, Clipboard, Settings, XMLHttpRequest, randFloat, randInt, pointInExtents, vec3equal, setEntityCustomData, getEntityCustomData */
map = function(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
orientationOf = function(vector) {
var Y_AXIS = {
x: 0,
y: 1,
z: 0
};
var X_AXIS = {
x: 1,
y: 0,
z: 0
};
var theta = 0.0;
var RAD_TO_DEG = 180.0 / Math.PI;
var direction, yaw, pitch;
direction = Vec3.normalize(vector);
yaw = Quat.angleAxis(Math.atan2(direction.x, direction.z) * RAD_TO_DEG, Y_AXIS);
pitch = Quat.angleAxis(Math.asin(-direction.y) * RAD_TO_DEG, X_AXIS);
return Quat.multiply(yaw, pitch);
}
var ground, wall;
var boxes = [];
var dustSystems = [];
var ZERO_VEC = {x: 0, y: 0, z: 0};
Script.include("../libraries/utils.js");
function spawnGround() {
var groundModelURL = "https://hifi-public.s3.amazonaws.com/alan/Playa/Ground.fbx";
var groundPosition = Vec3.sum(MyAvatar.position, {x: 0, y: -2, z: 0});
ground = Entities.addEntity({
type: "Model",
modelURL: groundModelURL,
shapeType: "box",
position: groundPosition,
dimensions: {x: 900, y: 0.82, z: 900},
});
// Script.addEventHandler(ground, "collisionWithEntity", entityCollisionWithGround);
}
/*function entityCollisionWithGround(ground, entity, collision) {
var dVelocityMagnitude = Vec3.length(collision.velocityChange);
var position = Entities.getEntityProperties(entity, "position").position;
var particleRadius = map(dVelocityMagnitude, 0.05, 3, 0.5, 2);
var speed = map(dVelocityMagnitude, 0.05, 3, 0.02, 0.09);
var displayTime = 400;
var orientationChange = orientationOf(collision.velocityChange);
var dustEffect = Entities.addEntity({
type: "ParticleEffect",
name: "Dust-Puff",
position: position,
color: {red: 195, green: 170, blue: 185},
lifespan: 3,
lifetime: 7,//displayTime/1000 * 2, //So we can fade particle system out gracefully
emitRate: 5,
emitSpeed: speed,
emitAcceleration: ZERO_VEC,
accelerationSpread: ZERO_VEC,
isEmitting: true,
polarStart: Math.PI/2,
polarFinish: Math.PI/2,
emitOrientation: orientationChange,
radiusSpread: 0.1,
radiusStart: particleRadius,
radiusFinish: particleRadius + particleRadius/2,
particleRadius: particleRadius,
alpha: 0.45,
alphaFinish: 0.001,
textures: "https://hifi-public.s3.amazonaws.com/alan/Playa/Particles/Particle-Sprite-Gen.png"
});
dustSystems.push(dustEffect);
Script.setTimeout(function() {
var newRadius = 0.05;
Entities.editEntity(dustEffect, {
alpha: 0.0
});
}, displayTime);
}*/
function spawnBoxes() {
var boxModelURL = "https://hifi-public.s3.amazonaws.com/alan/Tower-Spawn/Stone-Block.fbx";
var collisionSoundURL = "https://hifi-public.s3.amazonaws.com/sounds/Collisions-otherorganic/ToyWoodBlock.L.wav";
var numBoxes = 200;
var center = Vec3.sum(MyAvatar.position, Vec3.multiply(3, Quat.getFront(Camera.getOrientation())));
for (var i = 0; i < numBoxes; i++) {
var position = Vec3.sum(center, {x: Math.random() * numBoxes, y: Math.random() * 3, z: Math.random() * numBoxes })
var box = Entities.addEntity({
type: "Model",
modelURL: boxModelURL,
collisionSoundURL: collisionSoundURL,
shapeType: "box",
position: position,
collisionsWillMove: true,
dimensions: {x: 1, y: 2, z: 3},
velocity: {x: 0, y: -.01, z: 0},
gravity: {x: 0, y: -2.5 - Math.random() * 6, z: 0}
});
boxes.push(box);
}
}
spawnGround();
spawnBoxes();
function cleanup() {
Entities.deleteEntity(ground);
boxes.forEach(function(box){
Entities.deleteEntity(box);
});
dustSystems.forEach(function(dustEffect) {
Entities.deleteEntity(dustEffect);
})
}
Script.scriptEnding.connect(cleanup);

View file

@ -13,13 +13,18 @@
Script.include("../../utilities.js");
Script.include("../../libraries/utils.js");
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/james/bubblewand/models/wand/wand.fbx';
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/james/bubblewand/models/wand/collisionHull.obj';
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/wand.fbx';
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/actual_no_top_collision_hull.obj';
var WAND_SCRIPT_URL = Script.resolvePath("wand.js");
//create the wand in front of the avatar
var center = Vec3.sum(Vec3.sum(MyAvatar.position, {x: 0, y: 0.5, z: 0}), Vec3.multiply(0.5, Quat.getFront(Camera.getOrientation())));
var center = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
y: 0.5,
z: 0
}), Vec3.multiply(0.5, Quat.getFront(Camera.getOrientation())));
var wand = Entities.addEntity({
name: 'Bubble Wand',

View file

@ -17,7 +17,7 @@
Script.include("../../utilities.js");
Script.include("../../libraries/utils.js");
var BUBBLE_MODEL = "http://hifi-public.s3.amazonaws.com/james/bubblewand/models/bubble/bubble.fbx";
var BUBBLE_MODEL = "http://hifi-public.s3.amazonaws.com/models/bubblewand/bubble.fbx";
var BUBBLE_INITIAL_DIMENSIONS = {
x: 0.01,

View file

@ -112,10 +112,9 @@ function setupTimer() {
text: (0.00).toFixed(3),
backgroundColor: COLOR_OFF,
x: 0, y: 0,
width: 0,
height: 0,
alpha: 1.0,
backgroundAlpha: 1.0,
width: 0, height: 0,
leftMargin: 10, topMargin: 10,
alpha: 1.0, backgroundAlpha: 1.0,
visible: true
});

View file

@ -102,7 +102,7 @@ endif()
# link required hifi libraries
link_hifi_libraries(shared octree environment gpu gl procedural model render
fbx networking model-networking entities avatars
recording fbx networking model-networking entities avatars
audio audio-client animation script-engine physics
render-utils entities-renderer ui auto-updater
controllers plugins display-plugins input-plugins )

View file

@ -5,6 +5,7 @@
"type": "overlay",
"data": {
"alpha": 1.0,
"alphaVar": "ikOverlayAlpha",
"boneSet": "fullBody"
},
"children": [
@ -178,7 +179,7 @@
"type": "clip",
"data": {
"url": "http://hifi-public.s3.amazonaws.com/ozan/anim/hand_anims/point_right_hand.fbx",
"startFrame": 0.0,
"startFrame": 12.0,
"endFrame": 65.0,
"timeScale": 1.0,
"loopFlag": false
@ -327,7 +328,7 @@
"type": "clip",
"data": {
"url": "http://hifi-public.s3.amazonaws.com/ozan/anim/hand_anims/point_left_hand.fbx",
"startFrame": 0.0,
"startFrame": 12.0,
"endFrame": 65.0,
"timeScale": 1.0,
"loopFlag": false
@ -378,15 +379,16 @@
"states": [
{
"id": "idle",
"interpTarget": 6,
"interpDuration": 6,
"interpTarget": 15,
"interpDuration": 15,
"transitions": [
{ "var": "isMovingForward", "state": "walkFwd" },
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" }
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isAway", "state": "awayIntro" }
]
},
{
@ -399,7 +401,8 @@
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" }
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isAway", "state": "awayIntro" }
]
},
{
@ -412,7 +415,8 @@
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" }
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isAway", "state": "awayIntro" }
]
},
{
@ -425,7 +429,8 @@
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" }
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isAway", "state": "awayIntro" }
]
},
{
@ -438,7 +443,8 @@
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isTurningLeft", "state": "turnLeft" }
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isAway", "state": "awayIntro" }
]
},
{
@ -451,7 +457,8 @@
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isTurningLeft", "state": "turnLeft" }
{ "var": "isTurningLeft", "state": "turnLeft" },
{ "var": "isAway", "state": "awayIntro" }
]
},
{
@ -464,7 +471,32 @@
{ "var": "isMovingBackward", "state": "walkBwd" },
{ "var": "isMovingRight", "state": "strafeRight" },
{ "var": "isMovingLeft", "state": "strafeLeft" },
{ "var": "isTurningRight", "state": "turnRight" }
{ "var": "isTurningRight", "state": "turnRight" },
{ "var": "isAway", "state": "awayIntro" }
]
},
{
"id": "awayIntro",
"interpTarget": 30,
"interpDuration": 30,
"transitions": [
{ "var": "awayIntroOnDone", "state": "away"}
]
},
{
"id": "away",
"interpTarget": 3,
"interpDuration": 3,
"transitions": [
{ "var": "isNotAway", "state": "awayOutro" }
]
},
{
"id": "awayOutro",
"interpTarget": 3,
"interpDuration": 3,
"transitions": [
{ "var": "awayOutroOnDone", "state": "idle" }
]
}
]
@ -704,6 +736,42 @@
"children": []
}
]
},
{
"id": "awayIntro",
"type": "clip",
"data": {
"url": "https://hifi-public.s3.amazonaws.com/ozan/anim/kneel/kneel.fbx",
"startFrame": 0.0,
"endFrame": 83.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
},
{
"id": "away",
"type": "clip",
"data": {
"url": "https://hifi-public.s3.amazonaws.com/ozan/anim/kneel/kneel.fbx",
"startFrame": 83.0,
"endFrame": 84.0,
"timeScale": 1.0,
"loopFlag": true
},
"children": []
},
{
"id": "awayOutro",
"type": "clip",
"data": {
"url": "https://hifi-public.s3.amazonaws.com/ozan/anim/kneel/kneel.fbx",
"startFrame": 84.0,
"endFrame": 167.0,
"timeScale": 1.0,
"loopFlag": false
},
"children": []
}
]
}

View file

@ -1027,10 +1027,7 @@ void Application::initializeUi() {
foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) {
QString name = inputPlugin->getName();
if (name == KeyboardMouseDevice::NAME) {
auto kbm = static_cast<KeyboardMouseDevice*>(inputPlugin.data());
// FIXME incredibly evil.... _keyboardMouseDevice is now owned by
// both a QSharedPointer and a std::shared_ptr
_keyboardMouseDevice = std::shared_ptr<KeyboardMouseDevice>(kbm);
_keyboardMouseDevice = std::dynamic_pointer_cast<KeyboardMouseDevice>(inputPlugin);
}
}
updateInputModes();
@ -4645,7 +4642,7 @@ DisplayPlugin* Application::getActiveDisplayPlugin() {
updateDisplayMode();
Q_ASSERT(_displayPlugin);
}
return _displayPlugin.data();
return _displayPlugin.get();
}
const DisplayPlugin* Application::getActiveDisplayPlugin() const {
@ -4685,10 +4682,10 @@ void Application::updateDisplayMode() {
bool first = true;
foreach(auto displayPlugin, displayPlugins) {
addDisplayPluginToMenu(displayPlugin, first);
QObject::connect(displayPlugin.data(), &DisplayPlugin::requestRender, [this] {
QObject::connect(displayPlugin.get(), &DisplayPlugin::requestRender, [this] {
paintGL();
});
QObject::connect(displayPlugin.data(), &DisplayPlugin::recommendedFramebufferSizeChanged, [this](const QSize & size) {
QObject::connect(displayPlugin.get(), &DisplayPlugin::recommendedFramebufferSizeChanged, [this](const QSize & size) {
resizeGL();
});
@ -4814,12 +4811,14 @@ void Application::updateInputModes() {
foreach(auto inputPlugin, inputPlugins) {
QString name = inputPlugin->getName();
QAction* action = menu->getActionForOption(name);
if (action->isChecked() && !_activeInputPlugins.contains(inputPlugin)) {
_activeInputPlugins.append(inputPlugin);
newInputPlugins.append(inputPlugin);
} else if (!action->isChecked() && _activeInputPlugins.contains(inputPlugin)) {
_activeInputPlugins.removeOne(inputPlugin);
removedInputPlugins.append(inputPlugin);
auto it = std::find(std::begin(_activeInputPlugins), std::end(_activeInputPlugins), inputPlugin);
if (action->isChecked() && it == std::end(_activeInputPlugins)) {
_activeInputPlugins.push_back(inputPlugin);
newInputPlugins.push_back(inputPlugin);
} else if (!action->isChecked() && it != std::end(_activeInputPlugins)) {
_activeInputPlugins.erase(it);
removedInputPlugins.push_back(inputPlugin);
}
}

View file

@ -130,8 +130,12 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
withWriteLock([&]{
if (_kinematicSetVelocity) {
if (_previousSet) {
glm::vec3 positionalVelocity = (_positionalTarget - _previousPositionalTarget) / deltaTimeStep;
// smooth velocity over 2 frames
glm::vec3 positionalDelta = _positionalTarget - _previousPositionalTarget;
glm::vec3 positionalVelocity = (positionalDelta + _previousPositionalDelta) / (deltaTimeStep + _previousDeltaTimeStep);
rigidBody->setLinearVelocity(glmToBullet(positionalVelocity));
_previousPositionalDelta = positionalDelta;
_previousDeltaTimeStep = deltaTimeStep;
}
}
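In effect, the change above replaces the single-frame velocity estimate (Δp_t / Δt_t) with a two-frame average:

    v = (Δp_t + Δp_{t-1}) / (Δt_t + Δt_{t-1})

that is, the total displacement over the last two kinematic updates divided by their combined duration, which damps single-frame spikes in the hold target.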

View file

@ -46,6 +46,9 @@ private:
bool _previousSet { false };
glm::vec3 _previousPositionalTarget;
glm::quat _previousRotationalTarget;
float _previousDeltaTimeStep = 0.0f;
glm::vec3 _previousPositionalDelta;
};
#endif // hifi_AvatarActionHold_h

View file

@ -604,7 +604,7 @@ void MyAvatar::startRecording() {
// connect to AudioClient's signal so we get input audio
auto audioClient = DependencyManager::get<AudioClient>();
connect(audioClient.data(), &AudioClient::inputReceived, _recorder.data(),
&Recorder::recordAudio, Qt::BlockingQueuedConnection);
&Recorder::recordAudio, Qt::QueuedConnection);
_recorder->startRecording();
}

View file

@ -81,7 +81,7 @@ void AnimClip::setCurrentFrameInternal(float frame) {
// because dt is 0, we should not encounter any triggers
const float dt = 0.0f;
Triggers triggers;
_frame = ::accumulateTime(_startFrame, _endFrame, _timeScale, frame, dt, _loopFlag, _id, triggers);
_frame = ::accumulateTime(_startFrame, _endFrame, _timeScale, frame + _startFrame, dt, _loopFlag, _id, triggers);
}
void AnimClip::copyFromNetworkAnim() {

View file

@ -748,12 +748,13 @@ void AudioClient::handleAudioInput() {
_audioPacket = NLPacket::create(PacketType::MicrophoneAudioNoEcho);
}
float inputToNetworkInputRatio = calculateDeviceToNetworkInputRatio();
const float inputToNetworkInputRatio = calculateDeviceToNetworkInputRatio();
const int inputSamplesRequired = (int)((float)AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * inputToNetworkInputRatio);
const auto inputAudioSamples = std::unique_ptr<int16_t[]>(new int16_t[inputSamplesRequired]);
int inputSamplesRequired = (int)((float)AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * inputToNetworkInputRatio);
static int leadingBytes = sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
int16_t* networkAudioSamples = (int16_t*)(_audioPacket->getPayload() + leadingBytes);
static const int leadingBytes = sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
int16_t* const networkAudioSamples = (int16_t*)(_audioPacket->getPayload() + leadingBytes);
QByteArray inputByteArray = _inputDevice->readAll();
@ -802,16 +803,12 @@ void AudioClient::handleAudioInput() {
_timeSinceLastClip += (float) numNetworkSamples / (float) AudioConstants::SAMPLE_RATE;
}
int16_t* inputAudioSamples = new int16_t[inputSamplesRequired];
_inputRingBuffer.readSamples(inputAudioSamples, inputSamplesRequired);
_inputRingBuffer.readSamples(inputAudioSamples.get(), inputSamplesRequired);
possibleResampling(_inputToNetworkResampler,
inputAudioSamples, networkAudioSamples,
inputAudioSamples.get(), networkAudioSamples,
inputSamplesRequired, numNetworkSamples,
_inputFormat, _desiredInputFormat);
delete[] inputAudioSamples;
// Remove DC offset
if (!_isStereoInput && !_audioSourceInjectEnabled) {
_inputGate.removeDCOffset(networkAudioSamples, numNetworkSamples);
@ -842,8 +839,7 @@ void AudioClient::handleAudioInput() {
_lastInputLoudness = fabs(loudness / numNetworkSamples);
}
emit inputReceived(QByteArray(reinterpret_cast<const char*>(networkAudioSamples),
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL * sizeof(AudioConstants::AudioSample)));
emit inputReceived({reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes});
} else {
// our input loudness is 0, since we're muted

View file

@ -205,7 +205,7 @@ void AudioInjector::injectToMixer() {
while (_currentSendOffset < _audioData.size() && !_shouldStop) {
int bytesToCopy = std::min(((_options.stereo) ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL,
int bytesToCopy = std::min((_options.stereo ? 2 : 1) * AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL,
_audioData.size() - _currentSendOffset);
// Measure the loudness of this frame
@ -261,7 +261,7 @@ void AudioInjector::injectToMixer() {
// not the first packet and not done
// sleep for the appropriate time
int usecToSleep = (++nextFrame * AudioConstants::NETWORK_FRAME_USECS) - timer.nsecsElapsed() / 1000;
int usecToSleep = (++nextFrame * (_options.stereo ? 2 : 1) * AudioConstants::NETWORK_FRAME_USECS) - timer.nsecsElapsed() / 1000;
if (usecToSleep > 0) {
usleep(usecToSleep);

View file

@ -397,16 +397,15 @@ bool Player::computeCurrentFrame() {
}
qint64 elapsed = glm::clamp(Player::elapsed() - _audioOffset, (qint64)0, (qint64)_recording->getLength());
while(_currentFrame >= 0 &&
_recording->getFrameTimestamp(_currentFrame) > elapsed) {
--_currentFrame;
}
while (_currentFrame < _recording->getFrameNumber() &&
_recording->getFrameTimestamp(_currentFrame) < elapsed) {
++_currentFrame;
}
--_currentFrame;
while(_currentFrame > 0 &&
_recording->getFrameTimestamp(_currentFrame) > elapsed) {
--_currentFrame;
}
if (_currentFrame == _recording->getFrameNumber() - 1) {
--_currentFrame;

View file

@ -43,7 +43,6 @@ public slots:
void record();
void recordAudio(const QByteArray& audioArray);
private:
QElapsedTimer _timer;
RecordingPointer _recording;

View file

@ -69,10 +69,10 @@ const RecordingFrame& Recording::getFrame(int i) const {
int Recording::numberAudioChannel() const {
// Check for stereo audio
int MSEC_PER_SEC = 1000;
int channelLength = (getLength() / MSEC_PER_SEC) *
AudioConstants::SAMPLE_RATE * sizeof(AudioConstants::AudioSample);
return glm::round((float)channelLength / (float)getAudioData().size());
float MSEC_PER_SEC = 1000.0f;
float channelLength = ((float)getLength() / MSEC_PER_SEC) * AudioConstants::SAMPLE_RATE *
sizeof(AudioConstants::AudioSample);
return glm::round((float)getAudioData().size() / channelLength);
}
void Recording::addFrame(int timestamp, RecordingFrame &frame) {

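As a worked example of the corrected channel computation (assuming the usual 24 kHz, 16-bit network audio format, which is an assumption here): a 2-second recording gives channelLength = 2 × 24000 × 2 = 96,000 bytes per mono channel, so roughly 192,000 bytes of recorded audio rounds to 2 channels (stereo) and roughly 96,000 bytes rounds to 1 (mono). Note the fix also inverts the old ratio, which divided channelLength by the audio size.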
View file

@ -206,7 +206,7 @@ namespace controller {
void ScriptingInterface::updateMaps() {
QVariantMap newHardware;
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
auto devices = userInputMapper->getDevices();
const auto& devices = userInputMapper->getDevices();
for (const auto& deviceMapping : devices) {
auto deviceID = deviceMapping.first;
if (deviceID != userInputMapper->getStandardDeviceID()) {

View file

@ -100,7 +100,7 @@ namespace controller {
void setSensorToWorldMat(glm::mat4 sensorToWorldMat) { _sensorToWorldMat = sensorToWorldMat; }
glm::mat4 getSensorToWorldMat() { return _sensorToWorldMat; }
DevicesMap getDevices() { return _registeredDevices; }
const DevicesMap& getDevices() { return _registeredDevices; }
uint16 getStandardDeviceID() const { return STANDARD_DEVICE; }
InputDevice::Pointer getStandardDevice() { return _registeredDevices[getStandardDeviceID()]; }

View file

@ -29,7 +29,7 @@ InputPluginList getInputPlugins() {
InputPluginList result;
for (int i = 0; PLUGIN_POOL[i]; ++i) {
InputPlugin * plugin = PLUGIN_POOL[i];
InputPlugin* plugin = PLUGIN_POOL[i];
if (plugin->isSupported()) {
plugin->init();
result.push_back(InputPluginPointer(plugin));

View file

@ -7,9 +7,8 @@
//
#pragma once
#include <QList>
#include <QVector>
#include <QSharedPointer>
#include <vector>
#include <memory>
class DisplayPlugin;
class InputPlugin;
@ -17,8 +16,8 @@ class Plugin;
class PluginContainer;
class PluginManager;
using DisplayPluginPointer = QSharedPointer<DisplayPlugin>;
using DisplayPluginList = QVector<DisplayPluginPointer>;
using InputPluginPointer = QSharedPointer<InputPlugin>;
using InputPluginList = QVector<InputPluginPointer>;
using DisplayPluginPointer = std::shared_ptr<DisplayPlugin>;
using DisplayPluginList = std::vector<DisplayPluginPointer>;
using InputPluginPointer = std::shared_ptr<InputPlugin>;
using InputPluginList = std::vector<InputPluginPointer>;

View file

@ -7,6 +7,8 @@
//
#pragma once
#include <QObject>
#include "Forward.h"
class PluginManager : public QObject {

View file

@ -0,0 +1,48 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Clip.h"
#include "Frame.h"
#include "impl/FileClip.h"
#include "impl/BufferClip.h"
using namespace recording;
Clip::Pointer Clip::fromFile(const QString& filePath) {
return std::make_shared<FileClip>(filePath);
}
void Clip::toFile(Clip::Pointer clip, const QString& filePath) {
// FIXME
}
Clip::Pointer Clip::duplicate() {
Clip::Pointer result = std::make_shared<BufferClip>();
float currentPosition = position();
seek(0);
Frame::Pointer frame = nextFrame();
while (frame) {
result->appendFrame(frame);
frame = nextFrame(); // advance, otherwise this loop never terminates
}
seek(currentPosition);
return result;
}
#if 0
Clip::Pointer Clip::fromIODevice(QIODevice * device) {
return std::make_shared<IOClip>(device);
}
void Clip::fromIODevice(Clip::Pointer clip, QIODevice * device) {
}
#endif

View file

@ -0,0 +1,48 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Recording_Clip_h
#define hifi_Recording_Clip_h
#include "Forward.h"
#include <QtCore/QObject>
class QIODevice;
namespace recording {
class Clip : public QObject {
public:
using Pointer = std::shared_ptr<Clip>;
Clip(QObject* parent = nullptr) : QObject(parent) {}
virtual ~Clip() {}
Pointer duplicate();
virtual void seek(float offset) = 0;
virtual float position() const = 0;
virtual FramePointer peekFrame() const = 0;
virtual FramePointer nextFrame() = 0;
virtual void skipFrame() = 0;
virtual void appendFrame(FramePointer) = 0;
static Pointer fromFile(const QString& filePath);
static void toFile(Pointer clip, const QString& filePath);
protected:
virtual void reset() = 0;
};
}
#endif
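A minimal sketch of consuming this interface, assuming a clip serialized to a local file; dumpClip and the traversal loop are illustrative only:

    #include "Clip.h"
    #include "Frame.h"

    using namespace recording;

    void dumpClip(const QString& filePath) {
        Clip::Pointer clip = Clip::fromFile(filePath);
        clip->seek(0.0f);
        // Walk the frames in time order until nextFrame() returns an empty pointer.
        for (FramePointer frame = clip->nextFrame(); frame; frame = clip->nextFrame()) {
            // frame->type, frame->timeOffset and frame->data describe each recorded frame
        }
    }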

View file

@ -0,0 +1,9 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Deck.h"

View file

@ -0,0 +1,37 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Recording_Deck_h
#define hifi_Recording_Deck_h
#include "Forward.h"
#include <QtCore/QObject>
class QIODevice;
namespace recording {
class Deck : public QObject {
public:
using Pointer = std::shared_ptr<Deck>;
Deck(QObject* parent = nullptr) : QObject(parent) {}
virtual ~Deck();
// Place a clip on the deck for recording or playback
void queueClip(ClipPointer clip, float timeOffset = 0.0f);
void play(float timeOffset = 0.0f);
void reposition(float timeOffsetDelta);
void setPlaybackSpeed(float rate);
};
}
#endif

View file

@ -15,14 +15,28 @@
namespace recording {
using FrameType = uint16_t;
struct Frame;
using FramePointer = std::shared_ptr<Frame>;
// A recording of some set of state from the application, usually avatar
// data + audio for a single person
class Clip;
// An interface for interacting with clips, creating them by recording or
// playing them back. Also serialization to and from files / network sources
using ClipPointer = std::shared_ptr<Clip>;
// An interface for playing back clips
class Deck;
using DeckPointer = std::shared_ptr<Deck>;
// An interface for recording a single clip
class Recorder;
using RecorderPointer = std::shared_ptr<Recorder>;
}
#endif

View file

@ -0,0 +1,103 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Frame.h"
#include <mutex>
#include <QtCore/QMap>
using namespace recording;
// FIXME move to shared
template <typename Key, typename Value>
class Registry {
public:
using ForwardMap = QMap<Value, Key>;
using BackMap = QMap<Key, Value>;
static const Key INVALID_KEY = static_cast<Key>(-1);
Key registerValue(const Value& value) {
Locker lock(_mutex);
Key result = INVALID_KEY;
if (_forwardMap.contains(value)) {
result = _forwardMap[value];
} else {
_forwardMap[value] = result = _nextKey++;
_backMap[result] = value;
}
return result;
}
Key getKey(const Value& value) {
Locker lock(_mutex);
Key result = INVALID_KEY;
if (_forwardMap.contains(value)) {
result = _forwardMap[value];
}
return result;
}
ForwardMap getKeysByValue() {
Locker lock(_mutex);
ForwardMap result = _forwardMap;
return result;
}
BackMap getValuesByKey() {
Locker lock(_mutex);
BackMap result = _backMap;
return result;
}
private:
using Mutex = std::mutex;
using Locker = std::unique_lock<Mutex>;
Mutex _mutex;
ForwardMap _forwardMap;
BackMap _backMap;
Key _nextKey { 0 };
};
static Registry<FrameType, QString> frameTypes;
static QMap<FrameType, Frame::Handler> handlerMap;
using Mutex = std::mutex;
using Locker = std::unique_lock<Mutex>;
static Mutex mutex;
static std::once_flag once;
FrameType Frame::registerFrameType(const QString& frameTypeName) {
Locker lock(mutex);
std::call_once(once, [&] {
auto headerType = frameTypes.registerValue("com.highfidelity.recording.Header");
Q_ASSERT(headerType == Frame::TYPE_HEADER);
});
return frameTypes.registerValue(frameTypeName);
}
QMap<QString, FrameType> Frame::getFrameTypes() {
return frameTypes.getKeysByValue();
}
QMap<FrameType, QString> Frame::getFrameTypeNames() {
return frameTypes.getValuesByKey();
}
Frame::Handler Frame::registerFrameHandler(FrameType type, Handler handler) {
Locker lock(mutex);
Handler result;
if (handlerMap.contains(type)) {
result = handlerMap[type];
}
handlerMap[type] = handler;
return result;
}
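A short sketch of how this registry is meant to be used; the frame type name and the handler body are illustrative, not part of the commit:

    #include "Frame.h"

    using namespace recording;

    // Register (or look up) a stable numeric id for a named frame kind.
    static const FrameType AVATAR_FRAME = Frame::registerFrameType("com.example.recording.Avatar");

    // Install a handler invoked for frames of that type; any previously
    // registered handler is returned so it can be chained or restored.
    static Frame::Handler previousHandler =
        Frame::registerFrameHandler(AVATAR_FRAME, [](Frame::Pointer frame) {
            // frame->data carries the serialized payload for this frame type
        });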

View file

@ -0,0 +1,40 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Recording_Frame_h
#define hifi_Recording_Frame_h
#include "Forward.h"
#include <functional>
#include <QtCore/QObject>
namespace recording {
struct Frame {
public:
using Pointer = std::shared_ptr<Frame>;
using Handler = std::function<void(Frame::Pointer frame)>;
static const FrameType TYPE_INVALID = 0xFFFF;
static const FrameType TYPE_HEADER = 0x0;
FrameType type { TYPE_INVALID };
float timeOffset { 0 };
QByteArray data;
static FrameType registerFrameType(const QString& frameTypeName);
static QMap<QString, FrameType> getFrameTypes();
static QMap<FrameType, QString> getFrameTypeNames();
static Handler registerFrameHandler(FrameType type, Handler handler);
};
}
#endif

View file

@ -0,0 +1,62 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Recorder.h"
#include <NumericalConstants.h>
#include "impl/BufferClip.h"
#include "Frame.h"
using namespace recording;
void Recorder::start() {
if (!_recording) {
_recording = true;
if (!_clip) {
_clip = std::make_shared<BufferClip>();
}
_timer.start();
emit recordingStateChanged();
}
}
void Recorder::stop() {
if (!_recording) {
_recording = false;
_elapsed = _timer.elapsed();
emit recordingStateChanged();
}
}
bool Recorder::isRecording() {
return _recording;
}
void Recorder::clear() {
_clip.reset();
}
void Recorder::recordFrame(FrameType type, QByteArray frameData) {
if (!_recording || !_clip) {
return;
}
Frame::Pointer frame = std::make_shared<Frame>();
frame->type = type;
frame->data = frameData;
frame->timeOffset = (float)(_elapsed + _timer.elapsed()) / MSECS_PER_SECOND;
_clip->appendFrame(frame);
}
ClipPointer Recorder::getClip() {
auto result = _clip;
_clip.reset();
return result;
}

View file

@ -0,0 +1,55 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Recording_Recorder_h
#define hifi_Recording_Recorder_h
#include "Forward.h"
#include <QtCore/QObject>
#include <QtCore/QElapsedTimer>
namespace recording {
// An interface for interacting with clips, creating them by recording or
// playing them back. Also serialization to and from files / network sources
class Recorder : public QObject {
public:
using Pointer = std::shared_ptr<Recorder>;
Recorder(QObject* parent = nullptr) : QObject(parent) {}
virtual ~Recorder();
// Start recording frames
void start();
// Stop recording
void stop();
// Test if recording is active
bool isRecording();
// Erase the currently recorded content
void clear();
void recordFrame(FrameType type, QByteArray frameData);
// Return the currently recorded content
ClipPointer getClip();
signals:
void recordingStateChanged();
private:
QElapsedTimer _timer;
ClipPointer _clip;
quint64 _elapsed;
bool _recording { false };
};
}
#endif
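A minimal usage sketch for the recording side; exampleRecord and the payload are illustrative, and the frame type is assumed to have been registered via Frame::registerFrameType:

    #include "Recorder.h"
    #include "Frame.h"
    #include "Clip.h"

    using namespace recording;

    void exampleRecord(FrameType frameType) {
        Recorder recorder;
        recorder.start();                                       // starts the elapsed timer
        recorder.recordFrame(frameType, QByteArray("payload")); // timeOffset is taken relative to start()
        recorder.stop();

        ClipPointer clip = recorder.getClip();                  // hands the clip off and clears the recorder
        // The clip can then be traversed with seek()/nextFrame() as in the Clip sketch above.
    }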

View file

@ -0,0 +1,74 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "BufferClip.h"
#include "../Frame.h"
using namespace recording;
void BufferClip::seek(float offset) {
Locker lock(_mutex);
auto itr = std::lower_bound(_frames.begin(), _frames.end(), offset,
[](Frame::Pointer a, float b)->bool{
return a->timeOffset < b;
}
);
_frameIndex = itr - _frames.begin();
}
float BufferClip::position() const {
Locker lock(_mutex);
float result = std::numeric_limits<float>::max();
if (_frameIndex < _frames.size()) {
result = _frames[_frameIndex]->timeOffset;
}
return result;
}
FramePointer BufferClip::peekFrame() const {
Locker lock(_mutex);
FramePointer result;
if (_frameIndex < _frames.size()) {
result = _frames[_frameIndex];
}
return result;
}
FramePointer BufferClip::nextFrame() {
Locker lock(_mutex);
FramePointer result;
if (_frameIndex < _frames.size()) {
result = _frames[_frameIndex];
++_frameIndex;
}
return result;
}
void BufferClip::appendFrame(FramePointer newFrame) {
auto currentPosition = position();
seek(newFrame->timeOffset);
{
Locker lock(_mutex);
_frames.insert(_frames.begin() + _frameIndex, newFrame);
}
seek(currentPosition);
}
void BufferClip::skipFrame() {
Locker lock(_mutex);
if (_frameIndex < _frames.size()) {
++_frameIndex;
}
}
void BufferClip::reset() {
Locker lock(_mutex);
_frameIndex = 0;
}

View file

@ -0,0 +1,47 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Recording_Impl_BufferClip_h
#define hifi_Recording_Impl_BufferClip_h
#include "../Clip.h"
#include <mutex>
namespace recording {
class BufferClip : public Clip {
public:
using Pointer = std::shared_ptr<BufferClip>;
BufferClip(QObject* parent = nullptr) : Clip(parent) {}
virtual ~BufferClip() {}
virtual void seek(float offset) override;
virtual float position() const override;
virtual FramePointer peekFrame() const override;
virtual FramePointer nextFrame() override;
virtual void skipFrame() override;
virtual void appendFrame(FramePointer) override;
private:
using Mutex = std::mutex;
using Locker = std::unique_lock<Mutex>;
virtual void reset() override;
std::vector<FramePointer> _frames;
mutable Mutex _mutex;
mutable size_t _frameIndex { 0 };
};
}
#endif

View file

@ -0,0 +1,105 @@
//
// Created by Bradley Austin Davis 2015/11/04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "FileClip.h"
#include "../Frame.h"
#include <algorithm>
using namespace recording;
static const qint64 MINIMUM_FRAME_SIZE = sizeof(FrameType) + sizeof(float) + sizeof(uint16_t) + 1;
FileClip::FileClip(const QString& fileName, QObject* parent) : Clip(parent), _file(fileName) {
auto size = _file.size();
_map = _file.map(0, size, QFile::MapPrivateOption);
auto current = _map;
auto end = current + size;
// Read all the frame headers
while (end - current >= MINIMUM_FRAME_SIZE) {
FrameHeader header;
memcpy(&(header.type), current, sizeof(FrameType));
current += sizeof(FrameType);
memcpy(&(header.timeOffset), current, sizeof(float));
current += sizeof(float);
memcpy(&(header.size), current, sizeof(uint16_t));
current += sizeof(uint16_t);
header.fileOffset = current - _map;
if (end - current < header.size) {
break;
}
current += header.size; // skip past the frame payload to the next header
_frameHeaders.push_back(header);
}
}
FileClip::~FileClip() {
Locker lock(_mutex);
_file.unmap(_map);
_map = nullptr;
}
void FileClip::seek(float offset) {
Locker lock(_mutex);
auto itr = std::lower_bound(_frameHeaders.begin(), _frameHeaders.end(), offset,
[](const FrameHeader& a, float b)->bool {
return a.timeOffset < b;
}
);
_frameIndex = itr - _frameHeaders.begin();
}
float FileClip::position() const {
Locker lock(_mutex);
float result = std::numeric_limits<float>::max();
if (_frameIndex < _frameHeaders.size()) {
result = _frameHeaders[_frameIndex].timeOffset;
}
return result;
}
FramePointer FileClip::readFrame(uint32_t frameIndex) const {
FramePointer result;
if (frameIndex < _frameHeaders.size()) {
result = std::make_shared<Frame>();
const FrameHeader& header = _frameHeaders[frameIndex];
result->type = header.type;
result->timeOffset = header.timeOffset;
result->data.insert(0, reinterpret_cast<char*>(_map)+header.fileOffset, header.size);
}
return result;
}
FramePointer FileClip::peekFrame() const {
Locker lock(_mutex);
return readFrame(_frameIndex);
}
FramePointer FileClip::nextFrame() {
Locker lock(_mutex);
auto result = readFrame(_frameIndex);
if (_frameIndex < _frameHeaders.size()) {
++_frameIndex;
}
return result;
}
void FileClip::skipFrame() {
Locker lock(_mutex);
if (_frameIndex < _frameHeaders.size()) {
++_frameIndex;
}
}
void FileClip::reset() {
Locker lock(_mutex);
_frameIndex = 0;
}
void FileClip::appendFrame(FramePointer) {
throw std::runtime_error("File clips are read only");
}
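
The parser above implies an on-disk frame layout of [FrameType][float timeOffset][uint16_t size][payload]; the following writer sketch mirrors those sizeof() calls — it is not part of this change, and the Frame.h include path is an assumption.

#include <cstdint>
#include <QtCore/QByteArray>
#include <QtCore/QFile>
#include <recording/Frame.h> // assumed location of recording::FrameType

// Writes one frame in the layout FileClip's constructor parses above;
// field widths mirror the sizeof() calls used there.
static bool writeFrame(QFile& file, recording::FrameType type, float timeOffset, const QByteArray& payload) {
    uint16_t size = (uint16_t)payload.size();
    return file.write((const char*)&type, sizeof(type)) == (qint64)sizeof(type) &&
        file.write((const char*)&timeOffset, sizeof(timeOffset)) == (qint64)sizeof(timeOffset) &&
        file.write((const char*)&size, sizeof(size)) == (qint64)sizeof(size) &&
        file.write(payload) == (qint64)payload.size();
}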

View file

@@ -0,0 +1,62 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Recording_Impl_FileClip_h
#define hifi_Recording_Impl_FileClip_h
#include "../Clip.h"
#include <QtCore/QFile>
#include <mutex>
namespace recording {
class FileClip : public Clip {
public:
using Pointer = std::shared_ptr<FileClip>;
FileClip(const QString& file, QObject* parent = nullptr);
virtual ~FileClip();
virtual void seek(float offset) override;
virtual float position() const override;
virtual FramePointer peekFrame() const override;
virtual FramePointer nextFrame() override;
virtual void appendFrame(FramePointer) override;
virtual void skipFrame() override;
private:
using Mutex = std::mutex;
using Locker = std::unique_lock<Mutex>;
virtual void reset() override;
struct FrameHeader {
FrameType type;
float timeOffset;
uint16_t size;
quint64 fileOffset;
};
using FrameHeaders = std::vector<FrameHeader>;
FramePointer readFrame(uint32_t frameIndex) const;
mutable Mutex _mutex;
QFile _file;
uint32_t _frameIndex { 0 };
uchar* _map { nullptr };
FrameHeaders _frameHeaders;
};
}
#endif
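
A hedged playback sketch against the interface above — the file name is a placeholder and the Frame fields are those populated by FileClip::readFrame.

#include <memory>
#include <QtCore/QDebug>
#include "FileClip.h" // assumed include path

static void fileClipSketch() {
    // "recording.hfr" is a placeholder path, not a format defined by this change.
    auto clip = std::make_shared<recording::FileClip>("recording.hfr");
    clip->seek(0.0f);
    // peekFrame() materializes the current frame; skipFrame() advances to the next header.
    while (auto frame = clip->peekFrame()) {
        qDebug() << "type" << (int)frame->type << "at" << frame->timeOffset << "bytes" << frame->data.size();
        clip->skipFrame();
    }
}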

View file

@@ -646,13 +646,14 @@ QScriptValue ScriptEngine::evaluate(const QString& sourceCode, const QString& fi
}
void ScriptEngine::run() {
// TODO: can we add a short circuit for _stoppingAllScripts here? What does it mean to not start running if
// we're in the process of stopping?
if (_stoppingAllScripts) {
return; // bail early - avoid setting state in init(), as evaluate() will bail too
}
if (!_isInitialized) {
init();
}
_isRunning = true;
_isFinished = false;
if (_wantSignals) {

View file

@@ -132,8 +132,7 @@ int main(int argc, char** argv) {
inputPlugin->activate();
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
if (name == KeyboardMouseDevice::NAME) {
auto keyboardMouseDevice = static_cast<KeyboardMouseDevice*>(inputPlugin.data()); // TODO: this seems super hacky
userInputMapper->registerDevice(std::shared_ptr<InputDevice>(keyboardMouseDevice));
userInputMapper->registerDevice(std::dynamic_pointer_cast<KeyboardMouseDevice>(inputPlugin));
}
inputPlugin->pluginUpdate(0, false);
}
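
For context on the change above: constructing a fresh shared_ptr from a raw pointer that is already owned elsewhere creates a second control block and a later double delete, while dynamic_pointer_cast shares ownership with the existing pointer (and is empty if the cast fails). A standalone sketch with illustrative type names, not from the codebase:

#include <memory>

struct InputDeviceSketch { virtual ~InputDeviceSketch() = default; };
struct KeyboardSketch : public InputDeviceSketch {};

static void ownershipSketch(std::shared_ptr<InputDeviceSketch> plugin) {
    // Old pattern (unsafe): wraps the raw pointer in a second, independent owner.
    // std::shared_ptr<KeyboardSketch> bad(static_cast<KeyboardSketch*>(plugin.get()));
    // Both owners eventually delete the same object -> double free.

    // New pattern (safe): shares the existing control block; empty if the cast fails.
    auto keyboard = std::dynamic_pointer_cast<KeyboardSketch>(plugin);
    (void)keyboard;
}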

View file

@@ -0,0 +1,10 @@
# Declare dependencies
macro (setup_testcase_dependencies)
# link in the shared libraries
link_hifi_libraries(shared recording)
copy_dlls_beside_windows_executable()
endmacro ()
setup_hifi_testcase()

View file

@@ -0,0 +1,19 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_Constants_h
#define hifi_Constants_h
#include <QtCore/QString>
static const QString HEADER_NAME = "com.highfidelity.recording.Header";
static const QString TEST_NAME = "com.highfidelity.recording.Test";
#endif // hifi_Constants_h

View file

@@ -0,0 +1,29 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "FrameTests.h"
#include "Constants.h"
#include "../QTestExtensions.h"
#include <recording/Frame.h>
QTEST_MAIN(FrameTests)
void FrameTests::registerFrameTypeTest() {
auto result = recording::Frame::registerFrameType(TEST_NAME);
QCOMPARE(result, (recording::FrameType)1);
auto forwardMap = recording::Frame::getFrameTypes();
QCOMPARE(forwardMap.count(TEST_NAME), 1);
QCOMPARE(forwardMap[TEST_NAME], result);
QCOMPARE(forwardMap[HEADER_NAME], recording::Frame::TYPE_HEADER);
auto backMap = recording::Frame::getFrameTypeNames();
QCOMPARE(backMap.count(result), 1);
QCOMPARE(backMap[result], TEST_NAME);
QCOMPARE(backMap[recording::Frame::TYPE_HEADER], HEADER_NAME);
}

View file

@@ -0,0 +1,21 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_FrameTests_h
#define hifi_FrameTests_h
#include <QtTest/QtTest>
class FrameTests : public QObject {
Q_OBJECT
private slots:
void registerFrameTypeTest();
};
#endif // hifi_FrameTests_h

View file

@@ -0,0 +1,25 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "RecorderTests.h"
#include "Constants.h"
#include "../QTestExtensions.h"
#include <recording/Recorder.h>
QTEST_MAIN(RecorderTests)
void RecorderTests::recorderTest() {
//auto recorder = std::make_shared<recording::Recorder>();
//QCOMPARE(recorder->isRecording(), false);
//recorder->start();
//QCOMPARE(recorder->isRecording(), true);
//recorder->stop();
//QCOMPARE(recorder->isRecording(), false);
}

View file

@@ -0,0 +1,21 @@
//
// Created by Bradley Austin Davis 2015/11/05
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_RecorderTests_h
#define hifi_RecorderTests_h
#include <QtTest/QtTest>
class RecorderTests : public QObject {
Q_OBJECT
private slots:
void recorderTest();
};
#endif

View file

@@ -927,9 +927,8 @@
}
function createWand(position) {
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/james/bubblewand/models/wand/wand.fbx';
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/james/bubblewand/models/wand/actual_no_top_collision_hull.obj';
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/wand.fbx';
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/actual_no_top_collision_hull.obj';
var entity = Entities.addEntity({
name: 'Bubble Wand',
type: "Model",

View file

@@ -909,8 +909,8 @@ MasterReset = function() {
}
function createWand(position) {
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/james/bubblewand/models/wand/wand.fbx';
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/james/bubblewand/models/wand/actual_no_top_collision_hull.obj';
var WAND_MODEL = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/wand.fbx';
var WAND_COLLISION_SHAPE = 'http://hifi-public.s3.amazonaws.com/models/bubblewand/actual_no_top_collision_hull.obj';
var entity = Entities.addEntity({
name: 'Bubble Wand',