merge upstream/master into andrew/ragdoll

Andrew Meadows 2014-08-21 08:21:36 -07:00
commit bc1276944c
23 changed files with 1320 additions and 100 deletions

View file

@ -611,7 +611,10 @@ var modelUploader = (function () {
index += nameLength;
if (name === "content" && previousNodeFilename !== "") {
geometry.embedded.push(previousNodeFilename);
// Blender 2.71 exporter "embeds" external textures as empty binary blobs, so ignore these
if (propertyListLength > 5) {
geometry.embedded.push(previousNodeFilename);
}
}
if (name === "relativefilename") {

443
examples/frisbee.js Normal file
View file

@ -0,0 +1,443 @@
//
// frisbee.js
// examples
//
// Created by Thijs Wenker on 7/5/14.
// Copyright 2014 High Fidelity, Inc.
//
// Requirements: Razer Hydras
//
// Fun game to throw frisbees to each other. Hold the trigger on either of the Hydras to create or catch a frisbee.
//
// Tip: use this together with the squeezeHands.js script to make it look nicer.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("toolBars.js");
const LEFT_PALM = 0;
const LEFT_TIP = 1;
const LEFT_BUTTON_FWD = 5;
const LEFT_BUTTON_3 = 3;
const RIGHT_PALM = 2;
const RIGHT_TIP = 3;
const RIGHT_BUTTON_FWD = 11;
const RIGHT_BUTTON_3 = 9;
const FRISBEE_RADIUS = 0.08;
const GRAVITY_STRENGTH = 0.5;
const CATCH_RADIUS = 0.5;
const MIN_SIMULATION_SPEED = 0.15;
const THROWN_VELOCITY_SCALING = 1.5;
const SOUNDS_ENABLED = true;
const FRISBEE_BUTTON_URL = "http://test.thoys.nl/hifi/images/frisbee/frisbee_button_by_Judas.svg";
const FRISBEE_MODEL_SCALE = 275;
const FRISBEE_MENU = "Toys>Frisbee";
const FRISBEE_DESIGN_MENU = "Toys>Frisbee>Design";
const FRISBEE_ENABLED_SETTING = "Frisbee>Enabled";
const FRISBEE_CREATENEW_SETTING = "Frisbee>CreateNew";
const FRISBEE_DESIGN_SETTING = "Frisbee>Design";
const FRISBEE_FORCE_MOUSE_CONTROLS_SETTING = "Frisbee>ForceMouseControls";
// Add your own designs to FRISBEE_DESIGNS; be sure to include "frisbee" in the model URL if you want others to be able to catch it without having a copy of your frisbee script (see the sketch after this list).
const FRISBEE_DESIGNS = [
{"name":"Interface", "model":"http://test.thoys.nl/hifi/models/frisbee/frisbee.fbx"},
{"name":"Pizza", "model":"http://test.thoys.nl/hifi/models/frisbee/pizza.fbx"},
{"name":"Swirl", "model":"http://test.thoys.nl/hifi/models/frisbee/swirl.fbx"},
{"name":"Mayan", "model":"http://test.thoys.nl/hifi/models/frisbee/mayan.fbx"},
];
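// A sketch, not part of the original file: adding a custom design entry with a
// hypothetical URL. Keeping "frisbee" in the model URL means other players'
// copies of this script can still recognize and catch it via validFrisbeeURL().
FRISBEE_DESIGNS.push({"name": "Glow", "model": "http://example.com/models/glow_frisbee.fbx"});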
const FRISBEE_MENU_DESIGN_POSTFIX = " Design";
const FRISBEE_DESIGN_RANDOM = "Random";
const SPIN_MULTIPLIER = 1000;
const FRISBEE_LIFETIME = 300; // 5 minutes
var windowDimensions = Controller.getViewportDimensions();
var toolHeight = 50;
var toolWidth = 50;
var frisbeeToggle;
var toolBar;
var frisbeeEnabled = true;
var newfrisbeeEnabled = false;
var forceMouseControls = false;
var hydrasConnected = false;
var selectedDesign = FRISBEE_DESIGN_RANDOM;
function loadSettings() {
frisbeeEnabled = Settings.getValue(FRISBEE_ENABLED_SETTING, "true") == "true";
newfrisbeeEnabled = Settings.getValue(FRISBEE_CREATENEW_SETTING, "false") == "true";
forceMouseControls = Settings.getValue(FRISBEE_FORCE_MOUSE_CONTROLS_SETTING, "false") == "true";
selectedDesign = Settings.getValue(FRISBEE_DESIGN_SETTING, "Random");
}
function saveSettings() {
Settings.setValue(FRISBEE_ENABLED_SETTING, frisbeeEnabled ? "true" : "false");
Settings.setValue(FRISBEE_CREATENEW_SETTING, newfrisbeeEnabled ? "true" : "false");
Settings.setValue(FRISBEE_FORCE_MOUSE_CONTROLS_SETTING, forceMouseControls ? "true" : "false");
Settings.setValue(FRISBEE_DESIGN_SETTING, selectedDesign);
}
function moveOverlays() {
var newViewPort = Controller.getViewportDimensions();
if (typeof(toolBar) === 'undefined') {
initToolBar();
} else if (windowDimensions.x == newViewPort.x &&
windowDimensions.y == newViewPort.y) {
return;
}
windowDimensions = newViewPort;
var toolsX = windowDimensions.x - 8 - toolBar.width;
var toolsY = (windowDimensions.y - toolBar.height) / 2 + 80;
toolBar.move(toolsX, toolsY);
}
function frisbeeURL() {
return selectedDesign == FRISBEE_DESIGN_RANDOM ? FRISBEE_DESIGNS[Math.floor(Math.random() * FRISBEE_DESIGNS.length)].model : getFrisbee(selectedDesign).model;
}
// This function checks whether the modelURL is one of our designs or contains "frisbee" in it.
function validFrisbeeURL(frisbeeURL) {
for (var frisbee in FRISBEE_DESIGNS) {
if (FRISBEE_DESIGNS[frisbee].model == frisbeeURL) {
return true;
}
}
return frisbeeURL.toLowerCase().indexOf("frisbee") !== -1;
}
function getFrisbee(frisbeeName) {
for (var frisbee in FRISBEE_DESIGNS) {
if (FRISBEE_DESIGNS[frisbee].name == frisbeeName) {
return FRISBEE_DESIGNS[frisbee];
}
}
return undefined;
}
function Hand(name, palm, tip, forwardButton, button3, trigger) {
this.name = name;
this.palm = palm;
this.tip = tip;
this.forwardButton = forwardButton;
this.button3 = button3;
this.trigger = trigger;
this.holdingFrisbee = false;
this.particle = false;
this.palmPosition = function() { return Controller.getSpatialControlPosition(this.palm); }
this.grabButtonPressed = function() {
return (
Controller.isButtonPressed(this.forwardButton) ||
Controller.isButtonPressed(this.button3) ||
Controller.getTriggerValue(this.trigger) > 0.5
)
};
this.holdPosition = function() { return this.palm == LEFT_PALM ? MyAvatar.getLeftPalmPosition() : MyAvatar.getRightPalmPosition(); };
this.holdRotation = function() {
var q = Controller.getSpatialControlRawRotation(this.palm);
q = Quat.multiply(MyAvatar.orientation, q);
return {x: q.x, y: q.y, z: q.z, w: q.w};
};
this.tipVelocity = function() { return Controller.getSpatialControlVelocity(this.tip); };
}
function MouseControl(button) {
this.button = button;
}
var leftHand = new Hand("LEFT", LEFT_PALM, LEFT_TIP, LEFT_BUTTON_FWD, LEFT_BUTTON_3, 0);
var rightHand = new Hand("RIGHT", RIGHT_PALM, RIGHT_TIP, RIGHT_BUTTON_FWD, RIGHT_BUTTON_3, 1);
var leftMouseControl = new MouseControl("LEFT");
var middleMouseControl = new MouseControl("MIDDLE");
var rightMouseControl = new MouseControl("RIGHT");
var mouseControls = [leftMouseControl, middleMouseControl, rightMouseControl];
var currentMouseControl = false;
var newSound = new Sound("https://dl.dropboxusercontent.com/u/1864924/hifi-sounds/throw.raw");
var catchSound = new Sound("https://dl.dropboxusercontent.com/u/1864924/hifi-sounds/catch.raw");
var throwSound = new Sound("http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/Switches%20and%20sliders/slider%20-%20whoosh1.raw");
var simulatedFrisbees = [];
var wantDebugging = false;
function debugPrint(message) {
if (wantDebugging) {
print(message);
}
}
function playSound(sound, position) {
if (!SOUNDS_ENABLED) {
return;
}
var options = new AudioInjectionOptions();
options.position = position;
options.volume = 1.0;
Audio.playSound(sound, options);
}
function cleanupFrisbees() {
simulatedFrisbees = [];
var particles = Particles.findParticles(MyAvatar.position, 1000);
for (particle in particles) {
Particles.deleteParticle(particles[particle]);
}
}
function checkControllerSide(hand) {
// If I don't currently have a frisbee in my hand, then try to catch the closest one
if (!hand.holdingFrisbee && hand.grabButtonPressed()) {
var closestParticle = Particles.findClosestParticle(hand.palmPosition(), CATCH_RADIUS);
var modelUrl = Particles.getParticleProperties(closestParticle).modelURL;
if (closestParticle.isKnownID && validFrisbeeURL(Particles.getParticleProperties(closestParticle).modelURL)) {
Particles.editParticle(closestParticle, {modelScale: 1, inHand: true, position: hand.holdPosition(), shouldDie: true});
Particles.deleteParticle(closestParticle);
debugPrint(hand.name + " HAND - CAUGHT SOMETHING!!");
var properties = {
position: hand.holdPosition(),
velocity: { x: 0, y: 0, z: 0},
gravity: { x: 0, y: 0, z: 0},
inHand: true,
radius: FRISBEE_RADIUS,
damping: 0.999,
modelURL: modelUrl,
modelScale: FRISBEE_MODEL_SCALE,
modelRotation: hand.holdRotation(),
lifetime: FRISBEE_LIFETIME
};
newParticle = Particles.addParticle(properties);
hand.holdingFrisbee = true;
hand.particle = newParticle;
playSound(catchSound, hand.holdPosition());
return; // exit early
}
}
// If the grab button is pressed and we're not holding a frisbee, make a new one
if (hand.grabButtonPressed() && !hand.holdingFrisbee && newfrisbeeEnabled) {
var properties = {
position: hand.holdPosition(),
velocity: { x: 0, y: 0, z: 0},
gravity: { x: 0, y: 0, z: 0},
inHand: true,
radius: FRISBEE_RADIUS,
damping: 0.999,
modelURL: frisbeeURL(),
modelScale: FRISBEE_MODEL_SCALE,
modelRotation: hand.holdRotation(),
lifetime: FRISBEE_LIFETIME
};
newParticle = Particles.addParticle(properties);
hand.holdingFrisbee = true;
hand.particle = newParticle;
// Play a new frisbee sound
playSound(newSound, hand.holdPosition());
return; // exit early
}
if (hand.holdingFrisbee) {
// If holding the frisbee keep it in the palm
if (hand.grabButtonPressed()) {
debugPrint(">>>>> " + hand.name + "-FRISBEE IN HAND, grabbing, hold and move");
var properties = {
position: hand.holdPosition(),
modelRotation: hand.holdRotation()
};
Particles.editParticle(hand.particle, properties);
} else {
debugPrint(">>>>> " + hand.name + "-FRISBEE IN HAND, not grabbing, THROW!!!");
// If frisbee just released, add velocity to it!
var properties = {
velocity: Vec3.multiply(hand.tipVelocity(), THROWN_VELOCITY_SCALING),
inHand: false,
lifetime: FRISBEE_LIFETIME,
gravity: { x: 0, y: -GRAVITY_STRENGTH, z: 0},
modelRotation: hand.holdRotation()
};
Particles.editParticle(hand.particle, properties);
simulatedFrisbees.push(hand.particle);
hand.holdingFrisbee = false;
hand.particle = false;
playSound(throwSound, hand.holdPosition());
}
}
}
function initToolBar() {
toolBar = new ToolBar(0, 0, ToolBar.VERTICAL);
frisbeeToggle = toolBar.addTool({
imageURL: FRISBEE_BUTTON_URL,
subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
width: toolWidth,
height: toolHeight,
visible: true,
alpha: 0.9
}, true);
enableNewFrisbee(newfrisbeeEnabled);
}
function hydraCheck() {
var numberOfButtons = Controller.getNumberOfButtons();
var numberOfTriggers = Controller.getNumberOfTriggers();
var numberOfSpatialControls = Controller.getNumberOfSpatialControls();
var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers;
hydrasConnected = (numberOfButtons == 12 && numberOfTriggers == 2 && controllersPerTrigger == 2);
return hydrasConnected;
}
function checkController(deltaTime) {
moveOverlays();
if (!frisbeeEnabled) {
return;
}
// use the hydra control path when hydras are connected
if (hydraCheck()) {
checkControllerSide(leftHand);
checkControllerSide(rightHand);
}
if (!hydrasConnected || forceMouseControls) {
//TODO: add mouse cursor control code here.
}
}
function controlFrisbees(deltaTime) {
var killSimulations = [];
for (frisbee in simulatedFrisbees) {
var properties = Particles.getParticleProperties(simulatedFrisbees[frisbee]);
//use the horizontal (x-z) component of the velocity to get the speed
var speed = Vec3.length({x:properties.velocity.x, y:0, z:properties.velocity.z});
if (speed < MIN_SIMULATION_SPEED) {
//kill the frisbee simulation when speed is low
killSimulations.push(frisbee);
continue;
}
Particles.editParticle(simulatedFrisbees[frisbee], {modelRotation: Quat.multiply(properties.modelRotation, Quat.fromPitchYawRollDegrees(0, speed * deltaTime * SPIN_MULTIPLIER, 0))});
}
for (var i = killSimulations.length - 1; i >= 0; i--) {
simulatedFrisbees.splice(killSimulations[i], 1);
}
}
//used to ignore hydra-cursor events that fall outside the window
function withinBounds(coords) {
return coords.x >= 0 && coords.x < windowDimensions.x && coords.y >= 0 && coords.y < windowDimensions.y;
}
function mouseMoveEvent(event) {
//TODO: mouse controls //print(withinBounds(event)); //print("move"+event.x);
}
function mousePressEvent(event) {
print(event.x);
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (frisbeeToggle == toolBar.clicked(clickedOverlay)) {
newfrisbeeEnabled = !newfrisbeeEnabled;
saveSettings();
enableNewFrisbee(newfrisbeeEnabled);
}
}
function enableNewFrisbee(enable) {
if (toolBar.numberOfTools() > 0) {
toolBar.tools[0].select(enable);
}
}
function mouseReleaseEvent(event) {
//TODO: mouse controls //print(JSON.stringify(event));
}
function setupMenus() {
Menu.addMenu(FRISBEE_MENU);
Menu.addMenuItem({
menuName: FRISBEE_MENU,
menuItemName: "Frisbee Enabled",
isCheckable: true,
isChecked: frisbeeEnabled
});
Menu.addMenuItem({
menuName: FRISBEE_MENU,
menuItemName: "Cleanup Frisbees"
});
Menu.addMenuItem({
menuName: FRISBEE_MENU,
menuItemName: "Force Mouse Controls",
isCheckable: true,
isChecked: forceMouseControls
});
Menu.addMenu(FRISBEE_DESIGN_MENU);
Menu.addMenuItem({
menuName: FRISBEE_DESIGN_MENU,
menuItemName: FRISBEE_DESIGN_RANDOM + FRISBEE_MENU_DESIGN_POSTFIX,
isCheckable: true,
isChecked: selectedDesign == FRISBEE_DESIGN_RANDOM
});
for (frisbee in FRISBEE_DESIGNS) {
Menu.addMenuItem({
menuName: FRISBEE_DESIGN_MENU,
menuItemName: FRISBEE_DESIGNS[frisbee].name + FRISBEE_MENU_DESIGN_POSTFIX,
isCheckable: true,
isChecked: selectedDesign == FRISBEE_DESIGNS[frisbee].name
});
}
}
//startup calls:
loadSettings();
setupMenus();
function scriptEnding() {
toolBar.cleanup();
Menu.removeMenu(FRISBEE_MENU);
}
function menuItemEvent(menuItem) {
if (menuItem == "Cleanup Frisbees") {
cleanupFrisbees();
return;
} else if (menuItem == "Frisbee Enabled") {
frisbeeEnabled = Menu.isOptionChecked(menuItem);
saveSettings();
return;
} else if (menuItem == "Force Mouse Controls") {
forceMouseControls = Menu.isOptionChecked(menuItem);
saveSettings();
return;
}
if (menuItem.indexOf(FRISBEE_MENU_DESIGN_POSTFIX, menuItem.length - FRISBEE_MENU_DESIGN_POSTFIX.length) !== -1) {
var item_name = menuItem.substring(0, menuItem.length - FRISBEE_MENU_DESIGN_POSTFIX.length);
if (item_name == FRISBEE_DESIGN_RANDOM || getFrisbee(item_name) != undefined) {
Menu.setIsOptionChecked(selectedDesign + FRISBEE_MENU_DESIGN_POSTFIX, false);
selectedDesign = item_name;
saveSettings();
Menu.setIsOptionChecked(selectedDesign + FRISBEE_MENU_DESIGN_POSTFIX, true);
}
}
}
// register the callbacks (the update callbacks fire before each data send)
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Menu.menuItemEvent.connect(menuItemEvent);
Script.scriptEnding.connect(scriptEnding);
Script.update.connect(checkController);
Script.update.connect(controlFrisbees);

View file

@ -21,10 +21,10 @@ var position = { x: MyAvatar.position.x, y: MyAvatar.position.y, z: MyAvatar.pos
var joysticksCaptured = false;
var THRUST_CONTROLLER = 0;
var VIEW_CONTROLLER = 1;
var INITIAL_THRUST_MULTPLIER = 1.0;
var INITIAL_THRUST_MULTIPLIER = 1.0;
var THRUST_INCREASE_RATE = 1.05;
var MAX_THRUST_MULTIPLIER = 75.0;
var thrustMultiplier = INITIAL_THRUST_MULTPLIER;
var thrustMultiplier = INITIAL_THRUST_MULTIPLIER;
var grabDelta = { x: 0, y: 0, z: 0};
var grabStartPosition = { x: 0, y: 0, z: 0};
var grabDeltaVelocity = { x: 0, y: 0, z: 0};
@ -34,6 +34,8 @@ var grabbingWithRightHand = false;
var wasGrabbingWithRightHand = false;
var grabbingWithLeftHand = false;
var wasGrabbingWithLeftHand = false;
var movingWithHead = false;
var headStartPosition, headStartDeltaPitch, headStartFinalPitch, headStartRoll, headStartYaw;
var EPSILON = 0.000001;
var velocity = { x: 0, y: 0, z: 0};
var THRUST_MAG_UP = 100.0;
@ -241,6 +243,47 @@ function handleGrabBehavior(deltaTime) {
wasGrabbingWithLeftHand = grabbingWithLeftHand;
}
var HEAD_MOVE_DEAD_ZONE = 0.0;
var HEAD_STRAFE_DEAD_ZONE = 0.0;
var HEAD_ROTATE_DEAD_ZONE = 0.0;
var HEAD_THRUST_FWD_SCALE = 12000.0;
var HEAD_THRUST_STRAFE_SCALE = 1000.0;
var HEAD_YAW_RATE = 2.0;
var HEAD_PITCH_RATE = 1.0;
var HEAD_ROLL_THRUST_SCALE = 75.0;
var HEAD_PITCH_LIFT_THRUST = 3.0;
function moveWithHead(deltaTime) {
if (movingWithHead) {
var deltaYaw = MyAvatar.getHeadFinalYaw() - headStartYaw;
var deltaPitch = MyAvatar.getHeadDeltaPitch() - headStartDeltaPitch;
var bodyLocalCurrentHeadVector = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position);
bodyLocalCurrentHeadVector = Vec3.multiplyQbyV(Quat.angleAxis(-deltaYaw, {x:0, y: 1, z:0}), bodyLocalCurrentHeadVector);
var headDelta = Vec3.subtract(bodyLocalCurrentHeadVector, headStartPosition);
headDelta = Vec3.multiplyQbyV(Quat.inverse(Camera.getOrientation()), headDelta);
headDelta.y = 0.0; // Don't respond to the vertical component of head motion
// Thrust based on leaning forward and side-to-side
if (Math.abs(headDelta.z) > HEAD_MOVE_DEAD_ZONE) {
MyAvatar.addThrust(Vec3.multiply(Quat.getFront(Camera.getOrientation()), -headDelta.z * HEAD_THRUST_FWD_SCALE * deltaTime));
}
if (Math.abs(headDelta.x) > HEAD_STRAFE_DEAD_ZONE) {
MyAvatar.addThrust(Vec3.multiply(Quat.getRight(Camera.getOrientation()), headDelta.x * HEAD_THRUST_STRAFE_SCALE * deltaTime));
}
if (Math.abs(deltaYaw) > HEAD_ROTATE_DEAD_ZONE) {
var orientation = Quat.multiply(Quat.angleAxis(deltaYaw * HEAD_YAW_RATE * deltaTime, {x:0, y: 1, z:0}), MyAvatar.orientation);
MyAvatar.orientation = orientation;
}
// Thrust Up/Down based on head pitch
MyAvatar.addThrust(Vec3.multiply({ x:0, y:1, z:0 }, (MyAvatar.getHeadFinalPitch() - headStartFinalPitch) * HEAD_PITCH_LIFT_THRUST * deltaTime));
// For head trackers, adjust pitch by head pitch
MyAvatar.headPitch += deltaPitch * HEAD_PITCH_RATE * deltaTime;
// Thrust strafe based on roll angle
MyAvatar.addThrust(Vec3.multiply(Quat.getRight(Camera.getOrientation()), -(MyAvatar.getHeadFinalRoll() - headStartRoll) * HEAD_ROLL_THRUST_SCALE * deltaTime));
}
}
// Update for joysticks and move button
function flyWithHydra(deltaTime) {
var thrustJoystickPosition = Controller.getJoystickPosition(THRUST_CONTROLLER);
@ -262,7 +305,7 @@ function flyWithHydra(deltaTime) {
thrustJoystickPosition.x * thrustMultiplier * deltaTime);
MyAvatar.addThrust(thrustRight);
} else {
thrustMultiplier = INITIAL_THRUST_MULTPLIER;
thrustMultiplier = INITIAL_THRUST_MULTIPLIER;
}
// View Controller
@ -280,6 +323,7 @@ function flyWithHydra(deltaTime) {
MyAvatar.headPitch = newPitch;
}
handleGrabBehavior(deltaTime);
moveWithHead(deltaTime);
displayDebug();
}
@ -296,3 +340,19 @@ function scriptEnding() {
}
Script.scriptEnding.connect(scriptEnding);
Controller.keyPressEvent.connect(function(event) {
if (event.text == "SPACE" && !movingWithHead) {
movingWithHead = true;
headStartPosition = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position);
headStartDeltaPitch = MyAvatar.getHeadDeltaPitch();
headStartFinalPitch = MyAvatar.getHeadFinalPitch();
headStartRoll = MyAvatar.getHeadFinalRoll();
headStartYaw = MyAvatar.getHeadFinalYaw();
}
});
Controller.keyReleaseEvent.connect(function(event) {
if (event.text == "SPACE") {
movingWithHead = false;
}
});

201
examples/speechControl.js Normal file
View file

@ -0,0 +1,201 @@
//
// speechControl.js
// examples
//
// Created by Ryan Huffman on 07/31/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var ACCELERATION = 80;
var STEP_DURATION = 1.0; // Duration of a step command in seconds
var TURN_DEGREES = 90;
var SLIGHT_TURN_DEGREES = 45;
var TURN_AROUND_DEGREES = 180;
var TURN_RATE = 90; // Turn rate in degrees per second
/*****************************************************************************/
/** COMMANDS *****************************************************************/
var CMD_MOVE_FORWARD = "Move forward";
var CMD_MOVE_BACKWARD = "Move backward";
var CMD_MOVE_UP = "Move up";
var CMD_MOVE_DOWN = "Move down";
var CMD_MOVE_LEFT = "Move left";
var CMD_MOVE_RIGHT = "Move right";
var CMD_STEP_FORWARD = "Step forward";
var CMD_STEP_BACKWARD = "Step backward";
var CMD_STEP_LEFT = "Step left";
var CMD_STEP_RIGHT = "Step right";
var CMD_STEP_UP = "Step up";
var CMD_STEP_DOWN = "Step down";
var CMD_TURN_LEFT = "Turn left";
var CMD_TURN_SLIGHT_LEFT = "Turn slight left";
var CMD_TURN_RIGHT = "Turn right";
var CMD_TURN_SLIGHT_RIGHT = "Turn slight right";
var CMD_TURN_AROUND = "Turn around";
var CMD_STOP = "Stop";
var CMD_SHOW_COMMANDS = "Show commands";
var MOVE_COMMANDS = [
CMD_MOVE_FORWARD,
CMD_MOVE_BACKWARD,
CMD_MOVE_UP,
CMD_MOVE_DOWN,
CMD_MOVE_LEFT,
CMD_MOVE_RIGHT,
];
var STEP_COMMANDS = [
CMD_STEP_FORWARD,
CMD_STEP_BACKWARD,
CMD_STEP_UP,
CMD_STEP_DOWN,
CMD_STEP_LEFT,
CMD_STEP_RIGHT,
];
var TURN_COMMANDS = [
CMD_TURN_LEFT,
CMD_TURN_SLIGHT_LEFT,
CMD_TURN_RIGHT,
CMD_TURN_SLIGHT_RIGHT,
CMD_TURN_AROUND,
];
var OTHER_COMMANDS = [
CMD_STOP,
CMD_SHOW_COMMANDS,
];
var ALL_COMMANDS = []
.concat(MOVE_COMMANDS)
.concat(STEP_COMMANDS)
.concat(TURN_COMMANDS)
.concat(OTHER_COMMANDS);
/** END OF COMMANDS **********************************************************/
/*****************************************************************************/
var currentCommandFunc = null;
function handleCommandRecognized(command) {
if (MOVE_COMMANDS.indexOf(command) > -1 || STEP_COMMANDS.indexOf(command) > -1) {
// If this is a STEP_* command, we will want to count down the duration
// of time to move. MOVE_* commands don't stop.
var timeRemaining = MOVE_COMMANDS.indexOf(command) > -1 ? 0 : STEP_DURATION;
var accel = { x: 0, y: 0, z: 0 };
if (command == CMD_MOVE_FORWARD || command == CMD_STEP_FORWARD) {
accel = { x: 0, y: 0, z: 1 };
} else if (command == CMD_MOVE_BACKWARD || command == CMD_STEP_BACKWARD) {
accel = { x: 0, y: 0, z: -1 };
} else if (command === CMD_MOVE_UP || command == CMD_STEP_UP) {
accel = { x: 0, y: 1, z: 0 };
} else if (command == CMD_MOVE_DOWN || command == CMD_STEP_DOWN) {
accel = { x: 0, y: -1, z: 0 };
} else if (command == CMD_MOVE_LEFT || command == CMD_STEP_LEFT) {
accel = { x: -1, y: 0, z: 0 };
} else if (command == CMD_MOVE_RIGHT || command == CMD_STEP_RIGHT) {
accel = { x: 1, y: 0, z: 0 };
}
currentCommandFunc = function(dt) {
if (timeRemaining > 0 && dt >= timeRemaining) {
dt = timeRemaining;
}
var headOrientation = MyAvatar.headOrientation;
var front = Quat.getFront(headOrientation);
var right = Quat.getRight(headOrientation);
var up = Quat.getUp(headOrientation);
var thrust = Vec3.multiply(front, accel.z * ACCELERATION);
thrust = Vec3.sum(thrust, Vec3.multiply(right, accel.x * ACCELERATION));
thrust = Vec3.sum(thrust, Vec3.multiply(up, accel.y * ACCELERATION));
MyAvatar.addThrust(thrust);
if (timeRemaining > 0) {
timeRemaining -= dt;
return timeRemaining > 0;
}
return true;
};
} else if (TURN_COMMANDS.indexOf(command) > -1) {
var degreesRemaining;
var sign;
if (command == CMD_TURN_LEFT) {
sign = 1;
degreesRemaining = TURN_DEGREES;
} else if (command == CMD_TURN_RIGHT) {
sign = -1;
degreesRemaining = TURN_DEGREES;
} else if (command == CMD_TURN_SLIGHT_LEFT) {
sign = 1;
degreesRemaining = SLIGHT_TURN_DEGREES;
} else if (command == CMD_TURN_SLIGHT_RIGHT) {
sign = -1;
degreesRemaining = SLIGHT_TURN_DEGREES;
} else if (command == CMD_TURN_AROUND) {
sign = 1;
degreesRemaining = TURN_AROUND_DEGREES;
}
currentCommandFunc = function(dt) {
// Determine how much to turn by
var turnAmount = TURN_RATE * dt;
if (turnAmount > degreesRemaining) {
turnAmount = degreesRemaining;
}
// Apply turn
var orientation = MyAvatar.orientation;
var deltaOrientation = Quat.fromPitchYawRollDegrees(0, sign * turnAmount, 0);
MyAvatar.orientation = Quat.multiply(orientation, deltaOrientation);
degreesRemaining -= turnAmount;
return turnAmount > 0;
}
} else if (command == CMD_STOP) {
currentCommandFunc = null;
} else if (command == CMD_SHOW_COMMANDS) {
var msg = "";
for (var i = 0; i < ALL_COMMANDS.length; i++) {
msg += ALL_COMMANDS[i] + "\n";
}
Window.alert(msg);
}
}
function update(dt) {
if (currentCommandFunc) {
if (currentCommandFunc(dt) === false) {
currentCommandFunc = null;
}
}
}
function setup() {
for (var i = 0; i < ALL_COMMANDS.length; i++) {
SpeechRecognizer.addCommand(ALL_COMMANDS[i]);
}
}
function scriptEnding() {
for (var i = 0; i < ALL_COMMANDS.length; i++) {
SpeechRecognizer.removeCommand(ALL_COMMANDS[i]);
}
}
Script.scriptEnding.connect(scriptEnding);
Script.update.connect(update);
SpeechRecognizer.commandRecognized.connect(handleCommandRecognized);
setup();
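// A sketch, not part of this file: the SpeechRecognizer global is only
// registered when Interface is built for OS X (see the #ifdef Q_OS_MAC block
// around the SpeechRecognizer registerGlobalObject call further down), so a
// script can guard against it being undefined on other platforms before
// wiring up commands.
if (typeof SpeechRecognizer !== "undefined") {
    SpeechRecognizer.addCommand("Stop");
    SpeechRecognizer.commandRecognized.connect(function(command) {
        print("Recognized command: " + command);
    });
} else {
    print("SpeechRecognizer is unavailable on this platform; speech control is disabled.");
}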

View file

@ -46,6 +46,15 @@ foreach(SUBDIR avatar devices renderer ui starfield location scripting voxels pa
set(INTERFACE_SRCS ${INTERFACE_SRCS} "${SUBDIR_SRCS}")
endforeach(SUBDIR)
# Add SpeechRecognizer if on OS X, otherwise remove
if (APPLE)
file(GLOB INTERFACE_OBJCPP_SRCS "src/SpeechRecognizer.mm")
set(INTERFACE_SRCS ${INTERFACE_SRCS} ${INTERFACE_OBJCPP_SRCS})
else ()
get_filename_component(SPEECHRECOGNIZER_H "src/SpeechRecognizer.h" ABSOLUTE)
list(REMOVE_ITEM INTERFACE_SRCS ${SPEECHRECOGNIZER_H})
endif ()
find_package(Qt5 COMPONENTS Gui Multimedia Network OpenGL Script Svg WebKitWidgets)
# grab the ui files in resources/ui
@ -165,8 +174,9 @@ if (APPLE)
find_library(CoreFoundation CoreFoundation)
find_library(GLUT GLUT)
find_library(OpenGL OpenGL)
find_library(AppKit AppKit)
target_link_libraries(${TARGET_NAME} ${CoreAudio} ${CoreFoundation} ${GLUT} ${OpenGL})
target_link_libraries(${TARGET_NAME} ${CoreAudio} ${CoreFoundation} ${GLUT} ${OpenGL} ${AppKit})
# install command for OS X bundle
INSTALL(TARGETS ${TARGET_NAME}

View file

@ -894,7 +894,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
}
break;
case Qt::Key_Space:
case Qt::Key_Apostrophe:
resetSensors();
break;
@ -1051,20 +1051,6 @@ void Application::keyPressEvent(QKeyEvent* event) {
case Qt::Key_R:
if (isShifted) {
Menu::getInstance()->triggerOption(MenuOption::FrustumRenderMode);
} else if (isMeta) {
if (_myAvatar->isRecording()) {
_myAvatar->stopRecording();
} else {
_myAvatar->startRecording();
_audio.setRecorder(_myAvatar->getRecorder());
}
} else {
if (_myAvatar->isPlaying()) {
_myAvatar->stopPlaying();
} else {
_myAvatar->startPlaying();
_audio.setPlayer(_myAvatar->getPlayer());
}
}
break;
case Qt::Key_Percent:
@ -3749,6 +3735,10 @@ ScriptEngine* Application::loadScript(const QString& scriptName, bool loadScript
scriptEngine->registerGlobalObject("Camera", cameraScriptable);
connect(scriptEngine, SIGNAL(finished(const QString&)), cameraScriptable, SLOT(deleteLater()));
#ifdef Q_OS_MAC
scriptEngine->registerGlobalObject("SpeechRecognizer", Menu::getInstance()->getSpeechRecognizer());
#endif
ClipboardScriptingInterface* clipboardScriptable = new ClipboardScriptingInterface();
scriptEngine->registerGlobalObject("Clipboard", clipboardScriptable);
connect(scriptEngine, SIGNAL(finished(const QString&)), clipboardScriptable, SLOT(deleteLater()));

View file

@ -94,6 +94,9 @@ Menu::Menu() :
_octreeStatsDialog(NULL),
_lodToolsDialog(NULL),
_userLocationsDialog(NULL),
#ifdef Q_OS_MAC
_speechRecognizer(),
#endif
_maxVoxels(DEFAULT_MAX_VOXELS_PER_SYSTEM),
_voxelSizeScale(DEFAULT_OCTREE_SIZE_SCALE),
_oculusUIAngularSize(DEFAULT_OCULUS_UI_ANGULAR_SIZE),
@ -225,6 +228,12 @@ Menu::Menu() :
addActionToQMenuAndActionHash(toolsMenu, MenuOption::MetavoxelEditor, 0, this, SLOT(showMetavoxelEditor()));
addActionToQMenuAndActionHash(toolsMenu, MenuOption::ScriptEditor, Qt::ALT | Qt::Key_S, this, SLOT(showScriptEditor()));
#ifdef Q_OS_MAC
QAction* speechRecognizerAction = addCheckableActionToQMenuAndActionHash(toolsMenu, MenuOption::ControlWithSpeech,
Qt::CTRL | Qt::SHIFT | Qt::Key_C, _speechRecognizer.getEnabled(), &_speechRecognizer, SLOT(setEnabled(bool)));
connect(&_speechRecognizer, SIGNAL(enabledUpdated(bool)), speechRecognizerAction, SLOT(setChecked(bool)));
#endif
#ifdef HAVE_QXMPP
_chatAction = addActionToQMenuAndActionHash(toolsMenu,
MenuOption::Chat,
@ -688,6 +697,10 @@ void Menu::loadSettings(QSettings* settings) {
QStandardPaths::writableLocation(QStandardPaths::DesktopLocation)).toString();
setScriptsLocation(settings->value("scriptsLocation", QString()).toString());
#ifdef Q_OS_MAC
_speechRecognizer.setEnabled(settings->value("speechRecognitionEnabled", false).toBool());
#endif
settings->beginGroup("View Frustum Offset Camera");
// in case settings is corrupt or missing loadSetting() will check for NaN
_viewFrustumOffset.yaw = loadSetting(settings, "viewFrustumOffsetYaw", 0.0f);
@ -735,6 +748,9 @@ void Menu::saveSettings(QSettings* settings) {
settings->setValue("boundaryLevelAdjust", _boundaryLevelAdjust);
settings->setValue("snapshotsLocation", _snapshotsLocation);
settings->setValue("scriptsLocation", _scriptsLocation);
#ifdef Q_OS_MAC
settings->setValue("speechRecognitionEnabled", _speechRecognizer.getEnabled());
#endif
settings->beginGroup("View Frustum Offset Camera");
settings->setValue("viewFrustumOffsetYaw", _viewFrustumOffset.yaw);
settings->setValue("viewFrustumOffsetPitch", _viewFrustumOffset.pitch);

View file

@ -23,6 +23,10 @@
#include <MenuItemProperties.h>
#include <OctreeConstants.h>
#ifdef Q_OS_MAC
#include "SpeechRecognizer.h"
#endif
#include "location/LocationManager.h"
#include "ui/PreferencesDialog.h"
#include "ui/ChatWindow.h"
@ -137,6 +141,10 @@ public:
void setBoundaryLevelAdjust(int boundaryLevelAdjust);
int getBoundaryLevelAdjust() const { return _boundaryLevelAdjust; }
#ifdef Q_OS_MAC
SpeechRecognizer* getSpeechRecognizer() { return &_speechRecognizer; }
#endif
// User Tweakable PPS from Voxel Server
int getMaxVoxelPacketsPerSecond() const { return _maxVoxelPacketsPerSecond; }
void setMaxVoxelPacketsPerSecond(int maxVoxelPacketsPerSecond) { _maxVoxelPacketsPerSecond = maxVoxelPacketsPerSecond; }
@ -272,6 +280,9 @@ private:
OctreeStatsDialog* _octreeStatsDialog;
LodToolsDialog* _lodToolsDialog;
UserLocationsDialog* _userLocationsDialog;
#ifdef Q_OS_MAC
SpeechRecognizer _speechRecognizer;
#endif
int _maxVoxels;
float _voxelSizeScale;
float _oculusUIAngularSize;
@ -351,6 +362,7 @@ namespace MenuOption {
const QString CollideWithVoxels = "Collide With Voxels";
const QString Collisions = "Collisions";
const QString Console = "Console...";
const QString ControlWithSpeech = "Control With Speech";
const QString DecreaseAvatarSize = "Decrease Avatar Size";
const QString DecreaseVoxelSize = "Decrease Voxel Size";
const QString DisableActivityLogger = "Disable Activity Logger";

View file

@ -11,7 +11,9 @@
#include <GLMHelpers.h>
#include <QFile>
#include <QMetaObject>
#include <QObject>
#include "Recorder.h"
@ -221,7 +223,7 @@ void Player::startPlaying() {
_audioThread = new QThread();
_options.setPosition(_avatar->getPosition());
_options.setOrientation(_avatar->getOrientation());
_injector.reset(new AudioInjector(_recording->getAudio(), _options), &QObject::deleteLater);
_injector.reset(new AudioInjector(_recording->getAudio(), _options));
_injector->moveToThread(_audioThread);
_audioThread->start();
QMetaObject::invokeMethod(_injector.data(), "injectAudio", Qt::QueuedConnection);
@ -241,9 +243,14 @@ void Player::stopPlaying() {
// Cleanup audio thread
_injector->stop();
QObject::connect(_injector.data(), &AudioInjector::finished,
_injector.data(), &AudioInjector::deleteLater);
QObject::connect(_injector.data(), &AudioInjector::destroyed,
_audioThread, &QThread::quit);
QObject::connect(_audioThread, &QThread::finished,
_audioThread, &QThread::deleteLater);
_injector.clear();
_audioThread->exit();
_audioThread->deleteLater();
_audioThread = NULL;
qDebug() << "Recorder::stopPlaying()";
}
@ -309,13 +316,249 @@ bool Player::computeCurrentFrame() {
return true;
}
void writeRecordingToFile(RecordingPointer recording, QString file) {
// TODO
qDebug() << "Writing recording to " << file;
void writeRecordingToFile(RecordingPointer recording, QString filename) {
qDebug() << "Writing recording to " << filename;
QElapsedTimer timer;
QFile file(filename);
if (!file.open(QIODevice::WriteOnly)){
return;
}
qDebug() << file.fileName();
QDataStream fileStream(&file);
fileStream << recording->_timestamps;
RecordingFrame& baseFrame = recording->_frames[0];
int totalLength = 0;
// Blendshape coefficients
fileStream << baseFrame._blendshapeCoefficients;
totalLength += baseFrame._blendshapeCoefficients.size();
// Joint Rotations
int jointRotationSize = baseFrame._jointRotations.size();
fileStream << jointRotationSize;
for (int i = 0; i < jointRotationSize; ++i) {
fileStream << baseFrame._jointRotations[i].x << baseFrame._jointRotations[i].y << baseFrame._jointRotations[i].z << baseFrame._jointRotations[i].w;
}
totalLength += jointRotationSize;
// Translation
fileStream << baseFrame._translation.x << baseFrame._translation.y << baseFrame._translation.z;
totalLength += 1;
// Rotation
fileStream << baseFrame._rotation.x << baseFrame._rotation.y << baseFrame._rotation.z << baseFrame._rotation.w;
totalLength += 1;
// Scale
fileStream << baseFrame._scale;
totalLength += 1;
// Head Rotation
fileStream << baseFrame._headRotation.x << baseFrame._headRotation.y << baseFrame._headRotation.z << baseFrame._headRotation.w;
totalLength += 1;
// Lean Sideways
fileStream << baseFrame._leanSideways;
totalLength += 1;
// Lean Forward
fileStream << baseFrame._leanForward;
totalLength += 1;
for (int i = 1; i < recording->_timestamps.size(); ++i) {
QBitArray mask(totalLength);
int maskIndex = 0;
QByteArray buffer;
QDataStream stream(&buffer, QIODevice::WriteOnly);
RecordingFrame& previousFrame = recording->_frames[i - 1];
RecordingFrame& frame = recording->_frames[i];
// Blendshape coefficients
for (int i = 0; i < frame._blendshapeCoefficients.size(); ++i) {
if (frame._blendshapeCoefficients[i] != previousFrame._blendshapeCoefficients[i]) {
stream << frame._blendshapeCoefficients[i];
mask.setBit(maskIndex);
}
maskIndex++;
}
// Joint Rotations
for (int i = 0; i < frame._jointRotations.size(); ++i) {
if (frame._jointRotations[i] != previousFrame._jointRotations[i]) {
stream << frame._jointRotations[i].x << frame._jointRotations[i].y << frame._jointRotations[i].z << frame._jointRotations[i].w;
mask.setBit(maskIndex);
}
maskIndex++;
}
// Translation
if (frame._translation != previousFrame._translation) {
stream << frame._translation.x << frame._translation.y << frame._translation.z;
mask.setBit(maskIndex);
}
maskIndex++;
// Rotation
if (frame._rotation != previousFrame._rotation) {
stream << frame._rotation.x << frame._rotation.y << frame._rotation.z << frame._rotation.w;
mask.setBit(maskIndex);
}
maskIndex++;
// Scale
if (frame._scale != previousFrame._scale) {
stream << frame._scale;
mask.setBit(maskIndex);
}
maskIndex++;
// Head Rotation
if (frame._headRotation != previousFrame._headRotation) {
stream << frame._headRotation.x << frame._headRotation.y << frame._headRotation.z << frame._headRotation.w;
mask.setBit(maskIndex);
}
maskIndex++;
// Lean Sideways
if (frame._leanSideways != previousFrame._leanSideways) {
stream << frame._leanSideways;
mask.setBit(maskIndex);
}
maskIndex++;
// Lean Forward
if (frame._leanForward != previousFrame._leanForward) {
stream << frame._leanForward;
mask.setBit(maskIndex);
}
maskIndex++;
fileStream << mask;
fileStream << buffer;
}
fileStream << recording->_audio->getByteArray();
qDebug() << "Wrote " << file.size() << " bytes in " << timer.elapsed();
}
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString file) {
// TODO
qDebug() << "Reading recording from " << file;
RecordingPointer readRecordingFromFile(RecordingPointer recording, QString filename) {
qDebug() << "Reading recording from " << filename;
if (!recording) {
recording.reset(new Recording());
}
QFile file(filename);
if (!file.open(QIODevice::ReadOnly)){
return recording;
}
QDataStream fileStream(&file);
fileStream >> recording->_timestamps;
RecordingFrame baseFrame;
// Blendshape coefficients
fileStream >> baseFrame._blendshapeCoefficients;
// Joint Rotations
int jointRotationSize;
fileStream >> jointRotationSize;
baseFrame._jointRotations.resize(jointRotationSize);
for (int i = 0; i < jointRotationSize; ++i) {
fileStream >> baseFrame._jointRotations[i].x >> baseFrame._jointRotations[i].y >> baseFrame._jointRotations[i].z >> baseFrame._jointRotations[i].w;
}
fileStream >> baseFrame._translation.x >> baseFrame._translation.y >> baseFrame._translation.z;
fileStream >> baseFrame._rotation.x >> baseFrame._rotation.y >> baseFrame._rotation.z >> baseFrame._rotation.w;
fileStream >> baseFrame._scale;
fileStream >> baseFrame._headRotation.x >> baseFrame._headRotation.y >> baseFrame._headRotation.z >> baseFrame._headRotation.w;
fileStream >> baseFrame._leanSideways;
fileStream >> baseFrame._leanForward;
recording->_frames << baseFrame;
for (int i = 1; i < recording->_timestamps.size(); ++i) {
QBitArray mask;
QByteArray buffer;
QDataStream stream(&buffer, QIODevice::ReadOnly);
RecordingFrame frame;
RecordingFrame& previousFrame = recording->_frames.last();
fileStream >> mask;
fileStream >> buffer;
int maskIndex = 0;
// Blendshape Coefficients
frame._blendshapeCoefficients.resize(baseFrame._blendshapeCoefficients.size());
for (int i = 0; i < baseFrame._blendshapeCoefficients.size(); ++i) {
if (mask[maskIndex++]) {
stream >> frame._blendshapeCoefficients[i];
} else {
frame._blendshapeCoefficients[i] = previousFrame._blendshapeCoefficients[i];
}
}
// Joint Rotations
frame._jointRotations.resize(baseFrame._jointRotations.size());
for (int i = 0; i < baseFrame._jointRotations.size(); ++i) {
if (mask[maskIndex++]) {
stream >> frame._jointRotations[i].x >> frame._jointRotations[i].y >> frame._jointRotations[i].z >> frame._jointRotations[i].w;
} else {
frame._jointRotations[i] = previousFrame._jointRotations[i];
}
}
if (mask[maskIndex++]) {
stream >> frame._translation.x >> frame._translation.y >> frame._translation.z;
} else {
frame._translation = previousFrame._translation;
}
if (mask[maskIndex++]) {
stream >> frame._rotation.x >> frame._rotation.y >> frame._rotation.z >> frame._rotation.w;
} else {
frame._rotation = previousFrame._rotation;
}
if (mask[maskIndex++]) {
stream >> frame._scale;
} else {
frame._scale = previousFrame._scale;
}
if (mask[maskIndex++]) {
stream >> frame._headRotation.x >> frame._headRotation.y >> frame._headRotation.z >> frame._headRotation.w;
} else {
frame._headRotation = previousFrame._headRotation;
}
if (mask[maskIndex++]) {
stream >> frame._leanSideways;
} else {
frame._leanSideways = previousFrame._leanSideways;
}
if (mask[maskIndex++]) {
stream >> frame._leanForward;
} else {
frame._leanForward = previousFrame._leanForward;
}
recording->_frames << frame;
}
QByteArray audioArray;
fileStream >> audioArray;
recording->addAudioPacket(audioArray);
qDebug() << "Read " << file.size() << " bytes";
return recording;
}
}

View file

@ -0,0 +1,47 @@
//
// SpeechRecognizer.h
// interface/src
//
// Created by Ryan Huffman on 07/31/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_SpeechRecognizer_h
#define hifi_SpeechRecognizer_h
#include <QObject>
#include <QSet>
#include <QString>
class SpeechRecognizer : public QObject {
Q_OBJECT
public:
SpeechRecognizer();
~SpeechRecognizer();
void handleCommandRecognized(const char* command);
bool getEnabled() const { return _enabled; }
public slots:
void setEnabled(bool enabled);
void addCommand(const QString& command);
void removeCommand(const QString& command);
signals:
void commandRecognized(const QString& command);
void enabledUpdated(bool enabled);
protected:
void reloadCommands();
private:
bool _enabled;
QSet<QString> _commands;
void* _speechRecognizerDelegate;
void* _speechRecognizer;
};
#endif // hifi_SpeechRecognizer_h

View file

@ -0,0 +1,109 @@
//
// SpeechRecognizer.mm
// interface/src
//
// Created by Ryan Huffman on 07/31/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QtGlobal>
#ifdef Q_OS_MAC
#import <Foundation/Foundation.h>
#import <AppKit/NSSpeechRecognizer.h>
#import <AppKit/NSWorkspace.h>
#include <QDebug>
#include "SpeechRecognizer.h"
@interface SpeechRecognizerDelegate : NSObject <NSSpeechRecognizerDelegate> {
SpeechRecognizer* _listener;
}
- (void)setListener:(SpeechRecognizer*)listener;
- (void)speechRecognizer:(NSSpeechRecognizer*)sender didRecognizeCommand:(id)command;
@end
@implementation SpeechRecognizerDelegate
- (void)setListener:(SpeechRecognizer*)listener {
_listener = listener;
}
- (void)speechRecognizer:(NSSpeechRecognizer*)sender didRecognizeCommand:(id)command {
_listener->handleCommandRecognized(((NSString*)command).UTF8String);
}
@end
SpeechRecognizer::SpeechRecognizer() :
QObject(),
_enabled(false),
_commands(),
_speechRecognizerDelegate([[SpeechRecognizerDelegate alloc] init]),
_speechRecognizer(NULL) {
[(id)_speechRecognizerDelegate setListener:this];
}
SpeechRecognizer::~SpeechRecognizer() {
if (_speechRecognizer) {
[(id)_speechRecognizer dealloc];
}
if (_speechRecognizerDelegate) {
[(id)_speechRecognizerDelegate dealloc];
}
}
void SpeechRecognizer::handleCommandRecognized(const char* command) {
emit commandRecognized(QString(command));
}
void SpeechRecognizer::setEnabled(bool enabled) {
if (enabled == _enabled) {
return;
}
_enabled = enabled;
if (_enabled) {
_speechRecognizer = [[NSSpeechRecognizer alloc] init];
reloadCommands();
[(id)_speechRecognizer setDelegate:(id)_speechRecognizerDelegate];
[(id)_speechRecognizer startListening];
} else {
[(id)_speechRecognizer stopListening];
[(id)_speechRecognizer dealloc];
_speechRecognizer = NULL;
}
emit enabledUpdated(_enabled);
}
void SpeechRecognizer::reloadCommands() {
if (_speechRecognizer) {
NSMutableArray* cmds = [NSMutableArray array];
for (QSet<QString>::const_iterator iter = _commands.constBegin(); iter != _commands.constEnd(); iter++) {
[cmds addObject:[NSString stringWithUTF8String:(*iter).toLocal8Bit().data()]];
}
[(id)_speechRecognizer setCommands:cmds];
}
}
void SpeechRecognizer::addCommand(const QString& command) {
_commands.insert(command);
reloadCommands();
}
void SpeechRecognizer::removeCommand(const QString& command) {
_commands.remove(command);
reloadCommands();
}
#endif // Q_OS_MAC

View file

@ -91,7 +91,7 @@ public:
const QVector<Model*>& getAttachmentModels() const { return _attachmentModels; }
glm::vec3 getChestPosition() const;
float getScale() const { return _scale; }
const glm::vec3& getVelocity() const { return _velocity; }
Q_INVOKABLE const glm::vec3& getVelocity() const { return _velocity; }
const Head* getHead() const { return static_cast<const Head*>(_headData); }
Head* getHead() { return static_cast<Head*>(_headData); }
Hand* getHand() { return static_cast<Hand*>(_handData); }
@ -152,9 +152,9 @@ public:
Q_INVOKABLE glm::quat getJointCombinedRotation(int index) const;
Q_INVOKABLE glm::quat getJointCombinedRotation(const QString& name) const;
glm::vec3 getAcceleration() const { return _acceleration; }
glm::vec3 getAngularVelocity() const { return _angularVelocity; }
glm::vec3 getAngularAcceleration() const { return _angularAcceleration; }
Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }
Q_INVOKABLE glm::vec3 getAngularVelocity() const { return _angularVelocity; }
Q_INVOKABLE glm::vec3 getAngularAcceleration() const { return _angularAcceleration; }
/// Scales a world space position vector relative to the avatar position and scale

View file

@ -508,40 +508,105 @@ bool MyAvatar::setJointReferential(int id, int jointIndex) {
}
}
bool MyAvatar::isRecording() const {
bool MyAvatar::isRecording() {
if (QThread::currentThread() != thread()) {
bool result;
QMetaObject::invokeMethod(this, "isRecording", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, result));
return result;
}
return _recorder && _recorder->isRecording();
}
RecorderPointer MyAvatar::startRecording() {
void MyAvatar::startRecording() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startRecording", Qt::BlockingQueuedConnection);
return;
}
if (!_recorder) {
_recorder = RecorderPointer(new Recorder(this));
}
Application::getInstance()->getAudio()->setRecorder(_recorder);
_recorder->startRecording();
return _recorder;
}
void MyAvatar::stopRecording() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "stopRecording", Qt::BlockingQueuedConnection);
return;
}
if (_recorder) {
_recorder->stopRecording();
}
}
bool MyAvatar::isPlaying() const {
void MyAvatar::saveRecording(QString filename) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "saveRecording", Qt::BlockingQueuedConnection,
Q_ARG(QString, filename));
return;
}
if (_recorder) {
_recorder->saveToFile(filename);
}
}
bool MyAvatar::isPlaying() {
if (QThread::currentThread() != thread()) {
bool result;
QMetaObject::invokeMethod(this, "isPlaying", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, result));
return result;
}
return _player && _player->isPlaying();
}
PlayerPointer MyAvatar::startPlaying() {
void MyAvatar::loadRecording(QString filename) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection,
Q_ARG(QString, filename));
return;
}
if (!_player) {
_player = PlayerPointer(new Player(this));
}
if (_recorder) {
_player->loadRecording(_recorder->getRecording());
_player->startPlaying();
_player->loadFromFile(filename);
}
void MyAvatar::loadLastRecording() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "loadLastRecording", Qt::BlockingQueuedConnection);
return;
}
return _player;
if (!_recorder) {
return;
}
if (!_player) {
_player = PlayerPointer(new Player(this));
}
_player->loadRecording(_recorder->getRecording());
}
void MyAvatar::startPlaying() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection);
return;
}
if (!_player) {
_player = PlayerPointer(new Player(this));
}
Application::getInstance()->getAudio()->setPlayer(_player);
_player->startPlaying();
}
void MyAvatar::stopPlaying() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "stopPlaying", Qt::BlockingQueuedConnection);
return;
}
if (_player) {
_player->stopPlaying();
}
@ -955,7 +1020,7 @@ void MyAvatar::clearJointsData() {
for (int i = 0; i < _jointData.size(); ++i) {
Avatar::clearJointData(i);
if (QThread::currentThread() == thread()) {
_skeletonModel.clearJointState(i);
_skeletonModel.clearJointAnimationPriority(i);
}
}
}

View file

@ -106,7 +106,15 @@ public:
virtual int parseDataAtOffset(const QByteArray& packet, int offset);
static void sendKillAvatar();
Q_INVOKABLE glm::vec3 getHeadPosition() const { return getHead()->getPosition(); }
Q_INVOKABLE float getHeadFinalYaw() const { return getHead()->getFinalYaw(); }
Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); }
Q_INVOKABLE float getHeadFinalPitch() const { return getHead()->getFinalPitch(); }
Q_INVOKABLE float getHeadDeltaPitch() const { return getHead()->getDeltaPitch(); }
Q_INVOKABLE glm::vec3 getEyePosition() const { return getHead()->getEyePosition(); }
Q_INVOKABLE glm::vec3 getTargetAvatarPosition() const { return _targetAvatarPosition; }
AvatarData* getLookAtTargetAvatar() const { return _lookAtTargetAvatar.data(); }
void updateLookAtTargetAvatar();
@ -134,6 +142,10 @@ public:
/// Renders a laser pointer for UI picking
void renderLaserPointers();
glm::vec3 getLaserPointerTipPosition(const PalmData* palm);
const RecorderPointer getRecorder() const { return _recorder; }
const PlayerPointer getPlayer() const { return _player; }
public slots:
void goHome();
void increaseSize();
@ -157,14 +169,15 @@ public slots:
bool setModelReferential(int id);
bool setJointReferential(int id, int jointIndex);
const RecorderPointer getRecorder() const { return _recorder; }
bool isRecording() const;
RecorderPointer startRecording();
bool isRecording();
void startRecording();
void stopRecording();
void saveRecording(QString filename);
const PlayerPointer getPlayer() const { return _player; }
bool isPlaying() const;
PlayerPointer startPlaying();
bool isPlaying();
void loadRecording(QString filename);
void loadLastRecording();
void startPlaying();
void stopPlaying();
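The recording and playback methods above are now plain public slots returning void or bool rather than RecorderPointer/PlayerPointer, so they can be invoked safely from scripts running off the avatar's thread. A minimal script-side sketch of the new flow, using the MyAvatar and Controller globals already seen in the example scripts above; the key binding, the event.text values, and the save path are assumptions, not part of this commit:
// Press R to start recording; press it again to stop, save, and play back.
var SAVE_PATH = "recordings/take1.rec";  // hypothetical path
Controller.keyPressEvent.connect(function(event) {
    if (event.text == "r" || event.text == "R") {
        if (!MyAvatar.isRecording()) {
            MyAvatar.startRecording();
        } else {
            MyAvatar.stopRecording();
            MyAvatar.saveRecording(SAVE_PATH);
            MyAvatar.loadLastRecording();  // copy the recorder's take into the player
            MyAvatar.startPlaying();
        }
    }
});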

View file

@ -389,7 +389,6 @@ void CaraFaceTracker::decodePacket(const QByteArray& buffer) {
if (theta > EPSILON) {
float rMag = glm::length(glm::vec3(r.x, r.y, r.z));
const float AVERAGE_CARA_FRAME_TIME = 0.04f;
const float ANGULAR_VELOCITY_MIN = 1.2f;
const float YAW_STANDARD_DEV_DEG = 2.5f;
_headAngularVelocity = theta / AVERAGE_CARA_FRAME_TIME * glm::vec3(r.x, r.y, r.z) / rMag;

View file

@ -194,12 +194,12 @@ float updateAndGetCoefficient(float * coefficient, float currentValue, bool scal
coefficient[AVG] = LONG_TERM_AVERAGE * coefficient[AVG] + (1.f - LONG_TERM_AVERAGE) * currentValue;
if (coefficient[MAX] > coefficient[MIN]) {
if (scaleToRange) {
return glm::clamp((currentValue - coefficient[AVG]) / (coefficient[MAX] - coefficient[MIN]), 0.f, 1.f);
return glm::clamp((currentValue - coefficient[AVG]) / (coefficient[MAX] - coefficient[MIN]), 0.0f, 1.0f);
} else {
return glm::clamp(currentValue - coefficient[AVG], 0.f, 1.f);
return glm::clamp(currentValue - coefficient[AVG], 0.0f, 1.0f);
}
} else {
return 0.f;
return 0.0f;
}
}
@ -242,13 +242,11 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
// Set blendshapes
float EYE_MAGNIFIER = 4.0f;
float rightEye = (updateAndGetCoefficient(_rightEye, packet.expressions[0])) * EYE_MAGNIFIER;
float rightEye = glm::clamp((updateAndGetCoefficient(_rightEye, packet.expressions[0])) * EYE_MAGNIFIER, 0.0f, 1.0f);
_blendshapeCoefficients[_rightBlinkIndex] = rightEye;
float leftEye = (updateAndGetCoefficient(_leftEye, packet.expressions[1])) * EYE_MAGNIFIER;
float leftEye = glm::clamp((updateAndGetCoefficient(_leftEye, packet.expressions[1])) * EYE_MAGNIFIER, 0.0f, 1.0f);
_blendshapeCoefficients[_leftBlinkIndex] = leftEye;
// Right eye = packet.expressions[0];
float leftBrow = 1.0f - rescaleCoef(packet.expressions[14]);
if (leftBrow < 0.5f) {
@ -270,9 +268,9 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
float JAW_OPEN_MAGNIFIER = 1.4f;
_blendshapeCoefficients[_jawOpenIndex] = rescaleCoef(packet.expressions[21]) * JAW_OPEN_MAGNIFIER;
_blendshapeCoefficients[_mouthSmileLeftIndex] = rescaleCoef(packet.expressions[24]);
_blendshapeCoefficients[_mouthSmileRightIndex] = rescaleCoef(packet.expressions[23]);
float SMILE_MULTIPLIER = 2.0f;
_blendshapeCoefficients[_mouthSmileLeftIndex] = glm::clamp(packet.expressions[24] * SMILE_MULTIPLIER, 0.0f, 1.0f);
_blendshapeCoefficients[_mouthSmileRightIndex] = glm::clamp(packet.expressions[23] * SMILE_MULTIPLIER, 0.0f, 1.0f);
} else {

View file

@ -26,11 +26,17 @@ using namespace fs;
using namespace std;
const quint16 FACESHIFT_PORT = 33433;
float STARTING_FACESHIFT_FRAME_TIME = 0.033f;
Faceshift::Faceshift() :
_tcpEnabled(true),
_tcpRetryCount(0),
_lastTrackingStateReceived(0),
_averageFrameTime(STARTING_FACESHIFT_FRAME_TIME),
_headAngularVelocity(0),
_headLinearVelocity(0),
_lastHeadTranslation(0),
_filteredHeadTranslation(0),
_eyeGazeLeftPitch(0.0f),
_eyeGazeLeftYaw(0.0f),
_eyeGazeRightPitch(0.0f),
@ -209,23 +215,41 @@ void Faceshift::receive(const QByteArray& buffer) {
float theta = 2 * acos(r.w);
if (theta > EPSILON) {
float rMag = glm::length(glm::vec3(r.x, r.y, r.z));
float AVERAGE_FACESHIFT_FRAME_TIME = 0.033f;
_headAngularVelocity = theta / AVERAGE_FACESHIFT_FRAME_TIME * glm::vec3(r.x, r.y, r.z) / rMag;
_headAngularVelocity = theta / _averageFrameTime * glm::vec3(r.x, r.y, r.z) / rMag;
} else {
_headAngularVelocity = glm::vec3(0,0,0);
}
_headRotation = newRotation;
const float ANGULAR_VELOCITY_FILTER_STRENGTH = 0.3f;
_headRotation = safeMix(_headRotation, newRotation, glm::clamp(glm::length(_headAngularVelocity) *
ANGULAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f));
const float TRANSLATION_SCALE = 0.02f;
_headTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y,
-data.m_headTranslation.z) * TRANSLATION_SCALE;
glm::vec3 newHeadTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y,
-data.m_headTranslation.z) * TRANSLATION_SCALE;
_headLinearVelocity = (newHeadTranslation - _lastHeadTranslation) / _averageFrameTime;
const float LINEAR_VELOCITY_FILTER_STRENGTH = 0.3f;
float velocityFilter = glm::clamp(1.0f - glm::length(_headLinearVelocity) *
LINEAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
_filteredHeadTranslation = velocityFilter * _filteredHeadTranslation + (1.0f - velocityFilter) * newHeadTranslation;
_lastHeadTranslation = newHeadTranslation;
_headTranslation = _filteredHeadTranslation;
_eyeGazeLeftPitch = -data.m_eyeGazeLeftPitch;
_eyeGazeLeftYaw = data.m_eyeGazeLeftYaw;
_eyeGazeRightPitch = -data.m_eyeGazeRightPitch;
_eyeGazeRightYaw = data.m_eyeGazeRightYaw;
_blendshapeCoefficients = QVector<float>::fromStdVector(data.m_coeffs);
_lastTrackingStateReceived = usecTimestampNow();
const float FRAME_AVERAGING_FACTOR = 0.99f;
quint64 usecsNow = usecTimestampNow();
if (_lastTrackingStateReceived != 0) {
_averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime +
(1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastTrackingStateReceived) / 1000000.0f;
}
_lastTrackingStateReceived = usecsNow;
}
break;
}

View file

@ -98,8 +98,12 @@ private:
int _tcpRetryCount;
bool _tracking;
quint64 _lastTrackingStateReceived;
float _averageFrameTime;
glm::vec3 _headAngularVelocity;
glm::vec3 _headLinearVelocity;
glm::vec3 _lastHeadTranslation;
glm::vec3 _filteredHeadTranslation;
// degrees
float _eyeGazeLeftPitch;

View file

@ -700,6 +700,12 @@ void Model::clearJointState(int index) {
}
}
void Model::clearJointAnimationPriority(int index) {
if (index != -1 && index < _jointStates.size()) {
_jointStates[index]._animationPriority = 0.0f;
}
}
void Model::setJointState(int index, bool valid, const glm::quat& rotation, float priority) {
if (index != -1 && index < _jointStates.size()) {
JointState& state = _jointStates[index];

View file

@ -121,6 +121,9 @@ public:
/// Clear the joint states
void clearJointState(int index);
/// Clear the joint animation priority
void clearJointAnimationPriority(int index);
/// Sets the joint state at the specified index.
void setJointState(int index, bool valid, const glm::quat& rotation = glm::quat(), float priority = 1.0f);

View file

@ -282,19 +282,10 @@ void Stats::display(
pingVoxel = totalPingVoxel/voxelServerCount;
}
Audio* audio = Application::getInstance()->getAudio();
lines = _expanded ? 4 : 3;
drawBackground(backgroundColor, horizontalOffset, 0, _pingStatsWidth, lines * STATS_PELS_PER_LINE + 10);
horizontalOffset += 5;
char audioJitter[30];
sprintf(audioJitter,
"Buffer msecs %.1f",
audio->getDesiredJitterBufferFrames() * BUFFER_SEND_INTERVAL_USECS / (float)USECS_PER_MSEC);
drawText(30, glWidget->height() - 22, scale, rotation, font, audioJitter, color);
char audioPing[30];
sprintf(audioPing, "Audio ping: %d", pingAudio);
@ -698,27 +689,6 @@ void Stats::display(
drawText(horizontalOffset, verticalOffset, 0.10f, 0.f, 2.f, reflectionsStatus, color);
}
// draw local light stats
QVector<Model::LocalLight> localLights = Application::getInstance()->getAvatarManager().getLocalLights();
verticalOffset = 400;
horizontalOffset = 20;
char buffer[128];
for (int i = 0; i < localLights.size(); i++) {
glm::vec3 lightDirection = localLights.at(i).direction;
snprintf(buffer, sizeof(buffer), "Light %d direction (%.2f, %.2f, %.2f)", i, lightDirection.x, lightDirection.y, lightDirection.z);
drawText(horizontalOffset, verticalOffset, scale, rotation, font, buffer, color);
verticalOffset += STATS_PELS_PER_LINE;
glm::vec3 lightColor = localLights.at(i).color;
snprintf(buffer, sizeof(buffer), "Light %d color (%.2f, %.2f, %.2f)", i, lightColor.x, lightColor.y, lightColor.z);
drawText(horizontalOffset, verticalOffset, scale, rotation, font, buffer, color);
verticalOffset += STATS_PELS_PER_LINE;
}
}

View file

@ -687,7 +687,7 @@ QVector<glm::quat> AvatarData::getJointRotations() const {
if (QThread::currentThread() != thread()) {
QVector<glm::quat> result;
QMetaObject::invokeMethod(const_cast<AvatarData*>(this),
"getJointRotation", Qt::BlockingQueuedConnection,
"getJointRotations", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(QVector<glm::quat>, result));
return result;
}
@ -702,7 +702,7 @@ void AvatarData::setJointRotations(QVector<glm::quat> jointRotations) {
if (QThread::currentThread() != thread()) {
QVector<glm::quat> result;
QMetaObject::invokeMethod(const_cast<AvatarData*>(this),
"setJointRotation", Qt::BlockingQueuedConnection,
"setJointRotations", Qt::BlockingQueuedConnection,
Q_ARG(QVector<glm::quat>, jointRotations));
}
for (int i = 0; i < jointRotations.size(); ++i) {

View file

@ -78,6 +78,8 @@ KeyEvent::KeyEvent(const QKeyEvent& event) {
text = "LEFT";
} else if (key == Qt::Key_Right) {
text = "RIGHT";
} else if (key == Qt::Key_Space) {
text = "SPACE";
} else if (key == Qt::Key_Escape) {
text = "ESC";
} else if (key == Qt::Key_Tab) {
@ -220,6 +222,8 @@ void keyEventFromScriptValue(const QScriptValue& object, KeyEvent& event) {
} else if (event.text.toUpper() == "RIGHT") {
event.key = Qt::Key_Right;
event.isKeypad = true;
} else if (event.text.toUpper() == "SPACE") {
event.key = Qt::Key_Space;
} else if (event.text.toUpper() == "ESC") {
event.key = Qt::Key_Escape;
} else if (event.text.toUpper() == "TAB") {